<?xml version="1.0" encoding="UTF-8" ?>
<!--
This is a stripped down config meant for streamers looking to self-host, rebroadcast
and maybe offer transcoding for their content. OvenMediaEngine supports extra features
not covered here like VOD storage, scheduled channels and multiplexing; check the website
https://airensoft.gitbook.io/ovenmediaengine for a more comprehensive feature list.
-->
<Server version="8">
<!-- Not sure this is used anywhere; call it whatever you like -->
<Name>Your stream "service" name</Name>
<!-- Single node setup -->
<Type>origin</Type>
<!-- Specify IP address to bind ("*" means all IPv4 IPs, "::" means all IPv6 IPs) -->
<!-- Uncomment the line below to enable IPv6 -->
<!-- <IP>::</IP> -->
<IP>*</IP>
<!-- Anonymize IPs in logs for compliance with privacy laws -->
<PrivacyProtection>true</PrivacyProtection>
<!--
Used to obtain the public IP address (the STUN-mapped address) of the local server.
This is useful when OME cannot get a public IP from an interface, such as in AWS or Docker environments.
If this is successful, you can use ${PublicIP} in your settings.
-->
<StunServer>stun.ovenmediaengine.com:13478</StunServer>
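<!--
Illustrative sketch, not part of the stock config: once ${PublicIP} resolves, you could
use it in the WebRTC ICE candidate settings further down instead of "*", e.g.
<IceCandidate>${PublicIP}:10000-10004/udp</IceCandidate>
<TcpRelay>${PublicIP}:3478</TcpRelay>
-->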
<Modules>
<!--
Currently OME, like all browsers, only supports h2 (HTTP/2 over TLS). Therefore, HTTP/2 only works on TLS ports.
-->
<HTTP2>
<Enable>true</Enable>
</HTTP2>
<!--
Low Latency HLS is an Apple-specific thing, don't expect it to work elsewhere!
Other clients will treat it as HLS with its normal latency of a few seconds.
-->
<LLHLS>
<Enable>true</Enable>
</LLHLS>
<!-- P2P works only with WebRTC and is an experimental feature -->
<P2P>
<!-- disabled by default -->
<Enable>false</Enable>
<MaxClientPeersPerHostPeer>2</MaxClientPeersPerHostPeer>
</P2P>
</Modules>
<!-- Fuck with this to do your performance tuning and port shuffling -->
<Bind>
<Managers>
<API>
<Port>8081</Port>
<WorkerCount>1</WorkerCount>
</API>
</Managers>
<Providers>
<RTSPC>
<WorkerCount>1</WorkerCount>
</RTSPC>
<OVT>
<WorkerCount>1</WorkerCount>
</OVT>
<RTMP>
<Port>1935</Port>
<WorkerCount>1</WorkerCount>
</RTMP>
<SRT>
<Port>9999</Port>
<WorkerCount>1</WorkerCount>
</SRT>
<WebRTC>
<Signalling>
<Port>3333</Port>
<TLSPort>3334</TLSPort>
<WorkerCount>1</WorkerCount>
</Signalling>
<IceCandidates>
<!-- Uncomment the line below to use IPv6 ICE Candidate -->
<!-- <IceCandidate>[::]:10000-10004/udp</IceCandidate> -->
<IceCandidate>*:10000/udp</IceCandidate>
<TcpRelay>*:3478</TcpRelay>
<TcpForce>false</TcpForce>
<TcpRelayWorkerCount>1</TcpRelayWorkerCount>
</IceCandidates>
</WebRTC>
</Providers>
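<!--
With the ports above, ingest endpoints would look roughly like this (stream name and
signed-policy query string depend on your setup, so treat this as a sketch):
RTMP : rtmp://your.host:1935/app/<stream>
SRT  : srt://your.host:9999?streamid=srt%3A%2F%2Fyour.host%3A9999%2Fapp%2F<stream>
(the SRT streamid is the full srt:// URL, percent-encoded)
-->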
<Publishers>
<!-- OVT is the protocol used between ORIGIN and EDGE servers -->
<OVT>
<Port>9000</Port>
<WorkerCount>1</WorkerCount>
</OVT>
<LLHLS>
<!--
OME only supports h2, so LLHLS works over HTTP/1.1 on non-TLS ports.
LLHLS works with higher performance over HTTP/2,
so it is recommended to use a TLS port.
-->
<Port>3333</Port>
<TLSPort>3334</TLSPort>
<WorkerCount>1</WorkerCount>
</LLHLS>
<WebRTC>
<Signalling>
<Port>3333</Port>
<TLSPort>3334</TLSPort>
<WorkerCount>1</WorkerCount>
</Signalling>
<IceCandidates>
<!-- Uncomment the line below to use IPv6 ICE Candidate -->
<!-- <IceCandidate>[::]:10000-10004/udp</IceCandidate> -->
<IceCandidate>*:10001-10004/udp</IceCandidate>
<!--
If you want to stream WebRTC over TCP, specify IP:Port for the TURN server.
This uses the TURN protocol, which delivers the stream from the built-in TURN server to the player's TURN client over TCP.
-->
<TcpRelay>*:3478</TcpRelay>
<!--
TcpForce is an option to force the use of TCP rather than UDP in WebRTC streaming.
(You can omit ?transport=tcp accordingly.) If <TcpRelay> is not set, playback may fail.
-->
<TcpForce>false</TcpForce>
<TcpRelayWorkerCount>1</TcpRelayWorkerCount>
</IceCandidates>
</WebRTC>
</Publishers>
</Bind>
<Managers>
<Host>
<!--
Add your TLS-covered names here. If your certificate covers multiple names
(e.g. a wildcard cert), add more <Name> entries like the commented one.
-->
<Names>
<Name>localhost</Name>
<!-- <Name>*.localhost</Name> -->
</Names>
<!--
Point these at your cert and key files. If your certificate contains the whole chain (likely),
use that same file for <ChainCertPath>. These paths are relative to the config folder.
-->
<TLS>
<CertPath>cert.crt</CertPath>
<KeyPath>cert.key</KeyPath>
<ChainCertPath>cert.crt</ChainCertPath>
</TLS>
</Host>
<API>
<!-- IMPORTANT: Change this to some username:password of choice -->
<AccessToken>admin:changeme</AccessToken>
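<!--
The REST API expects this token Base64-encoded in a Basic Authorization header.
A rough usage sketch (endpoint paths per the OME REST API docs, verify against your version):
curl -H "Authorization: Basic $(echo -n 'admin:changeme' | base64)" http://localhost:8081/v1/vhosts
-->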
<CrossDomains>
<Url>*</Url>
</CrossDomains>
</API>
</Managers>
<VirtualHosts>
<VirtualHost>
<Name>default</Name>
<!--
Distribution is a value that can be used when grouping the same vhost distributed across multiple servers.
This value is output to the events log, so you can use it to aggregate statistics.
-->
<Distribution>localhost</Distribution>
<!--
Point these at your cert and key files. If your certificate contains the whole chain (likely),
use that same file for <ChainCertPath>. These paths are relative to the config folder.
-->
<Host>
<!--
Add your TLS-covered names here. If your certificate covers multiple names
(e.g. a wildcard cert), add more <Name> entries like the commented one.
-->
<Names>
<Name>localhost</Name>
<!-- <Name>*.localhost</Name> -->
</Names>
<TLS>
<CertPath>cert.crt</CertPath>
<KeyPath>cert.key</KeyPath>
<ChainCertPath>cert.crt</ChainCertPath>
</TLS>
</Host>
<!--
This is an important bit: it prevents just about anyone from streaming on this instance.
Set the secret key to some string of choice, then use https://nebula.cafe/ome.html
to sign URLs for both streaming and playback.
-->
<SignedPolicy>
<PolicyQueryKeyName>policy</PolicyQueryKeyName>
<SignatureQueryKeyName>signature</SignatureQueryKeyName>
<!-- IMPORTANT: Change this to some alphanumeric string of choice -->
<SecretKey>CHANGEME</SecretKey>
<Enables>
<Publishers>webrtc,hls,llhls,dash,lldash</Publishers>
<Providers>rtmp,webrtc,srt</Providers>
</Enables>
</SignedPolicy>
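<!--
A signed URL then looks roughly like this (a sketch based on the SignedPolicy docs; the linked
tool builds these for you):
rtmp://your.host:1935/app/stream?policy=<base64url JSON, e.g. {"url_expire":1710000000000}>&signature=<base64url HMAC-SHA1 of the URL including the policy parameter, keyed with SecretKey>
-->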
<CrossDomains>
<Url>*</Url>
</CrossDomains>
<!--
Every application is a streaming endpoint. This example just uses one, but you
could host as many streams as you want, each with its own settings.
-->
<Applications>
<Application>
<Name>app</Name>
<!-- Application type (live/vod) -->
<Type>live</Type>
<OutputProfiles>
<!-- Enable this configuration if you want hardware acceleration using a GPU -->
<HWAccels>
<Decoder>
<Enable>false</Enable>
<!--
Setting for hardware modules:
- xma     : Xilinx Media Accelerator
- qsv     : Intel Quick Sync Video
- nv      : NVIDIA Video Codec SDK
- nilogan : Netint VPU
You can use multiple modules by separating them with commas.
For example, to use xma and nv, set it as follows:
<Modules>[ModuleName]:[DeviceId],[ModuleName]:[DeviceId],...</Modules>
<Modules>xma:0,nv:0</Modules>
-->
<!-- <Modules>nv</Modules> -->
</Decoder>
<Encoder>
<Enable>false</Enable>
<!-- <Modules>nv</Modules> -->
</Encoder>
</HWAccels>
<OutputProfile>
<Name>stream</Name>
<OutputStreamName>stream</OutputStreamName>
<!-- Transcoding options go here -->
<Encodes>
<!--
Create bypass_video/bypass_audio to expose the ingest streams untouched
and serve them as "Source" options. This has basically no overhead.
-->
<Video>
<Name>bypass_video</Name>
<Bypass>true</Bypass>
</Video>
<Audio>
<Name>bypass_audio</Name>
<Bypass>true</Bypass>
</Audio>
<!--
Encode the audio as AAC and Opus. "BypassIfMatch" will skip the encode and pass the
source audio through when the settings match (<Codec>eq</Codec> means the source codec
already equals this codec). This has pretty negligible CPU usage.
-->
<Audio>
<Name>aac</Name>
<Codec>aac</Codec>
<Bitrate>128000</Bitrate>
<Samplerate>48000</Samplerate>
<Channel>2</Channel>
<BypassIfMatch>
<Codec>eq</Codec>
</BypassIfMatch>
</Audio>
<Audio>
<Name>opus</Name>
<Codec>opus</Codec>
<Bitrate>128000</Bitrate>
<Samplerate>48000</Samplerate>
<Channel>2</Channel>
<BypassIfMatch>
<Codec>eq</Codec>
</BypassIfMatch>
</Audio>
<!--
The following enables video transcoding.
You can have as many as you want, but they will cause a lot of CPU usage
(unless using hardware acceleration)
-->
<!--
<Video>
<Name>720p</Name>
<Codec>h264</Codec>
<Bitrate>2024000</Bitrate>
<Width>1280</Width>
<Height>720</Height>
<Framerate>30</Framerate>
<KeyFrameInterval>30</KeyFrameInterval>
<BFrames>0</BFrames>
<Preset>faster</Preset>
</Video>
-->
</Encodes>
<!--
You can provide ABR with Playlist. Currently, ABR is supported in LLHLS and WebRTC.
You can play this playlist with
LLHLS : http[s]://<domain>[:port]/<app>/<stream>/<FileName>.m3u8
WebRTC : ws[s]://<domain>[:port]/<app>/<stream>/<FileName>
Note that the keywords "playlist" and "chunklist" MUST NOT be included in FileName.
-->
<!--
The WebRTC playlist. There is no real reason to use different playlists for LLHLS vs WebRTC
other than being able to provide different transcodes (if that's what you want); mostly
it's here so WebRTC can use Opus instead of AAC for audio (HLS is stinky).
-->
<Playlist>
<Name>webrtc</Name>
<FileName>webrtc</FileName>
<Options>
<WebRtcAutoAbr>true</WebRtcAutoAbr>
</Options>
<!-- Each rendition is a quality option made of a video+audio encode, e.g. 480p with AAC audio -->
<Rendition>
<Name>Source</Name>
<Video>bypass_video</Video>
<Audio>bypass_audio</Audio>
</Rendition>
<!-- If you enabled transcoding, you can uncomment the following and fiddle with it -->
<!--
<Rendition>
<Name>720p</Name>
<Video>720p</Video>
<Audio>opus</Audio>
</Rendition>
-->
</Playlist>
<Playlist>
<Name>llhls</Name>
<FileName>llhls</FileName>
<Rendition>
<Name>Source</Name>
<Video>bypass_video</Video>
<Audio>aac</Audio> <!-- Make sure we're using a supported audio format for HLS -->
</Rendition>
<!-- If you enabled transcoding, you can uncomment the following and fiddle with it -->
<!--
<Rendition>
<Name>720p</Name>
<Video>720p</Video>
<Audio>aac</Audio>
</Rendition>
-->
</Playlist>
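<!--
With this config (app "app", output stream "stream", TLS port 3334), playback URLs would be:
WebRTC : wss://localhost:3334/app/stream/webrtc
LLHLS  : https://localhost:3334/app/stream/llhls.m3u8
(append the policy/signature query parameters from SignedPolicy above)
-->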
</OutputProfile>
</OutputProfiles>
<Providers>
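<!-- Each entry here enables that ingest protocol for this application; remove one to turn it off -->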
<WebRTC />
<RTMP />
<SRT />
<RTSPPull />
</Providers>
<Publishers>
<AppWorkerCount>1</AppWorkerCount>
<StreamWorkerCount>8</StreamWorkerCount>
<WebRTC>
<Timeout>30000</Timeout>
<Rtx>true</Rtx>
<Ulpfec>true</Ulpfec>
<JitterBuffer>false</JitterBuffer>
</WebRTC>
<LLHLS>
<OriginMode>false</OriginMode>
<CacheControl>
<MasterPlaylistMaxAge>0</MasterPlaylistMaxAge>
<ChunklistMaxAge>0</ChunklistMaxAge>
<ChunklistWithDirectivesMaxAge>60</ChunklistWithDirectivesMaxAge>
<SegmentMaxAge>-1</SegmentMaxAge>
<PartialSegmentMaxAge>-1</PartialSegmentMaxAge>
</CacheControl>
<ChunkDuration>0.5</ChunkDuration>
<PartHoldBack>1.5</PartHoldBack>
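<!-- Per the LL-HLS spec, PartHoldBack should be at least 3x ChunkDuration (here 3 x 0.5 = 1.5) -->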
<SegmentDuration>6</SegmentDuration>
<SegmentCount>10</SegmentCount>
<CrossDomains>
<Url>*</Url>
</CrossDomains>
</LLHLS>
<RTMPPush />
</Publishers>
</Application>
</Applications>
</VirtualHost>
</VirtualHosts>
</Server>