kurento-client-elements


kurento-client-elements - npm Package Compare versions

Comparing version 6.6.1 to 6.12.0

lib/AlphaBlending.js

@@ -85,4 +85,7 @@ /* Autogenerated with Kurento Idl */

checkType('HubPort', 'source', source, {required: true});
checkType('int', 'zOrder', zOrder, {required: true});
//
// checkType('HubPort', 'source', source, {required: true});
//
// checkType('int', 'zOrder', zOrder, {required: true});
//

@@ -137,8 +140,15 @@ var params = {

checkType('float', 'relativeX', relativeX, {required: true});
checkType('float', 'relativeY', relativeY, {required: true});
checkType('int', 'zOrder', zOrder, {required: true});
checkType('float', 'relativeWidth', relativeWidth, {required: true});
checkType('float', 'relativeHeight', relativeHeight, {required: true});
checkType('HubPort', 'port', port, {required: true});
//
// checkType('float', 'relativeX', relativeX, {required: true});
//
// checkType('float', 'relativeY', relativeY, {required: true});
//
// checkType('int', 'zOrder', zOrder, {required: true});
//
// checkType('float', 'relativeWidth', relativeWidth, {required: true});
//
// checkType('float', 'relativeHeight', relativeHeight, {required: true});
//
// checkType('HubPort', 'port', port, {required: true});
//

@@ -145,0 +155,0 @@ var params = {

@@ -51,5 +51,9 @@ /* Autogenerated with Kurento Idl */

// Check iceCandidateDict has the required fields
checkType('String', 'iceCandidateDict.candidate', iceCandidateDict.candidate, {required: true});
checkType('String', 'iceCandidateDict.sdpMid', iceCandidateDict.sdpMid, {required: true});
checkType('int', 'iceCandidateDict.sdpMLineIndex', iceCandidateDict.sdpMLineIndex, {required: true});
//
// checkType('String', 'iceCandidateDict.candidate', iceCandidateDict.candidate, {required: true});
//
// checkType('String', 'iceCandidateDict.sdpMid', iceCandidateDict.sdpMid, {required: true});
//
// checkType('int', 'iceCandidateDict.sdpMLineIndex', iceCandidateDict.sdpMLineIndex, {required: true});
//

@@ -56,0 +60,0 @@ // Init parent class

@@ -50,6 +50,11 @@ /* Autogenerated with Kurento Idl */

// Check iceCandidatePairDict has the required fields
checkType('String', 'iceCandidatePairDict.streamID', iceCandidatePairDict.streamID, {required: true});
checkType('int', 'iceCandidatePairDict.componentID', iceCandidatePairDict.componentID, {required: true});
checkType('String', 'iceCandidatePairDict.localCandidate', iceCandidatePairDict.localCandidate, {required: true});
checkType('String', 'iceCandidatePairDict.remoteCandidate', iceCandidatePairDict.remoteCandidate, {required: true});
//
// checkType('String', 'iceCandidatePairDict.streamID', iceCandidatePairDict.streamID, {required: true});
//
// checkType('int', 'iceCandidatePairDict.componentID', iceCandidatePairDict.componentID, {required: true});
//
// checkType('String', 'iceCandidatePairDict.localCandidate', iceCandidatePairDict.localCandidate, {required: true});
//
// checkType('String', 'iceCandidatePairDict.remoteCandidate', iceCandidatePairDict.remoteCandidate, {required: true});
//

@@ -56,0 +61,0 @@ // Init parent class

@@ -48,5 +48,9 @@ /* Autogenerated with Kurento Idl */

// Check iceConnectionDict has the required fields
checkType('String', 'iceConnectionDict.streamId', iceConnectionDict.streamId, {required: true});
checkType('int', 'iceConnectionDict.componentId', iceConnectionDict.componentId, {required: true});
checkType('IceComponentState', 'iceConnectionDict.state', iceConnectionDict.state, {required: true});
//
// checkType('String', 'iceConnectionDict.streamId', iceConnectionDict.streamId, {required: true});
//
// checkType('int', 'iceConnectionDict.componentId', iceConnectionDict.componentId, {required: true});
//
// checkType('IceComponentState', 'iceConnectionDict.state', iceConnectionDict.state, {required: true});
//

@@ -53,0 +57,0 @@ // Init parent class

@@ -25,7 +25,7 @@ /* Autogenerated with Kurento Idl */

* Media Profile.
* Currently WEBM, MP4 and JPEG are supported.
* Currently WEBM, MKV, MP4 and JPEG are supported.
*
* @typedef elements/complexTypes.MediaProfileSpecType
*
* @type {(WEBM|MP4|WEBM_VIDEO_ONLY|WEBM_AUDIO_ONLY|MP4_VIDEO_ONLY|MP4_AUDIO_ONLY|JPEG_VIDEO_ONLY|KURENTO_SPLIT_RECORDER)}
* @type {(WEBM|MKV|MP4|WEBM_VIDEO_ONLY|WEBM_AUDIO_ONLY|MKV_VIDEO_ONLY|MKV_AUDIO_ONLY|MP4_VIDEO_ONLY|MP4_AUDIO_ONLY|JPEG_VIDEO_ONLY|KURENTO_SPLIT_RECORDER)}
*/

@@ -46,4 +46,4 @@

if(!value.match('WEBM|MP4|WEBM_VIDEO_ONLY|WEBM_AUDIO_ONLY|MP4_VIDEO_ONLY|MP4_AUDIO_ONLY|JPEG_VIDEO_ONLY|KURENTO_SPLIT_RECORDER'))
throw SyntaxError(key+' param is not one of [WEBM|MP4|WEBM_VIDEO_ONLY|WEBM_AUDIO_ONLY|MP4_VIDEO_ONLY|MP4_AUDIO_ONLY|JPEG_VIDEO_ONLY|KURENTO_SPLIT_RECORDER] ('+value+')');
if(!value.match('WEBM|MKV|MP4|WEBM_VIDEO_ONLY|WEBM_AUDIO_ONLY|MKV_VIDEO_ONLY|MKV_AUDIO_ONLY|MP4_VIDEO_ONLY|MP4_AUDIO_ONLY|JPEG_VIDEO_ONLY|KURENTO_SPLIT_RECORDER'))
throw SyntaxError(key+' param is not one of [WEBM|MKV|MP4|WEBM_VIDEO_ONLY|WEBM_AUDIO_ONLY|MKV_VIDEO_ONLY|MKV_AUDIO_ONLY|MP4_VIDEO_ONLY|MP4_AUDIO_ONLY|JPEG_VIDEO_ONLY|KURENTO_SPLIT_RECORDER] ('+value+')');
};

@@ -50,0 +50,0 @@
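
The hunks above show 6.12.0 extending MediaProfileSpecType with MKV variants. A minimal sketch of how a client would select the new profile, assuming an already-created MediaPipeline named 'pipeline'; the recording URI is illustrative, not taken from this diff:

// Hedged sketch: pick the MKV profile added in 6.12.0.
pipeline.create('RecorderEndpoint', {
  uri: 'file:///var/lib/kurento/recording.mkv', // illustrative path
  mediaProfile: 'MKV' // rejected by checkMediaProfileSpecType in 6.6.1
}, function(error, recorder) {
  if (error) return console.error(error);
  // 'MKV' now passes the value.match() check shown above.
});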

@@ -35,4 +35,31 @@ /* Autogenerated with Kurento Idl */

* @property {external:String} key
* A string representing the cryptographic key used. The length varies
* depending on the cryptographic method used (30 bytes length for AES_128_CM,
* <p>Master key and salt (plain text)</p>
* <p>
* This field provides the cryptographic master key appended with the
* salt, in plain text.
* </p>
* <p>
* The expected length of the key (as provided to this parameter) is
* determined by the crypto-suite for which the key applies (30
* characters for AES_CM_128, 46 characters for AES_CM_256). If the
* length does not match the expected value, the key will be
* considered invalid.
* </p>
* <p>
* If no key is provided, a random one will be generated using the
* `getrandom` system call.
* </p>
* @property {external:String} keyBase64
* <p>Master key and salt (base64 encoded)</p>
* <p>
* This field provides the cryptographic master key appended with the
* salt, encoded in base64.
* </p>
* <p>
* The expected length of the key (after being decoded from base64)
* is determined by the crypto-suite for which the key applies (30
* bytes for AES_CM_128, 46 bytes for AES_CM_256). If the length does
* not match the expected value, the key will be considered invalid.
* </p>
* <p>
* If no key is provided, a random one will be generated using the
* `getrandom` system call.
* </p>
* @property {module:elements/complexTypes.CryptoSuite} crypto

@@ -48,4 +75,9 @@ * Selects the cryptographic suite to be used. For available values, please see

// Check sDESDict has the required fields
checkType('String', 'sDESDict.key', sDESDict.key);
checkType('CryptoSuite', 'sDESDict.crypto', sDESDict.crypto);
//
// checkType('String', 'sDESDict.key', sDESDict.key);
//
// checkType('String', 'sDESDict.keyBase64', sDESDict.keyBase64);
//
// checkType('CryptoSuite', 'sDESDict.crypto', sDESDict.crypto);
//

@@ -62,2 +94,7 @@ // Init parent class

},
keyBase64: {
writable: true,
enumerable: true,
value: sDESDict.keyBase64
},
crypto: {

@@ -64,0 +101,0 @@ writable: true,
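
Version 6.12.0 adds the keyBase64 field to the SDES complex type alongside the existing key and crypto fields. A hedged sketch of passing it when creating an SRTP-capable endpoint; the RtpEndpoint target and its 'crypto' constructor parameter are assumptions based on common Kurento SDES usage, not shown in this diff:

// Hedged sketch: supply the new keyBase64 field (master key + salt,
// base64-encoded). 'pipeline' is assumed; the key is a placeholder.
var sdes = {
  keyBase64: '...', // 30 bytes for AES_CM_128, base64-encoded
  crypto: 'AES_128_CM_HMAC_SHA1_80'
};
pipeline.create('RtpEndpoint', {crypto: sdes}, function(error, rtpEndpoint) {
  if (error) return console.error(error);
  // If no key is given, the server generates one via getrandom(),
  // per the property docs above.
});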

@@ -49,6 +49,11 @@ /* Autogenerated with Kurento Idl */

// Check videoInfoDict has the required fields
checkType('boolean', 'videoInfoDict.isSeekable', videoInfoDict.isSeekable, {required: true});
checkType('int64', 'videoInfoDict.seekableInit', videoInfoDict.seekableInit, {required: true});
checkType('int64', 'videoInfoDict.seekableEnd', videoInfoDict.seekableEnd, {required: true});
checkType('int64', 'videoInfoDict.duration', videoInfoDict.duration, {required: true});
//
// checkType('boolean', 'videoInfoDict.isSeekable', videoInfoDict.isSeekable, {required: true});
//
// checkType('int64', 'videoInfoDict.seekableInit', videoInfoDict.seekableInit, {required: true});
//
// checkType('int64', 'videoInfoDict.seekableEnd', videoInfoDict.seekableEnd, {required: true});
//
// checkType('int64', 'videoInfoDict.duration', videoInfoDict.duration, {required: true});
//

@@ -55,0 +60,0 @@ // Init parent class

@@ -83,4 +83,7 @@ /* Autogenerated with Kurento Idl */

checkType('HubPort', 'source', source, {required: true});
checkType('HubPort', 'sink', sink, {required: true});
//
// checkType('HubPort', 'source', source, {required: true});
//
// checkType('HubPort', 'sink', sink, {required: true});
//

@@ -87,0 +90,0 @@ var params = {

@@ -111,3 +111,5 @@ /* Autogenerated with Kurento Idl */

checkType('HubPort', 'source', source, {required: true});
//
// checkType('HubPort', 'source', source, {required: true});
//

@@ -114,0 +116,0 @@ var params = {

@@ -64,6 +64,33 @@ /* Autogenerated with Kurento Idl */

* @property {external:Boolean} [useEncodedMedia]
* configures the endpoint to use encoded media instead of raw media. If the
* parameter is not set then the element uses raw media. Changing this
* parameter could affect in a severe way to stability because key frames lost
* will not be generated. Changing the media type does not affect to the result
* Feed the input media as-is to the Media Pipeline, instead of first decoding
* it.
* <p>
* When this property is not enabled, the input media gets always
* decoded into a raw format before being processed by the rest of
* the Media Pipeline; this is done to ensure that Kurento is able
* to keep track of lost keyframes among other quality-control
* measurements. Of course, having to decode the media has a cost
* in terms of CPU usage, but ensures that the output streaming
* will be robust and reliable.
* </p>
* <p>
* When this property is enabled, the explained behavior gets
* disabled. Instead, the endpoint will provide any input media
* directly to the Media Pipeline, without prior decoding.
* Enabling this mode of operation could have a severe effect on
* stability, because lost video keyframes will not be
* regenerated; however, avoiding a full cycle of decoding and
* encoding can be very useful for certain applications, because
* it improves performance by greatly reducing the CPU processing
* load.
* </p>
* <p>
* Keep in mind that if this property is enabled, the original
* source media MUST already have an encoding format which is
* compatible with the destination target. For example: given a
* pipeline which uses this endpoint to read a file and then
* streams it to a WebRTC browser such as Chrome, then the file
* must already be encoded with a VP8 or H.264 codec profile
* which Chrome is able to decode. Note that for this example,
* most browsers don't support ANY combination of H.264 encoding
* options; instead, they tend to support only a very specific
* subset of the codec features (also known as 'profiles').
* </p>
* <p>
* We strongly recommend to avoid using this option, because
* correct behavior cannot be guaranteed.
* </p>
*/

@@ -70,0 +97,0 @@ HttpPostEndpoint.constructorParams = {
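
For context, a hedged sketch of creating this endpoint and querying its upload URL; getUrl comes from the HttpEndpoint base class defined later in this diff, and 'pipeline' is an assumed MediaPipeline:

pipeline.create('HttpPostEndpoint', function(error, httpPost) {
  if (error) return console.error(error);
  httpPost.getUrl(function(error, url) {
    if (error) return console.error(error);
    console.log('POST media to:', url); // clients upload via HTTP POST
  });
});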

@@ -29,3 +29,3 @@ /* Autogenerated with Kurento Idl */

Object.defineProperty(exports, 'name', {value: 'elements'});
Object.defineProperty(exports, 'version', {value: '6.6.1'});
Object.defineProperty(exports, 'version', {value: '6.12.0'});

@@ -32,0 +32,0 @@

@@ -85,5 +85,9 @@ /* Autogenerated with Kurento Idl */

checkType('MediaType', 'media', media, {required: true});
checkType('HubPort', 'source', source, {required: true});
checkType('HubPort', 'sink', sink, {required: true});
//
// checkType('MediaType', 'media', media, {required: true});
//
// checkType('HubPort', 'source', source, {required: true});
//
// checkType('HubPort', 'sink', sink, {required: true});
//

@@ -128,5 +132,9 @@ var params = {

checkType('MediaType', 'media', media, {required: true});
checkType('HubPort', 'source', source, {required: true});
checkType('HubPort', 'sink', sink, {required: true});
//
// checkType('MediaType', 'media', media, {required: true});
//
// checkType('HubPort', 'source', source, {required: true});
//
// checkType('HubPort', 'sink', sink, {required: true});
//

@@ -133,0 +141,0 @@ var params = {

@@ -120,2 +120,34 @@ /* Autogenerated with Kurento Idl */

/**
* Returns the GStreamer DOT string for this element's private pipeline
*
* @alias module:elements.PlayerEndpoint#getElementGstreamerDot
*
* @param {module:elements.PlayerEndpoint~getElementGstreamerDotCallback} [callback]
*
* @return {external:Promise}
*/
PlayerEndpoint.prototype.getElementGstreamerDot = function(callback){
var transaction = (arguments[0] instanceof Transaction)
? Array.prototype.shift.apply(arguments)
: undefined;
var usePromise = false;
if (callback == undefined) {
usePromise = true;
}
if(!arguments.length) callback = undefined;
callback = (callback || noop).bind(this)
return disguise(this._invoke(transaction, 'getElementGstreamerDot', callback), this)
};
/**
* @callback module:elements.PlayerEndpoint~getElementGstreamerDotCallback
* @param {external:Error} error
* @param {external:String} result
*/
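
A short usage sketch for this method, new in 6.12.0, assuming an existing PlayerEndpoint instance named 'player'; like other kurento-client calls it also returns a promise when the callback is omitted:

player.getElementGstreamerDot(function(error, dot) {
  if (error) return console.error(error);
  console.log(dot); // DOT graph of the private pipeline, e.g. for Graphviz
});
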
/**
* Get or set the actual position of the video in ms. <hr/><b>Note</b> Setting

@@ -169,3 +201,5 @@ * the position only works for seekable videos

checkType('int64', 'position', position, {required: true});
//
// checkType('int64', 'position', position, {required: true});
//

@@ -265,3 +299,3 @@ var params = {

* @property {external:Integer} [networkCache]
* When using rtsp sources. Amount of ms to buffer
* When using RTSP sources: Amount of milliseconds to buffer
*

@@ -278,3 +312,33 @@ * @property {external:String} uri

* @property {external:Boolean} [useEncodedMedia]
* use encoded instead of raw media. If the parameter is false then the element
* Feed the input media as-is to the Media Pipeline, instead of first decoding
* it.
* <p>
* When this property is not enabled, the input media gets always
* decoded into a raw format before being processed by the rest of
* the Media Pipeline; this is done to ensure that Kurento is able
* to keep track of lost keyframes among other quality-control
* measurements. Of course, having to decode the media has a cost
* in terms of CPU usage, but ensures that the output streaming
* will be robust and reliable.
* </p>
* <p>
* When this property is enabled, the explained behavior gets
* disabled. Instead, the endpoint will provide any input media
* directly to the Media Pipeline, without prior decoding.
* Enabling this mode of operation could have a severe effect on
* stability, because lost video keyframes will not be
* regenerated; however, avoiding a full cycle of decoding and
* encoding can be very useful for certain applications, because
* it improves performance by greatly reducing the CPU processing
* load.
* </p>
* <p>
* Keep in mind that if this property is enabled, the original
* source media MUST already have an encoding format which is
* compatible with the destination target. For example: given a
* pipeline which uses this endpoint to read a file and then
* streams it to a WebRTC browser such as Chrome, then the file
* must already be encoded with a VP8 or H.264 codec profile
* which Chrome is able to decode. Note that for this example,
* most browsers don't support ANY combination of H.264 encoding
* options; instead, they tend to support only a very specific
* subset of the codec features (also known as 'profiles').
* </p>
* <p>
* We strongly recommend to avoid using this option, because
* correct behavior cannot be guaranteed.
* </p>
*/

@@ -281,0 +345,0 @@ PlayerEndpoint.constructorParams = {
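
A hedged sketch combining the two re-documented constructor parameters; the RTSP URI is illustrative and 'pipeline' is assumed:

pipeline.create('PlayerEndpoint', {
  uri: 'rtsp://camera.example.com/stream', // illustrative source
  networkCache: 2000,    // milliseconds of RTSP buffering
  useEncodedMedia: true  // skip decoding; see the warning above
}, function(error, player) {
  if (error) return console.error(error);
  player.play(function(error) {
    if (error) return console.error(error);
  });
});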

@@ -45,94 +45,151 @@ /* Autogenerated with Kurento Idl */

* <p>
* Provides the functionality to store contents. The recorder can store
* in local files or in a network resource. It receives a media stream
* from another MediaElement (i.e. the source), and stores it in the
* designated location.
* </p>
* <p>
* The following information has to be provided in order to create a
* RecorderEndpoint, and can’t be changed afterwards:
* <ul>
* <li>
* URI of the resource where media will be stored. Following schemas
* are supported:
* <ul>
* <li>
* Files: mounted in the local file system.
* <ul>
* <li>file://<path-to-file></li>
* </ul>
* <li>
* HTTP: Requires the server to support method PUT
* <ul>
* <li>
* http(s)://<server-ip>/path/to/file
* </li>
* <li>
* http(s)://username:password@<server-ip>/path/to/file
* </li>
* </ul>
* </li>
* </ul>
* </li>
* <li>
* Relative URIs (with no schema) are supported. They are completed
* prepending a default URI defined by property defaultPath. This
* property allows using relative paths instead of absolute paths. If
* </li>
* <li>
* The media profile used to store the file. This will determine the
* encoding. See below for more details about media profile
* </li>
* <li>
* Optionally, the user can select if the endpoint will stop
* processing once the EndOfStream event is detected.
* </li>
* </ul>
* <p>
* </p>
* RecorderEndpoint requires access to the resource where stream is going
* <p>
* </p>
* The media profile is quite an important parameter, as it will
* determine whether there is a transcodification or not. If the input
* stream codec is not compatible with the selected media profile, the
* media will be transcoded into a suitable format, before arriving at
* the RecorderEndpoint's sink pad. This will result in a higher CPU load
* <ul>
* <li>WEBM: No transcodification will take place.</li>
* <li>MP4: The media server will have to transcode the media received
* from VP8 to H264. This will raise the CPU load in the system.</li>
* </ul>
* <p>
* </p>
* Recording will start as soon as the user invokes the record method.
* The recorder will then store, in the location indicated, the media
* that the source is sending to the endpoint’s sink. If no media is
* being received, or no endpoint has been connected, then the
* destination will be empty. The recorder starts storing information
* into the file as soon as it gets it.
* <p>
* </p>
* When another endpoint is connected to the recorder, by default both
* AUDIO and VIDEO media types are expected, unless specified otherwise
* when invoking the connect method. Failing to provide both types, will
* result in the recording buffering the received media: it won’t be
* written to the file until the recording is stopped. This is due to the
* <p>
* </p>
* The source endpoint can be hot-swapped, while the recording is taking
* place. The recorded file will then contain different feeds. When
* switching video sources, if the new video has different size, the
* recorder will retain the size of the previous source. If the source is
* <p>
* </p>
* It is recommended to start recording only after media arrives, either
* to the endpoint that is the source of the media connected to the
* recorder, to the recorder itself, or both. Users may use the
* MediaFlowIn and MediaFlowOut events, and synchronise the recording
* with the moment media comes in. In any case, nothing will be stored in
* <p>
* </p>
* Stopping the recording process is done through the stopAndWait method,
* </p>
* Provides the functionality to store contents. The recorder can store in
* local files or in a network resource. It receives a media stream from
* another MediaElement (i.e. the source), and stores it in the designated
* location.
* </p>
* <p>
* The following information has to be provided in order to create a
* RecorderEndpoint, and cannot be changed afterwards:
* </p>
* <ul>
* <li>
* URI of the resource where media will be stored. Following schemas are
* supported:
* <ul>
* <li>
* Files: mounted in the local file system.
* <ul>
* <li><code>file:///path/to/file</code></li>
* </ul>
* </li>
* <li>
* HTTP: Requires the server to support method PUT
* <ul>
* <li><code>http(s)://{server-ip}/path/to/file</code></li>
* <li>
* <code>http(s)://username:password@{server-ip}/path/to/file</code>
* </li>
* </ul>
* </li>
* </ul>
* </li>
* <li>
* Relative URIs (with no schema) are supported. They are completed
* prepending a default URI defined by property <i>defaultPath</i>. This
* property is defined in the configuration file
* <i>/etc/kurento/modules/kurento/UriEndpoint.conf.ini</i>, and the default
* value is <code>file:///var/lib/kurento/</code>
* </li>
* <li>
* The media profile (@MediaProfileSpecType) used to store the file. This
* will determine the encoding. See below for more details about media
* profile.
* </li>
* <li>
* Optionally, the user can select if the endpoint will stop processing once
* the EndOfStream event is detected.
* </li>
* </ul>
* <p>
* RecorderEndpoint requires access to the resource where the stream is going
* to be recorded. If it's a local file (<code>file://</code>), the system
* user running the media server daemon (kurento by default) needs to have
* write permissions for that URI. If it's an HTTP server, it must be
* accessible from the machine where the media server is running, and also
* have the correct access rights. Otherwise, the media server won't be able
* to store any information, and an ErrorEvent will be fired. Please note
* that if you haven't subscribed to that type of event, you can be left
* wondering why your media is not being saved, while the error message was
* ignored.
* </p>
* <p>
* The media profile is quite an important parameter, as it will determine
* whether the server needs to perform on-the-fly transcoding of the media.
* If the input stream codec is not compatible with the selected media
* profile, the media will be transcoded into a suitable format. This will
* result in a higher CPU load and will impact overall performance of the
* media server.
* </p>
* For example: Say that your pipeline will receive <b>VP8</b>-encoded video
* from WebRTC, and sends it to a RecorderEndpoint; depending on the format
* selected...
* <ul>
* <li>
* WEBM: The input codec is the same as the recording format, so no
* transcoding will take place.
* </li>
* <li>
* MP4: The media server will have to transcode from <b>VP8</b> to
* <b>H264</b>. This will raise the CPU load in the system.
* </li>
* <li>
* MKV: Again, video must be transcoded from <b>VP8</b> to <b>H264</b>,
* which means more CPU load.
* </li>
* </ul>
* From this you can see how selecting the correct format for your
* application is a very important decision.
* <p>
* Recording will start as soon as the user invokes the record method. The
* recorder will then store, in the location indicated, the media that the
* source is sending to the endpoint's sink. If no media is being received,
* or no endpoint has been connected, then the destination will be empty.
* The recorder starts storing information into the file as soon as it gets
* it.
* </p>
* <p>
* When another endpoint is connected to the recorder, by default both AUDIO
* and VIDEO media types are expected, unless specified otherwise when
* invoking the connect method. Failing to provide both types will result in
* the recording buffering the received media: it won't be written to the
* file until the recording is stopped. This is due to the recorder waiting
* for the other type of media to arrive, so they are synchronized.
* </p>
* <p>
* The source endpoint can be hot-swapped while the recording is taking
* place. The recorded file will then contain different feeds. When switching
* video sources, if the new video has different size, the recorder will
* retain the size of the previous source. If the source is disconnected, the
* last frame recorded will be shown for the duration of the disconnection,
* or until the recording is stopped.
* </p>
* <p>
* It is recommended to start recording only after media arrives, either to
* the endpoint that is the source of the media connected to the recorder, to
* the recorder itself, or both. Users may use the MediaFlowIn and
* MediaFlowOut events, and synchronize the recording with the moment media
* comes in. In any case, nothing will be stored in the file until the first
* media packets arrive.
* </p>
* <p>
* Stopping the recording process is done through the stopAndWait method,
* which will return only after all the information was stored correctly. If
* the file is empty, this means that no media arrived at the recorder.
* </p>
*

@@ -139,0 +196,0 @@ * @extends module:core/abstracts.UriEndpoint
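
A hedged sketch of the lifecycle described above: connect a source, start recording once media actually flows, and stop with stopAndWait. It assumes an existing 'pipeline' and a 'webRtcEndpoint' source; MediaFlowInStateChange is assumed to be the 6.x event name behind the MediaFlowIn event the text mentions:

pipeline.create('RecorderEndpoint', {
  uri: 'file:///var/lib/kurento/recording.webm', // illustrative path
  mediaProfile: 'WEBM',   // matches WebRTC VP8, so no transcoding
  stopOnEndOfStream: true
}, function(error, recorder) {
  if (error) return console.error(error);

  webRtcEndpoint.connect(recorder, function(error) {
    if (error) return console.error(error);
  });

  // Start only when media is actually flowing in, as recommended above.
  recorder.on('MediaFlowInStateChange', function(event) {
    if (event.state === 'Flowing') {
      recorder.record(function(error) {
        if (error) return console.error(error);
      });
    }
  });

  // Later, to finish: stopAndWait() returns only after the file is
  // completely written; an empty file means no media ever arrived.
  // recorder.stopAndWait(function(error) { ... });
});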

@@ -80,5 +80,4 @@ /* Autogenerated with Kurento Idl */

* <p>
* While there is no congestion control in this endpoint, the user can
* set some bandwidth limits that will be used during the negotiation
* process.
* The user can set some bandwidth limits that will be used during the
* negotiation process.
* The default bandwidth range of the endpoint is 100kbps-500kbps, but it

@@ -118,6 +117,5 @@ * <ul style='list-style-type:circle'>

* <p>
* Having no congestion control implementation means that the bitrate
* will remain constant. This is something to take into consideration
* when setting upper limits for the output bandwidth, or the local
* network connection can be flooded.
* Take into consideration that setting too high an upper limit for the
* output bandwidth can cause the local network connection to be
* flooded.
* </p>

@@ -124,0 +122,0 @@ *
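
The bandwidth limits use the setMin/MaxVideoRecvBandwidth and setMin/MaxVideoSendBandwidth setters named in the WebRtcEndpoint docs later in this diff. A minimal sketch, assuming an existing 'webRtcEndpoint'; values are in kbps, and the calls must happen before the SDP negotiation, as the text notes:

webRtcEndpoint.setMinVideoRecvBandwidth(300, function(error) {
  if (error) return console.error(error);
});
webRtcEndpoint.setMaxVideoSendBandwidth(1000, function(error) {
  if (error) return console.error(error);
});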

@@ -383,3 +383,5 @@ /* Autogenerated with Kurento Idl */

checkType('String', 'stunServerAddress', stunServerAddress, {required: true});
//
// checkType('String', 'stunServerAddress', stunServerAddress, {required: true});
//

@@ -446,3 +448,5 @@ var params = {

checkType('int', 'stunServerPort', stunServerPort, {required: true});
//
// checkType('int', 'stunServerPort', stunServerPort, {required: true});
//

@@ -511,3 +515,5 @@ var params = {

checkType('String', 'turnUrl', turnUrl, {required: true});
//
// checkType('String', 'turnUrl', turnUrl, {required: true});
//

@@ -549,3 +555,5 @@ var params = {

checkType('IceCandidate', 'candidate', candidate, {required: true});
//
// checkType('IceCandidate', 'candidate', candidate, {required: true});
//
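
These hunks touch the Trickle ICE entry points (gatherCandidates, addIceCandidate). A hedged sketch of the offeree flow they support, per the WebRtcEndpoint docs below; 'pipeline', the incoming 'sdpOffer', and a 'signaling' channel are assumed:

pipeline.create('WebRtcEndpoint', function(error, webRtcEndpoint) {
  if (error) return console.error(error);

  // Subscribe BEFORE gathering, or early candidates may be lost.
  webRtcEndpoint.on('IceCandidateFound', function(event) {
    signaling.send({id: 'iceCandidate', candidate: event.candidate});
  });

  webRtcEndpoint.processOffer(sdpOffer, function(error, sdpAnswer) {
    if (error) return console.error(error);
    signaling.send({id: 'sdpAnswer', sdpAnswer: sdpAnswer});
    webRtcEndpoint.gatherCandidates(function(error) {
      if (error) return console.error(error);
    });
  });

  // Remote candidates from the browser are fed back with:
  // webRtcEndpoint.addIceCandidate(candidate);
});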

@@ -582,3 +590,5 @@ var params = {

checkType('int', 'channelId', channelId, {required: true});
//
// checkType('int', 'channelId', channelId, {required: true});
//

@@ -672,7 +682,13 @@ var params = {

checkType('String', 'label', label);
checkType('boolean', 'ordered', ordered);
checkType('int', 'maxPacketLifeTime', maxPacketLifeTime);
checkType('int', 'maxRetransmits', maxRetransmits);
checkType('String', 'protocol', protocol);
//
// checkType('String', 'label', label);
//
// checkType('boolean', 'ordered', ordered);
//
// checkType('int', 'maxPacketLifeTime', maxPacketLifeTime);
//
// checkType('int', 'maxRetransmits', maxRetransmits);
//
// checkType('String', 'protocol', protocol);
//
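
A hedged sketch of the parameters validated above, assuming the generated positional signature (label, ordered, maxPacketLifeTime, maxRetransmits, protocol) and an endpoint created with useDataChannels: true; otherwise this method throws:

// Partially reliable, unordered channel: UDP-like semantics with a
// retransmission budget of 5 attempts.
webRtcEndpoint.createDataChannel('telemetry', false, -1, 5, '',
    function(error) {
  if (error) return console.error(error);
});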

@@ -740,2 +756,8 @@ var params = {

*
* @property {external:Boolean} [recvonly]
* Single direction, receive-only endpoint
*
* @property {external:Boolean} [sendonly]
* Single direction, send-only endpoint
*
* @property {external:Boolean} [useDataChannels]

@@ -751,2 +773,6 @@ * Activate data channels support

},
recvonly: {
type: 'boolean' },
sendonly: {
type: 'boolean' },
useDataChannels: {

@@ -753,0 +779,0 @@ type: 'boolean' }
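
The new recvonly/sendonly constructor flags map to one-directional endpoints; a minimal sketch, assuming an existing 'pipeline':

// A send-only endpoint, e.g. the server side feeding a viewer in a
// one-to-many broadcast.
pipeline.create('WebRtcEndpoint', {sendonly: true}, function(error, sender) {
  if (error) return console.error(error);
  // 'sender' negotiates outgoing media only in the SDP exchange.
});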

{
"name": "kurento-client-elements",
"version": "6.6.1",
"description": "Elements implementations for kurento media server",
"main": "lib/index.js",
"version": "6.12.0",
"description": "JavaScript Client API for Kurento Media Server",
"repository": {
"type": "git",
"url": "Kurento/kurento-client-elements-js"
"url": "https://github.com/Kurento/kurento-client-elements-js.git"
},

@@ -15,2 +14,3 @@ "keywords": [

],
"main": "lib/index.js",
"license": "ALv2",

@@ -17,0 +17,0 @@ "homepage": "http://www.kurento.com",

@@ -16,3 +16,3 @@ [![License badge](https://img.shields.io/badge/license-Apache2-orange.svg)](http://www.apache.org/licenses/LICENSE-2.0)

Module "elements" description: Elements implementations for kurento media server
Module "elements" description: JavaScript Client API for Kurento Media Server

@@ -19,0 +19,0 @@ What is Kurento

@@ -15,3 +15,3 @@ [![License badge](https://img.shields.io/badge/license-Apache2-orange.svg)](http://www.apache.org/licenses/LICENSE-2.0)

Module "elements" description: Elements implementations for kurento media server
Module "elements" description: JavaScript Client API for Kurento Media Server

@@ -18,0 +18,0 @@ What is Kurento

{
"name": "elements",
"version": "6.6.1",
"kurentoVersion": "^6.1.0",
"version": "6.12.0",
"kurentoVersion": "^6.7.0",
"imports": [
{
"name": "core",
"version": "^6.1.0",
"mavenVersion": "[6.1.0,7.0.0-SNAPSHOT)",
"npmVersion": ">=6.1.0 <7.0.0"
"version": "^6.7.0",
"mavenVersion": "[6.7.0,7.0.0-SNAPSHOT)",
"npmVersion": ">=6.7.0 <7.0.0"
}

@@ -18,3 +18,3 @@ ],

"mavenArtifactId": "kms-api-elements",
"mavenVersion": "6.6.1"
"mavenVersion": "6.12.0"
}

@@ -27,9 +27,9 @@ },

"mavenArtifactId": "kurento-client",
"mavenVersion": "6.6.1"
"mavenVersion": "6.12.0"
},
"js": {
"nodeName": "kurento-client-elements",
"npmDescription": "Elements implementations for kurento media server",
"npmDescription": "JavaScript Client API for Kurento Media Server",
"npmGit": "Kurento/kurento-client-elements-js",
"npmVersion": "6.6.1"
"npmVersion": "6.12.0"
}

@@ -44,4 +44,4 @@ },

{
"name": "DispatcherOneToMany",
"doc": "A :rom:cls:`Hub` that sends a given source to all the connected sinks",
"name": "AlphaBlending",
"doc": "A :rom:cls:`Hub` that mixes the :rom:attr:`MediaType.AUDIO` stream of its connected sources and constructs one output with :rom:attr:`MediaType.VIDEO` streams of its connected sources into its sink",
"extends": "Hub",

@@ -56,3 +56,3 @@ "constructor": {

],
"doc": "Create a :rom:cls:`DispatcherOneToMany` belonging to the given pipeline."
"doc": "Create for the given pipeline"
},

@@ -64,13 +64,49 @@ "methods": [

"name": "source",
"doc": "source to be broadcasted",
"doc": "The reference to the HubPort setting as master port",
"type": "HubPort"
},
{
"name": "zOrder",
"doc": "The order in z to draw the master image",
"type": "int"
}
],
"name": "setSource",
"doc": "Sets the source port that will be connected to the sinks of every :rom:cls:`HubPort` of the dispatcher"
"name": "setMaster",
"doc": "Sets the source port that will be the master entry to the mixer"
},
{
"params": [],
"name": "removeSource",
"doc": "Remove the source port and stop the media pipeline."
"params": [
{
"name": "relativeX",
"doc": "The x position relative to the master port. Values from 0 to 1 are accepted. The value 0, indicates the coordinate 0 in the master image.",
"type": "float"
},
{
"name": "relativeY",
"doc": "The y position relative to the master port. Values from 0 to 1 are accepted. The value 0, indicates the coordinate 0 in the master image.",
"type": "float"
},
{
"name": "zOrder",
"doc": "The order in z to draw the images. The greatest value of z is in the top.",
"type": "int"
},
{
"name": "relativeWidth",
"doc": "The image width relative to the master port width. Values from 0 to 1 are accepted.",
"type": "float"
},
{
"name": "relativeHeight",
"doc": "The image height relative to the master port height. Values from 0 to 1 are accepted.",
"type": "float"
},
{
"name": "port",
"doc": "The reference to the confingured port.",
"type": "HubPort"
}
],
"name": "setPortProperties",
"doc": "Configure the blending mode of one port."
}

@@ -80,5 +116,5 @@ ]
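
A hedged sketch of the two AlphaBlending methods defined above; 'pipeline' is assumed, and the HubPorts are created with pipeline.create('HubPort', ...), a construction pattern from general Kurento usage that is not part of this diff:

pipeline.create('AlphaBlending', function(error, blender) {
  if (error) return console.error(error);
  // masterPort and overlayPort are assumed to be HubPorts created
  // beforehand via pipeline.create('HubPort', {hub: blender}, ...).
  blender.setMaster(masterPort, 1, function(error) { // zOrder 1
    if (error) return console.error(error);
  });
  // Overlay in the top-right quarter, drawn above the master (zOrder 2).
  blender.setPortProperties(0.5, 0.0, 2, 0.5, 0.5, overlayPort,
      function(error) {
    if (error) return console.error(error);
  });
});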

{
"name": "WebRtcEndpoint",
"doc": "<p>\n Control interface for Kurento WebRTC endpoint.\n </p>\n <p>\n This endpoint is one side of a peer-to-peer WebRTC communication, being the other peer a WebRTC capable browser -using the RTCPeerConnection API-, a native WebRTC app or even another Kurento Media Server.\n </p>\n <p>\n In order to establish a WebRTC communication, peers engage in an SDP negotiation process, where one of the peers (the offerer) sends an offer, while the other peer (the offeree) responds with an answer. This endpoint can function in both situations\n <ul>\n <li>\n As offerer: The negotiation process is initiated by the media server\n <ul style='list-style-type:circle'>\n <li>KMS generates the SDP offer through the <code>generateOffer</code> method. This <i>offer</i> must then be sent to the remote peer (the offeree) through the signaling channel, for processing.</li>\n <li>The remote peer process the <i>offer</i>, and generates an <i>answer</i> to this <i>offer</i>. The <i>answer</i> is sent back to the media server.</li>\n <li>Upon receiving the <i>answer</i>, the endpoint must invoke the <code>processAnswer</code> method.</li>\n </ul>\n </li>\n <li>\n As offeree: The negotiation process is initiated by the remote peer\n <ul>\n <li>The remote peer, acting as offerer, generates an SDP <i>offer</i> and sends it to the WebRTC endpoint in Kurento.</li>\n <li>The endpoint will process the <i>offer</i> invoking the <code>processOffer</code> method. The result of this method will be a string, containing an SDP <i>answer</i>.</li>\n <li>The SDP <i>answer</i> must be sent back to the offerer, so it can be processed.</li>\n </ul>\n </li>\n </ul>\n </p>\n <p>\n SDPs are sent without ICE candidates, following the Trickle ICE optimization. Once the SDP negotiation is completed, both peers proceed with the ICE discovery process, intended to set up a bidirectional media connection. During this process, each peer\n <ul>\n <li>Discovers ICE candidates for itself, containing pairs of IPs and ports.</li>\n <li>ICE candidates are sent via the signaling channel as they are discovered, to the remote peer for probing.</li>\n <li>ICE connectivity checks are run as soon as the new candidate description, from the remote peer, is available.</li>\n </ul>\n Once a suitable pair of candidates (one for each peer) is discovered, the media session can start. The harvesting process in Kurento, begins with the invocation of the <code>gatherCandidates</code> method. Since the whole Trickle ICE purpose is to speed-up connectivity, candidates are generated asynchronously. Therefore, in order to capture the candidates, the user must subscribe to the event <code>IceCandidateFound</code>. It is important that the event listener is bound before invoking <code>gatherCandidates</code>, otherwise a suitable candidate might be lost, and connection might not be established.\n </p>\n <p>\n It's important to keep in mind that WebRTC connection is an asynchronous process, when designing interactions between different MediaElements. For example, it would be pointless to start recording before media is flowing. In order to be notified of state changes, the application can subscribe to events generated by the WebRtcEndpoint. Following is a full list of events generated by WebRtcEndpoint:\n <ul>\n <li>\n <code>IceComponentStateChange</code>: This event informs only about changes in the ICE connection state. 
Possible values are:\n <ul style='list-style-type:circle'>\n <li><code>DISCONNECTED</code>: No activity scheduled</li>\n <li><code>GATHERING</code>: Gathering local candidates</li>\n <li><code>CONNECTING</code>: Establishing connectivity</li>\n <li><code>CONNECTED</code>: At least one working candidate pair</li>\n <li><code>READY</code>: ICE concluded, candidate pair selection is now final</li>\n <li><code>FAILED</code>: Connectivity checks have been completed, but media connection was not established</li>\n </ul>\n The transitions between states are covered in RFC5245.\n It could be said that it's network-only, as it only takes into account the state of the network connection, ignoring other higher level stuff, like DTLS handshake, RTCP flow, etc. This implies that, while the component state is <code>CONNECTED</code>, there might be no media flowing between the peers. This makes this event useful only to receive low-level information about the connection between peers. Even more, while other events might leave a graceful period of time before firing, this event fires immediately after the state change is detected.\n </li>\n <li>\n <code>IceCandidateFound</code>: Raised when a new candidate is discovered. ICE candidates must be sent to the remote peer of the connection. Failing to do so for some or all of the candidates might render the connection unusable.\n </li>\n <li>\n <code>IceGatheringDone</code>: Raised when the ICE harvesting process is completed. This means that all candidates have already been discovered.\n </li>\n <li>\n <code>NewCandidatePairSelected</code>: Raised when a new ICE candidate pair gets selected. The pair contains both local and remote candidates being used for a component. This event can be raised during a media session, if a new pair of candidates with higher priority in the link are found.\n </li>\n <li>\n <code>DataChannelOpen</code>: Raised when a data channel is open.\n </li>\n <li>\n <code>DataChannelClose</code>: Raised when a data channel is closed.\n </li>\n </ul>\n </p>\n <p>\n Registering to any of above events requires the application to provide a callback function. Each event provides different information, so it is recommended to consult the signature of the event listeners.\n </p>\n <p>\n Flow control and congestion management is one of the most important features of WebRTC. WebRTC connections start with the lowest bandwidth configured and slowly ramps up to the maximum available bandwidth, or to the higher limit of the exploration range in case no bandwidth limitation is detected. Notice that WebRtcEndpoints in Kurento are designed in a way that multiple WebRTC connections fed by the same stream share quality. When a new connection is added, as it requires to start with low bandwidth, it will cause the rest of connections to experience a transient period of degraded quality, until it stabilizes its bitrate. This doesn't apply when transcoding is involved. Transcoders will adjust their output bitrate based in bandwidth requirements, but it won't affect the original stream. 
If an incoming WebRTC stream needs to be transcoded, for whatever reason, all WebRtcEndpoints fed from transcoder output will share a separate quality than the ones connected directly to the original stream.\n </p>\n <p>\n The default bandwidth range of the endpoint is 100kbps-500kbps, but it can be changed separately for input/output directions and for audio/video streams.\n <ul>\n <li>\n Input bandwidth control mechanism: Configuration interval used to inform remote peer the range of bitrates that can be pushed into this WebRtcEndpoint object.\n <ul style='list-style-type:circle'>\n <li>\n setMin/MaxVideoRecvBandwidth: sets Min/Max bitrate limits expected for received video stream.\n </li>\n <li>\n setMin/MaxAudioRecvBandwidth: sets Min/Max bitrate limits expected for received audio stream.\n </li>\n </ul>\n Max values are announced in the SDP, while min values are set to limit the lower value of REMB packages. It follows that min values will only have effect in peers that support this control mechanism, such as Chrome.\n </li>\n <li>\n Output bandwidth control mechanism: Configuration interval used to control bitrate of the output video stream sent to remote peer. It is important to keep in mind that pushed bitrate depends on network and remote peer capabilities. Remote peers can also announce bandwidth limitation in their SDPs (through the <code>b=<modifier>:<value></code> tag). Kurento will always enforce bitrate limitations specified by the remote peer over internal configurations.\n <ul style='list-style-type:circle'>\n <li>\n setMin/MaxVideoSendBandwidth: sets Min/Max bitrate limits for video sent to remote peer\n </li>\n </ul>\n </li>\n </ul>\n All bandwidth control parameters must be changed before the SDP negotiation takes place, and can't be changed afterwards.\n </p>\n <p>\n DataChannels allow other media elements that make use of the DataPad, to send arbitrary data. For instance, if there is a filter that publishes event information, it'll be sent to the remote peer through the channel. There is no API available for programmers to make use of this feature in the WebRtcElement. DataChannels can be configured to provide the following:\n <ul>\n <li>\n Reliable or partially reliable delivery of sent messages\n </li>\n <li>\n In-order or out-of-order delivery of sent messages\n </li>\n </ul>\n Unreliable, out-of-order delivery is equivalent to raw UDP semantics. The message may make it, or it may not, and order is not important. However, the channel can be configured to be <i>partially reliable</i> by specifying the maximum number of retransmissions or setting a time limit for retransmissions: the WebRTC stack will handle the acknowledgments and timeouts.\n </p>\n <p>\n The possibility to create DataChannels in a WebRtcEndpoint must be explicitly enabled when creating the endpoint, as this feature is disabled by default. If this is the case, they can be created invoking the createDataChannel method. The arguments for this method, all of them optional, provide the necessary configuration:\n <ul>\n <li>\n <code>label</code>: assigns a label to the DataChannel. This can help identify each possible channel separately.\n </li>\n <li>\n <code>ordered</code>: specifies if the DataChannel guarantees order, which is the default mode. If maxPacketLifetime and maxRetransmits have not been set, this enables reliable mode.\n </li>\n <li>\n <code>maxPacketLifeTime</code>: The time window in milliseconds, during which transmissions and retransmissions may take place in unreliable mode. 
This forces unreliable mode, even if <code>ordered</code> has been activated.\n </li>\n <li>\n <code>maxRetransmits</code>: maximum number of retransmissions that are attempted in unreliable mode. This forces unreliable mode, even if <code>ordered</code> has been activated.\n </li>\n <li>\n <code>Protocol</code>: Name of the subprotocol used for data communication.\n </li>\n </ul>\n ",
"extends": "BaseRtpEndpoint",
"name": "Composite",
"doc": "A :rom:cls:`Hub` that mixes the :rom:attr:`MediaType.AUDIO` stream of its connected sources and constructs a grid with the :rom:attr:`MediaType.VIDEO` streams of its connected sources into its sink",
"extends": "Hub",
"constructor": {

@@ -88,136 +124,93 @@ "params": [

"name": "mediaPipeline",
"doc": "the :rom:cls:`MediaPipeline` to which the endpoint belongs",
"doc": "the :rom:cls:`MediaPipeline` to which the dispatcher belongs",
"type": "MediaPipeline"
},
}
],
"doc": "Create for the given pipeline"
}
},
{
"name": "Dispatcher",
"doc": "A :rom:cls:`Hub` that allows routing between arbitrary port pairs",
"extends": "Hub",
"constructor": {
"params": [
{
"name": "useDataChannels",
"doc": "Activate data channels support",
"type": "boolean",
"optional": true,
"defaultValue": false
},
{
"name": "certificateKeyType",
"doc": "Define the type of the certificate used in dtls",
"type": "CertificateKeyType",
"optional": true,
"defaultValue": "RSA"
"name": "mediaPipeline",
"doc": "the :rom:cls:`MediaPipeline` to which the dispatcher belongs",
"type": "MediaPipeline"
}
],
"doc": "Builder for the :rom:cls:`WebRtcEndpoint`"
"doc": "Create a :rom:cls:`Dispatcher` belonging to the given pipeline."
},
"properties": [
{
"name": "stunServerAddress",
"doc": "address of the STUN server (Only IP address are supported)",
"type": "String"
},
{
"name": "stunServerPort",
"doc": "port of the STUN server",
"type": "int"
},
{
"name": "turnUrl",
"doc": "TURN server URL with this format: <code>user:password@address:port(?transport=[udp|tcp|tls])</code>.</br><code>address</code> must be an IP (not a domain).</br><code>transport</code> is optional (UDP by default).",
"type": "String"
},
{
"name": "ICECandidatePairs",
"doc": "the ICE candidate pair (local and remote candidates) used by the ice library for each stream.",
"type": "IceCandidatePair[]",
"readOnly": true
},
{
"name": "IceConnectionState",
"doc": "the ICE connection state for all the connections.",
"type": "IceConnection[]",
"readOnly": true
}
],
"methods": [
{
"params": [],
"name": "gatherCandidates",
"doc": "Start the gathering of ICE candidates.</br>It must be called after SdpEndpoint::generateOffer or SdpEndpoint::processOffer for Trickle ICE. If invoked before generating or processing an SDP offer, the candidates gathered will be added to the SDP processed."
},
{
"params": [
{
"name": "candidate",
"doc": "Remote ICE candidate",
"type": "IceCandidate"
"name": "source",
"doc": "Source port to be connected",
"type": "HubPort"
},
{
"name": "sink",
"doc": "Sink port to be connected",
"type": "HubPort"
}
],
"name": "addIceCandidate",
"doc": "Process an ICE candidate sent by the remote peer of the connection."
},
"name": "connect",
"doc": "Connects each corresponding :rom:enum:`MediaType` of the given source port with the sink port."
}
]
},
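
A hedged sketch of routing through the Dispatcher defined above; 'pipeline' is assumed, and the HubPort construction pattern again comes from general Kurento usage rather than this diff:

pipeline.create('Dispatcher', function(error, dispatcher) {
  if (error) return console.error(error);
  pipeline.create('HubPort', {hub: dispatcher}, function(error, sourcePort) {
    if (error) return console.error(error);
    pipeline.create('HubPort', {hub: dispatcher}, function(error, sinkPort) {
      if (error) return console.error(error);
      // Route every MediaType of sourcePort into sinkPort.
      dispatcher.connect(sourcePort, sinkPort, function(error) {
        if (error) return console.error(error);
      });
    });
  });
});
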
{
"name": "DispatcherOneToMany",
"doc": "A :rom:cls:`Hub` that sends a given source to all the connected sinks",
"extends": "Hub",
"constructor": {
"params": [
{
"name": "mediaPipeline",
"doc": "the :rom:cls:`MediaPipeline` to which the dispatcher belongs",
"type": "MediaPipeline"
}
],
"doc": "Create a :rom:cls:`DispatcherOneToMany` belonging to the given pipeline."
},
"methods": [
{
"params": [
{
"name": "label",
"doc": "Channel's label",
"type": "String",
"optional": true,
"defaultValue": ""
},
{
"name": "ordered",
"doc": "If the data channel should guarantee order or not. If true, and maxPacketLifeTime and maxRetransmits have not been provided, reliable mode is activated.",
"type": "boolean",
"optional": true,
"defaultValue": true
},
{
"name": "maxPacketLifeTime",
"doc": "The time window (in milliseconds) during which transmissions and retransmissions may take place in unreliable mode.</br>\n .. note:: This forces unreliable mode, even if <code>ordered</code> has been activated",
"type": "int",
"optional": true,
"defaultValue": -1
},
{
"name": "maxRetransmits",
"doc": "maximum number of retransmissions that are attempted in unreliable mode.</br>\n .. note:: This forces unreliable mode, even if <code>ordered</code> has been activated",
"type": "int",
"optional": true,
"defaultValue": -1
},
{
"name": "protocol",
"doc": "Name of the subprotocol used for data communication",
"type": "String",
"optional": true,
"defaultValue": ""
"name": "source",
"doc": "source to be broadcasted",
"type": "HubPort"
}
],
"name": "createDataChannel",
"doc": "Create a new data channel, if data channels are supported. If they are not supported, this method throws an exception.\n Being supported means that the WebRtcEndpoint has been created with data channel support, the client also supports data channels, and they have been negotaited in the SDP exchange.\n Otherwise, the method throws an exception, indicating that the operation is not possible.</br>\n Data channels can work in either unreliable mode (analogous to User Datagram Protocol or UDP) or reliable mode (analogous to Transmission Control Protocol or TCP).\n The two modes have a simple distinction:\n <ul>\n <li>Reliable mode guarantees the transmission of messages and also the order in which they are delivered. This takes extra overhead, thus potentially making this mode slower.</li>\n <li>Unreliable mode does not guarantee every message will get to the other side nor what order they get there. This removes the overhead, allowing this mode to work much faster.</li>\n </ul>"
"name": "setSource",
"doc": "Sets the source port that will be connected to the sinks of every :rom:cls:`HubPort` of the dispatcher"
},
{
"params": [
{
"name": "channelId",
"doc": "The channel identifier",
"type": "int"
}
],
"name": "closeDataChannel",
"doc": "Closes an open data channel"
"params": [],
"name": "removeSource",
"doc": "Remove the source port and stop the media pipeline."
}
],
"events": [
"OnIceCandidate",
"IceCandidateFound",
"OnIceGatheringDone",
"IceGatheringDone",
"OnIceComponentStateChanged",
"IceComponentStateChange",
"OnDataChannelOpened",
"DataChannelOpen",
"OnDataChannelClosed",
"DataChannelClose",
"NewCandidatePairSelected"
]
},
{
"name": "HttpEndpoint",
"doc": "Endpoint that enables Kurento to work as an HTTP server, allowing peer HTTP clients to access media.",
"abstract": true,
"extends": "SessionEndpoint",
"methods": [
{
"params": [],
"return": {
"type": "String",
"doc": "The url as a String"
},
"name": "getUrl",
"doc": "Obtains the URL associated to this endpoint"
}
]
},
{
"name": "HttpPostEndpoint",

@@ -242,3 +235,3 @@ "doc": "An :rom:cls:`HttpPostEndpoint` contains SINK pads for AUDIO and VIDEO, which provide access to an HTTP file upload function\n\n This type of endpoint provide unidirectional communications. Its :rom:cls:`MediaSources <MediaSource>` are accessed through the :term:`HTTP` POST method.",

"name": "useEncodedMedia",
"doc": "configures the endpoint to use encoded media instead of raw media. If the parameter is not set then the element uses raw media. Changing this parameter could affect in a severe way to stability because key frames lost will not be generated. Changing the media type does not affect to the result except in the performance (just in the case where original media and target media are the same) and in the problem with the key frames. We strongly recommended not to use this parameter because correct behaviour is not guarantied.",
"doc": "Feed the input media as-is to the Media Pipeline, instead of first decoding it.\n <p>\n When this property is not enabled, the input media gets always decoded into a raw format before being processed by the rest of the Media Pipeline; this is done to ensure that Kurento is able to keep track of lost keyframes among other quality-control measurements. Of course, having to decode the media has a cost in terms of CPU usage, but ensures that the output streaming will be robust and reliable.\n </p>\n <p>\n When this property is enabled, the explained behavior gets disabled. Instead, The endpoint will provide any input media directly to the Media Pipeline, without prior decoding. Enabling this mode of operation could have a severe effect on stability, because lost video keyframes will not be regenerated; however, avoiding a full cycle of decoding and encoding can be very useful for certain applications, because it improves performance by greatly reducing the CPU processing load.\n </p>\n <p>\n Keep in mind that if this property is enabled, the original source media MUST already have an encoding format which is compatible with the destination target. For example: given a pipeline which uses this endpoint to read a file and then streams it to a WebRTC browser such as Chrome, then the file must already be encoded with a VP8 or H.264 codec profile which Chrome is able to decode. Note that for this example, most browsers don't support ANY combination of H.264 encoding options; instead, they tend to support only a very specific subset of the codec features (also known as 'profiles').\n </p>\n <p>\n We strongly recommend to avoid using this option, because correct behavior cannot be guaranteed.\n </p>\n ",
"type": "boolean",

@@ -256,19 +249,2 @@ "optional": true,

{
"name": "HttpEndpoint",
"doc": "Endpoint that enables Kurento to work as an HTTP server, allowing peer HTTP clients to access media.",
"abstract": true,
"extends": "SessionEndpoint",
"methods": [
{
"params": [],
"return": {
"type": "String",
"doc": "The url as a String"
},
"name": "getUrl",
"doc": "Obtains the URL associated to this endpoint"
}
]
},
{
"name": "Mixer",

@@ -350,3 +326,3 @@ "doc": "A :rom:cls:`Hub` that allows routing of video between arbitrary port pairs and mixing of audio among several ports",

"name": "useEncodedMedia",
"doc": "use encoded instead of raw media. If the parameter is false then the element uses raw media. Changing this parameter can affect stability severely, as lost key frames will not be regenerated. Enabling this flag does not affect the overall behaviour, but has an impact in performance (just in case where original media and target media are the same). It will help solve the problem with lost key frames. We strongly recommended not to use this parameter because correct behaviour is not guarantied.",
"doc": "Feed the input media as-is to the Media Pipeline, instead of first decoding it.\n <p>\n When this property is not enabled, the input media gets always decoded into a raw format before being processed by the rest of the Media Pipeline; this is done to ensure that Kurento is able to keep track of lost keyframes among other quality-control measurements. Of course, having to decode the media has a cost in terms of CPU usage, but ensures that the output streaming will be robust and reliable.\n </p>\n <p>\n When this property is enabled, the explained behavior gets disabled. Instead, The endpoint will provide any input media directly to the Media Pipeline, without prior decoding. Enabling this mode of operation could have a severe effect on stability, because lost video keyframes will not be regenerated; however, avoiding a full cycle of decoding and encoding can be very useful for certain applications, because it improves performance by greatly reducing the CPU processing load.\n </p>\n <p>\n Keep in mind that if this property is enabled, the original source media MUST already have an encoding format which is compatible with the destination target. For example: given a pipeline which uses this endpoint to read a file and then streams it to a WebRTC browser such as Chrome, then the file must already be encoded with a VP8 or H.264 codec profile which Chrome is able to decode. Note that for this example, most browsers don't support ANY combination of H.264 encoding options; instead, they tend to support only a very specific subset of the codec features (also known as 'profiles').\n </p>\n <p>\n We strongly recommend to avoid using this option, because correct behavior cannot be guaranteed.\n </p>\n ",
"type": "boolean",

@@ -358,3 +334,3 @@ "optional": true,

"name": "networkCache",
"doc": "When using rtsp sources. Amount of ms to buffer",
"doc": "When using RTSP sources: Amount of milliseconds to buffer",
"type": "int",

@@ -375,2 +351,8 @@ "optional": true,

{
"name": "elementGstreamerDot",
"doc": "Returns the GStreamer DOT string for this element's private pipeline",
"type": "String",
"readOnly": true
},
{
"name": "position",

@@ -393,5 +375,5 @@ "doc": "Get or set the actual position of the video in ms. .. note:: Setting the position only works for seekable videos",

{
"name": "AlphaBlending",
"doc": "A :rom:cls:`Hub` that mixes the :rom:attr:`MediaType.AUDIO` stream of its connected sources and constructs one output with :rom:attr:`MediaType.VIDEO` streams of its connected sources into its sink",
"extends": "Hub",
"name": "RecorderEndpoint",
"doc": "\n<p>\n Provides the functionality to store contents. The recorder can store in local\n files or in a network resource. It receives a media stream from another\n MediaElement (i.e. the source), and stores it in the designated location.\n</p>\n\n<p>\n The following information has to be provided In order to create a\n RecorderEndpoint, and cannot be changed afterwards:\n</p>\n\n<ul>\n <li>\n URI of the resource where media will be stored. Following schemas are\n supported:\n <ul>\n <li>\n Files: mounted in the local file system.\n <ul>\n <li><code>file:///path/to/file</code></li>\n </ul>\n </li>\n\n <li>\n HTTP: Requires the server to support method PUT\n <ul>\n <li><code>http(s)://{server-ip}/path/to/file</code></li>\n <li>\n <code>http(s)://username:password@{server-ip}/path/to/file</code>\n </li>\n </ul>\n </li>\n </ul>\n </li>\n <li>\n Relative URIs (with no schema) are supported. They are completed prepending\n a default URI defined by property <i>defaultPath</i>. This property is\n defined in the configuration file\n <i>/etc/kurento/modules/kurento/UriEndpoint.conf.ini</i>, and the default\n value is <code>file:///var/lib/kurento/</code>\n </li>\n <li>\n The media profile (@MediaProfileSpecType) used to store the file. This will\n determine the encoding. See below for more details about media profile.\n </li>\n <li>\n Optionally, the user can select if the endpoint will stop processing once\n the EndOfStream event is detected.\n </li>\n</ul>\n\n<p>\n RecorderEndpoint requires access to the resource where stream is going to be\n recorded. If it's a local file (<code>file://</code>), the system user running\n the media server daemon (kurento by default), needs to have write permissions\n for that URI. If it's an HTTP server, it must be accessible from the machine\n where media server is running, and also have the correct access rights.\n Otherwise, the media server won't be able to store any information, and an\n ErrorEvent will be fired. Please note that if you haven't subscribed to that\n type of event, you can be left wondering why your media is not being saved,\n while the error message was ignored.\n</p>\n\n<p>\n The media profile is quite an important parameter, as it will determine\n whether the server needs to perform on-the-fly transcoding of the media. If\n the input stream codec if not compatible with the selected media profile, the\n media will be transcoded into a suitable format. This will result in a higher\n CPU load and will impact overall performance of the media server.\n</p>\n\nFor example: Say that your pipeline will receive <b>VP8</b>-encoded video from\nWebRTC, and sends it to a RecorderEndpoint; depending on the format selected...\n<ul>\n <li>\n WEBM: The input codec is the same as the recording format, so no transcoding\n will take place.\n </li>\n <li>\n MP4: The media server will have to transcode from <b>VP8</b> to <b>H264</b>.\n This will raise the CPU load in the system.\n </li>\n <li>\n MKV: Again, video must be transcoded from <b>VP8</b> to <b>H264</b>, which\n means more CPU load.\n </li>\n</ul>\nFrom this you can see how selecting the correct format for your application is a\nvery important decision.\n\n<p>\n Recording will start as soon as the user invokes the record method. The\n recorder will then store, in the location indicated, the media that the source\n is sending to the endpoint's sink. If no media is being received, or no\n endpoint has been connected, then the destination will be empty. 
The recorder\n starts storing information into the file as soon as it gets it.\n</p>\n\n<p>\n When another endpoint is connected to the recorder, by default both AUDIO and\n VIDEO media types are expected, unless specified otherwise when invoking the\n connect method. Failing to provide both types will result in the recording\n buffering the received media: it won't be written to the file until the\n recording is stopped. This is due to the recorder waiting for the other type\n of media to arrive, so they are synchronized.\n</p>\n\n<p>\n The source endpoint can be hot-swapped while the recording is taking place.\n The recorded file will then contain different feeds. When switching video\n sources, if the new video has a different size, the recorder will retain the\n size of the previous source. If the source is disconnected, the last frame\n recorded will be shown for the duration of the disconnection, or until the\n recording is stopped.\n</p>\n\n<p>\n It is recommended to start recording only after media arrives, either to the\n endpoint that is the source of the media connected to the recorder, to the\n recorder itself, or both. Users may use the MediaFlowIn and MediaFlowOut\n events, and synchronize the recording with the moment media comes in. In any\n case, nothing will be stored in the file until the first media packets arrive.\n</p>\n\n<p>\n Stopping the recording process is done through the stopAndWait method, which\n will return only after all the information has been stored correctly. If the file\n is empty, this means that no media arrived at the recorder.\n</p>\n ",
"extends": "UriEndpoint",
"constructor": {

@@ -401,61 +383,43 @@ "params": [

"name": "mediaPipeline",
"doc": "the :rom:cls:`MediaPipeline` to which the dispatcher belongs",
"doc": "the :rom:cls:`MediaPipeline` to which the endpoint belongs",
"type": "MediaPipeline"
},
{
"name": "uri",
"doc": "URI where the recording will be stored. It has to be accessible to the KMS process.\n <ul>\n <li>Local server resources: The user running the Kurento Media Server must have write permission over the file.</li>\n <li>Network resources: Must be accessible from the server where the media server is running.</li>\n </ul>",
"type": "String"
},
{
"name": "mediaProfile",
"doc": "Sets the media profile used for recording. If the profile is different than the one being recieved at the sink pad, media will be trnascoded, resulting in a higher CPU load. For instance, when recording a VP8 encoded video from a WebRTC endpoint in MP4, the load is higher that when recording in WEBM.",
"type": "MediaProfileSpecType",
"optional": true,
"defaultValue": "WEBM"
},
{
"name": "stopOnEndOfStream",
"doc": "Forces the recorder endpoint to finish processing data when an :term:`EOS` is detected in the stream",
"type": "boolean",
"optional": true,
"defaultValue": false
}
],
"doc": "Create for the given pipeline"
"doc": ""
},
"methods": [
{
"params": [
{
"name": "source",
"doc": "The reference to the HubPort setting as master port",
"type": "HubPort"
},
{
"name": "zOrder",
"doc": "The order in z to draw the master image",
"type": "int"
}
],
"name": "setMaster",
"doc": "Sets the source port that will be the master entry to the mixer"
"params": [],
"name": "record",
"doc": "Starts storing media received through the sink pad."
},
{
"params": [
{
"name": "relativeX",
"doc": "The x position relative to the master port. Values from 0 to 1 are accepted. The value 0, indicates the coordinate 0 in the master image.",
"type": "float"
},
{
"name": "relativeY",
"doc": "The y position relative to the master port. Values from 0 to 1 are accepted. The value 0, indicates the coordinate 0 in the master image.",
"type": "float"
},
{
"name": "zOrder",
"doc": "The order in z to draw the images. The greatest value of z is in the top.",
"type": "int"
},
{
"name": "relativeWidth",
"doc": "The image width relative to the master port width. Values from 0 to 1 are accepted.",
"type": "float"
},
{
"name": "relativeHeight",
"doc": "The image height relative to the master port height. Values from 0 to 1 are accepted.",
"type": "float"
},
{
"name": "port",
"doc": "The reference to the confingured port.",
"type": "HubPort"
}
],
"name": "setPortProperties",
"doc": "Configure the blending mode of one port."
"params": [],
"name": "stopAndWait",
"doc": "Stops recording and does not return until all the content has been written to the selected uri. This can cause timeouts on some clients if there is too much content to write, or the transport is slow"
}
],
"events": [
"Recording",
"Paused",
"Stopped"
]
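For orientation, the record/stopAndWait lifecycle described above maps onto the JavaScript client roughly as follows. This is a minimal sketch, assuming a media server reachable at ws://localhost:8888/kurento; the player and recorder URIs are placeholders.

var kurento = require('kurento-client');

kurento('ws://localhost:8888/kurento', function(error, client) {
  if (error) return console.error(error);

  client.create('MediaPipeline', function(error, pipeline) {
    if (error) return console.error(error);

    pipeline.create('PlayerEndpoint',
        {uri: 'http://example.com/video.webm'}, function(error, player) {
      if (error) return console.error(error);

      // WEBM profile: no transcoding needed for a WebM (VP8) source
      pipeline.create('RecorderEndpoint', {
        uri: 'file:///var/lib/kurento/recording.webm',
        mediaProfile: 'WEBM',
        stopOnEndOfStream: true
      }, function(error, recorder) {
        if (error) return console.error(error);

        player.connect(recorder, function(error) {
          if (error) return console.error(error);
          // Start the source first, then start storing media
          player.play(function(error) {
            if (error) return console.error(error);
            recorder.record(function(error) {
              if (error) return console.error(error);
              // ... later, recorder.stopAndWait() flushes everything to disk
            });
          });
        });
      });
    });
  });
});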

@@ -465,3 +429,3 @@ },

"name": "RtpEndpoint",
"doc": "Endpoint that provides bidirectional content delivery capabilities with remote networked peers through RTP or SRTP protocol. An :rom:cls:`RtpEndpoint` contains paired sink and source :rom:cls:`MediaPad` for audio and video. This endpoint inherits from :rom:cls:`BaseRtpEndpoint`.\n </p>\n <p>\n In order to establish an RTP/SRTP communication, peers engage in an SDP negotiation process, where one of the peers (the offerer) sends an offer, while the other peer (the offeree) responds with an answer. This endpoint can function in both situations\n <ul style='list-style-type:circle'>\n <li>\n As offerer: The negotiation process is initiated by the media server\n <ul>\n <li>KMS generates the SDP offer through the generateOffer method. This offer must then be sent to the remote peer (the offeree) through the signaling channel, for processing.</li>\n <li>The remote peer process the Offer, and generates an Answer to this offer. The Answer is sent back to the media server.</li>\n <li>Upon receiving the Answer, the endpoint must invoke the processAnswer method.</li>\n </ul>\n </li>\n <li>\n As offeree: The negotiation process is initiated by the remote peer\n <ul>\n <li>The remote peer, acting as offerer, generates an SDP offer and sends it to the WebRTC endpoint in Kurento.</li>\n <li>The endpoint will process the Offer invoking the processOffer method. The result of this method will be a string, containing an SDP Answer.</li>\n <li>The SDP Answer must be sent back to the offerer, so it can be processed.</li>\n </ul>\n </li>\n </ul>\n </p>\n <p>\n In case of unidirectional connections (i.e. only one peer is going to send media), the process is more simple, as only the emitter needs to process an SDP. On top of the information about media codecs and types, the SDP must contain the IP of the remote peer, and the port where it will be listening. This way, the SDP can be mangled without needing to go through the exchange process, as the receiving peer does not need to process any answer.\n </p>\n <p>\n While there is no congestion control in this endpoint, the user can set some bandwidth limits that will be used during the negotiation process.\n The default bandwidth range of the endpoint is 100kbps-500kbps, but it can be changed separately for input/output directions and for audio/video streams.\n <ul style='list-style-type:circle'>\n <li>\n Input bandwidth control mechanism: Configuration interval used to inform remote peer the range of bitrates that can be pushed into this RtpEndpoint object. These values are announced in the SDP.\n <ul>\n <li>\n setMaxVideoRecvBandwidth: sets Max bitrate limits expected for received video stream.\n </li>\n <li>\n setMaxAudioRecvBandwidth: sets Max bitrate limits expected for received audio stream.\n </li>\n </ul>\n </li>\n <li>\n Output bandwidth control mechanism: Configuration interval used to control bitrate of the output video stream sent to remote peer. Remote peers can also announce bandwidth limitation in their SDPs (through the b=<modifier>:<value> tag). 
Kurento will always enforce bitrate limitations specified by the remote peer over internal configurations.\n <ul>\n <li>\n setMaxVideoSendBandwidth: sets Max bitrate limits for video sent to remote peer.\n </li>\n <li>\n setMinVideoSendBandwidth: sets Min bitrate limits for audio sent to remote peer.\n </li>\n </ul>\n </li>\n </ul>\n All bandwidth control parameters must be changed before the SDP negotiation takes place, and can't be modified afterwards.\n TODO: What happens if the b=as tag form the SDP has a lower value than the one set in setMinVideoSendBandwidth?\n </p>\n <p>\n Having no congestion ocntrol implementation means that the bitrate will remain constant. This is something to take into consideration when setting upper limits for the output bandwidth, or the local network connection can be overflooded.\n </p>\n ",
"doc": "Endpoint that provides bidirectional content delivery capabilities with remote networked peers through RTP or SRTP protocol. An :rom:cls:`RtpEndpoint` contains paired sink and source :rom:cls:`MediaPad` for audio and video. This endpoint inherits from :rom:cls:`BaseRtpEndpoint`.\n </p>\n <p>\n In order to establish an RTP/SRTP communication, peers engage in an SDP negotiation process, where one of the peers (the offerer) sends an offer, while the other peer (the offeree) responds with an answer. This endpoint can function in both situations\n <ul style='list-style-type:circle'>\n <li>\n As offerer: The negotiation process is initiated by the media server\n <ul>\n <li>KMS generates the SDP offer through the generateOffer method. This offer must then be sent to the remote peer (the offeree) through the signaling channel, for processing.</li>\n <li>The remote peer process the Offer, and generates an Answer to this offer. The Answer is sent back to the media server.</li>\n <li>Upon receiving the Answer, the endpoint must invoke the processAnswer method.</li>\n </ul>\n </li>\n <li>\n As offeree: The negotiation process is initiated by the remote peer\n <ul>\n <li>The remote peer, acting as offerer, generates an SDP offer and sends it to the WebRTC endpoint in Kurento.</li>\n <li>The endpoint will process the Offer invoking the processOffer method. The result of this method will be a string, containing an SDP Answer.</li>\n <li>The SDP Answer must be sent back to the offerer, so it can be processed.</li>\n </ul>\n </li>\n </ul>\n </p>\n <p>\n In case of unidirectional connections (i.e. only one peer is going to send media), the process is more simple, as only the emitter needs to process an SDP. On top of the information about media codecs and types, the SDP must contain the IP of the remote peer, and the port where it will be listening. This way, the SDP can be mangled without needing to go through the exchange process, as the receiving peer does not need to process any answer.\n </p>\n <p>\n The user can set some bandwidth limits that will be used during the negotiation process.\n The default bandwidth range of the endpoint is 100kbps-500kbps, but it can be changed separately for input/output directions and for audio/video streams.\n <ul style='list-style-type:circle'>\n <li>\n Input bandwidth control mechanism: Configuration interval used to inform remote peer the range of bitrates that can be pushed into this RtpEndpoint object. These values are announced in the SDP.\n <ul>\n <li>\n setMaxVideoRecvBandwidth: sets Max bitrate limits expected for received video stream.\n </li>\n <li>\n setMaxAudioRecvBandwidth: sets Max bitrate limits expected for received audio stream.\n </li>\n </ul>\n </li>\n <li>\n Output bandwidth control mechanism: Configuration interval used to control bitrate of the output video stream sent to remote peer. Remote peers can also announce bandwidth limitation in their SDPs (through the b=<modifier>:<value> tag). 
Kurento will always enforce bitrate limitations specified by the remote peer over internal configurations.\n <ul>\n <li>\n setMaxVideoSendBandwidth: sets Max bitrate limits for video sent to remote peer.\n </li>\n <li>\n setMinVideoSendBandwidth: sets Min bitrate limits for audio sent to remote peer.\n </li>\n </ul>\n </li>\n </ul>\n All bandwidth control parameters must be changed before the SDP negotiation takes place, and can't be modified afterwards.\n TODO: What happens if the b=as tag form the SDP has a lower value than the one set in setMinVideoSendBandwidth?\n </p>\n <p>\n Take into consideration that setting a too high upper limit for the output bandwidth can be a reason for the local network connection to be overflooded.\n </p>\n ",
"extends": "BaseRtpEndpoint",

@@ -497,5 +461,5 @@ "constructor": {

{
"name": "RecorderEndpoint",
"doc": "<p>\n Provides the functionality to store contents. The recorder can store in local files or in a network resource. It receives a media stream from another MediaElement (i.e. the source), and stores it in the designated location.\n </p>\n <p>\n The following information has to be provided In order to create a RecorderEndpoint, and can’t be changed afterwards:\n <ul>\n <li>\n URI of the resource where media will be stored. Following schemas are supported:\n <ul>\n <li>\n Files: mounted in the local file system.\n <ul>\n <li>file://<path-to-file></li>\n </ul>\n <li>\n HTTP: Requires the server to support method PUT\n <ul>\n <li>\n http(s)://<server-ip>/path/to/file\n </li>\n <li>\n http(s)://username:password@<server-ip>/path/to/file\n </li>\n </ul>\n </li>\n </ul>\n </li>\n <li>\n Relative URIs (with no schema) are supported. They are completed prepending a default URI defined by property defaultPath. This property allows using relative paths instead of absolute paths. If a relative path is provided, defaultPath will be prepended. This property is defined in the configuration file /etc/kurento/modules/kurento/UriEndpoint.conf.ini, and the default value is file:///var/kurento/\n </li>\n <li>\n The media profile used to store the file. This will determine the encoding. See below for more details about media profile\n </li>\n <li>\n Optionally, the user can select if the endpoint will stop processing once the EndOfStream event is detected.\n </li>\n </ul>\n <p>\n </p>\n RecorderEndpoint requires access to the resource where stream is going to be recorded. If it’s a local file (file://), the system user running the media server daemon (kurento by default), needs to have write permissions for that URI. If it’s an HTTP server, it must be accessible from the machine where media server is running, and also have the correct access rights. Otherwise, the media server won’t be able to store any information, and an ErrorEvent will be fired. Please note that if you haven't subscribed to that type of event, you can be left wondering why your media is not being saved, while the error message was ignored.\n <p>\n </p>\n The media profile is quite an important parameter, as it will determine whether there is a transcodification or not. If the input stream codec if not compatible with the selected media profile, the media will be transcoded into a suitable format, before arriving at the RecorderEndpoint's sink pad. This will result in a higher CPU load and will impact overall performance of the media server. For instance, if a VP8 encoded video received through a WebRTC endpoint arrives at the RecorderEndpoint, depending on the format configured in the recorder:\n <ul>\n <li>WEBM: No transcodification will take place.</li>\n <li>MP4: The media server will have to transcode the media received from VP8 to H264. This will raise the CPU load in the system.</li>\n </ul>\n <p>\n </p>\n Recording will start as soon as the user invokes the record method. The recorder will then store, in the location indicated, the media that the source is sending to the endpoint’s sink. If no media is being received, or no endpoint has been connected, then the destination will be empty. The recorder starts storing information into the file as soon as it gets it.\n <p>\n </p>\n When another endpoint is connected to the recorder, by default both AUDIO and VIDEO media types are expected, unless specified otherwise when invoking the connect method. 
Failing to provide both types, will result in teh recording buffering the received media: it won’t be written to the file until the recording is stopped. This is due to the recorder waiting for the other type of media to arrive, so they are synchronised.\n <p>\n </p>\n The source endpoint can be hot-swapped, while the recording is taking place. The recorded file will then contain different feeds. When switching video sources, if the new video has different size, the recorder will retain the size of the previous source. If the source is disconnected, the last frame recorded will be shown for the duration of the disconnection, or until the recording is stopped.\n <p>\n </p>\n It is recommended to start recording only after media arrives, either to the endpoint that is the source of the media connected to the recorder, to the recorder itself, or both. Users may use the MediaFlowIn and MediaFlowOut events, and synchronise the recording with the moment media comes in. In any case, nothing will be stored in the file until the first media packets arrive.\n <p>\n </p>\n Stopping the recording process is done through the stopAndWait method, which will return only after all the information was stored correctly. If the file is empty, this means that no media arrived at the recorder.\n </p>",
"extends": "UriEndpoint",
"name": "WebRtcEndpoint",
"doc": "<p>\n Control interface for Kurento WebRTC endpoint.\n </p>\n <p>\n This endpoint is one side of a peer-to-peer WebRTC communication, being the other peer a WebRTC capable browser -using the RTCPeerConnection API-, a native WebRTC app or even another Kurento Media Server.\n </p>\n <p>\n In order to establish a WebRTC communication, peers engage in an SDP negotiation process, where one of the peers (the offerer) sends an offer, while the other peer (the offeree) responds with an answer. This endpoint can function in both situations\n <ul>\n <li>\n As offerer: The negotiation process is initiated by the media server\n <ul style='list-style-type:circle'>\n <li>KMS generates the SDP offer through the <code>generateOffer</code> method. This <i>offer</i> must then be sent to the remote peer (the offeree) through the signaling channel, for processing.</li>\n <li>The remote peer process the <i>offer</i>, and generates an <i>answer</i> to this <i>offer</i>. The <i>answer</i> is sent back to the media server.</li>\n <li>Upon receiving the <i>answer</i>, the endpoint must invoke the <code>processAnswer</code> method.</li>\n </ul>\n </li>\n <li>\n As offeree: The negotiation process is initiated by the remote peer\n <ul>\n <li>The remote peer, acting as offerer, generates an SDP <i>offer</i> and sends it to the WebRTC endpoint in Kurento.</li>\n <li>The endpoint will process the <i>offer</i> invoking the <code>processOffer</code> method. The result of this method will be a string, containing an SDP <i>answer</i>.</li>\n <li>The SDP <i>answer</i> must be sent back to the offerer, so it can be processed.</li>\n </ul>\n </li>\n </ul>\n </p>\n <p>\n SDPs are sent without ICE candidates, following the Trickle ICE optimization. Once the SDP negotiation is completed, both peers proceed with the ICE discovery process, intended to set up a bidirectional media connection. During this process, each peer\n <ul>\n <li>Discovers ICE candidates for itself, containing pairs of IPs and ports.</li>\n <li>ICE candidates are sent via the signaling channel as they are discovered, to the remote peer for probing.</li>\n <li>ICE connectivity checks are run as soon as the new candidate description, from the remote peer, is available.</li>\n </ul>\n Once a suitable pair of candidates (one for each peer) is discovered, the media session can start. The harvesting process in Kurento, begins with the invocation of the <code>gatherCandidates</code> method. Since the whole Trickle ICE purpose is to speed-up connectivity, candidates are generated asynchronously. Therefore, in order to capture the candidates, the user must subscribe to the event <code>IceCandidateFound</code>. It is important that the event listener is bound before invoking <code>gatherCandidates</code>, otherwise a suitable candidate might be lost, and connection might not be established.\n </p>\n <p>\n It's important to keep in mind that WebRTC connection is an asynchronous process, when designing interactions between different MediaElements. For example, it would be pointless to start recording before media is flowing. In order to be notified of state changes, the application can subscribe to events generated by the WebRtcEndpoint. Following is a full list of events generated by WebRtcEndpoint:\n <ul>\n <li>\n <code>IceComponentStateChange</code>: This event informs only about changes in the ICE connection state. 
Possible values are:\n <ul style='list-style-type:circle'>\n <li><code>DISCONNECTED</code>: No activity scheduled</li>\n <li><code>GATHERING</code>: Gathering local candidates</li>\n <li><code>CONNECTING</code>: Establishing connectivity</li>\n <li><code>CONNECTED</code>: At least one working candidate pair</li>\n <li><code>READY</code>: ICE concluded, candidate pair selection is now final</li>\n <li><code>FAILED</code>: Connectivity checks have been completed, but media connection was not established</li>\n </ul>\n The transitions between states are covered in RFC5245.\n It could be said that it's network-only, as it only takes into account the state of the network connection, ignoring other higher-level concerns, like DTLS handshake, RTCP flow, etc. This implies that, while the component state is <code>CONNECTED</code>, there might be no media flowing between the peers. This makes this event useful only to receive low-level information about the connection between peers. Moreover, while other events might leave a graceful period of time before firing, this event fires immediately after the state change is detected.\n </li>\n <li>\n <code>IceCandidateFound</code>: Raised when a new candidate is discovered. ICE candidates must be sent to the remote peer of the connection. Failing to do so for some or all of the candidates might render the connection unusable.\n </li>\n <li>\n <code>IceGatheringDone</code>: Raised when the ICE harvesting process is completed. This means that all candidates have already been discovered.\n </li>\n <li>\n <code>NewCandidatePairSelected</code>: Raised when a new ICE candidate pair gets selected. The pair contains both local and remote candidates being used for a component. This event can be raised during a media session, if a new pair of candidates with higher priority in the link is found.\n </li>\n <li>\n <code>DataChannelOpen</code>: Raised when a data channel is open.\n </li>\n <li>\n <code>DataChannelClose</code>: Raised when a data channel is closed.\n </li>\n </ul>\n </p>\n <p>\n Registering to any of the above events requires the application to provide a callback function. Each event provides different information, so it is recommended to consult the signature of the event listeners.\n </p>\n <p>\n Flow control and congestion management is one of the most important features of WebRTC. WebRTC connections start with the lowest bandwidth configured and slowly ramp up to the maximum available bandwidth, or to the higher limit of the exploration range in case no bandwidth limitation is detected. Notice that WebRtcEndpoints in Kurento are designed in a way that multiple WebRTC connections fed by the same stream share quality. When a new connection is added, as it must start with low bandwidth, it will cause the rest of the connections to experience a transient period of degraded quality, until it stabilizes its bitrate. This doesn't apply when transcoding is involved. Transcoders will adjust their output bitrate based on bandwidth requirements, but it won't affect the original stream. 
If an incoming WebRTC stream needs to be transcoded, for whatever reason, all WebRtcEndpoints fed from transcoder output will share a separate quality from the ones connected directly to the original stream.\n </p>\n <p>\n The default bandwidth range of the endpoint is 100kbps-500kbps, but it can be changed separately for input/output directions and for audio/video streams.\n <ul>\n <li>\n Input bandwidth control mechanism: Configuration interval used to inform remote peer the range of bitrates that can be pushed into this WebRtcEndpoint object.\n <ul style='list-style-type:circle'>\n <li>\n setMin/MaxVideoRecvBandwidth: sets Min/Max bitrate limits expected for received video stream.\n </li>\n <li>\n setMin/MaxAudioRecvBandwidth: sets Min/Max bitrate limits expected for received audio stream.\n </li>\n </ul>\n Max values are announced in the SDP, while min values are set to limit the lower value of REMB packets. It follows that min values will only have an effect on peers that support this control mechanism, such as Chrome.\n </li>\n <li>\n Output bandwidth control mechanism: Configuration interval used to control bitrate of the output video stream sent to remote peer. It is important to keep in mind that pushed bitrate depends on network and remote peer capabilities. Remote peers can also announce bandwidth limitation in their SDPs (through the <code>b=<modifier>:<value></code> tag). Kurento will always enforce bitrate limitations specified by the remote peer over internal configurations.\n <ul style='list-style-type:circle'>\n <li>\n setMin/MaxVideoSendBandwidth: sets Min/Max bitrate limits for video sent to remote peer\n </li>\n </ul>\n </li>\n </ul>\n All bandwidth control parameters must be changed before the SDP negotiation takes place, and can't be changed afterwards.\n </p>\n <p>\n DataChannels allow other media elements that make use of the DataPad to send arbitrary data. For instance, if there is a filter that publishes event information, it'll be sent to the remote peer through the channel. There is no API available for programmers to make use of this feature in the WebRtcElement. DataChannels can be configured to provide the following:\n <ul>\n <li>\n Reliable or partially reliable delivery of sent messages\n </li>\n <li>\n In-order or out-of-order delivery of sent messages\n </li>\n </ul>\n Unreliable, out-of-order delivery is equivalent to raw UDP semantics. The message may make it, or it may not, and order is not important. However, the channel can be configured to be <i>partially reliable</i> by specifying the maximum number of retransmissions or setting a time limit for retransmissions: the WebRTC stack will handle the acknowledgments and timeouts.\n </p>\n <p>\n The possibility to create DataChannels in a WebRtcEndpoint must be explicitly enabled when creating the endpoint, as this feature is disabled by default. If this is the case, they can be created by invoking the createDataChannel method. The arguments for this method, all of them optional, provide the necessary configuration:\n <ul>\n <li>\n <code>label</code>: assigns a label to the DataChannel. This can help identify each possible channel separately.\n </li>\n <li>\n <code>ordered</code>: specifies if the DataChannel guarantees order, which is the default mode. If maxPacketLifetime and maxRetransmits have not been set, this enables reliable mode.\n </li>\n <li>\n <code>maxPacketLifeTime</code>: The time window in milliseconds during which transmissions and retransmissions may take place in unreliable mode. 
This forces unreliable mode, even if <code>ordered</code> has been activated.\n </li>\n <li>\n <code>maxRetransmits</code>: maximum number of retransmissions that are attempted in unreliable mode. This forces unreliable mode, even if <code>ordered</code> has been activated.\n </li>\n <li>\n <code>Protocol</code>: Name of the subprotocol used for data communication.\n </li>\n </ul>\n ",
"extends": "BaseRtpEndpoint",
"constructor": {

@@ -509,88 +473,144 @@ "params": [

{
"name": "uri",
"doc": "URI where the recording will be stored. It has to be accessible to the KMS process.\n <ul>\n <li>Local server resources: The user running the Kurento Media Server must have write permission over the file.</li>\n <li>Network resources: Must be accessible from the server where the media server is running.</li>\n </ul>",
"type": "String"
"name": "recvonly",
"doc": "Single direction, receive-only endpoint",
"type": "boolean",
"optional": true,
"defaultValue": false
},
{
"name": "mediaProfile",
"doc": "Sets the media profile used for recording. If the profile is different than the one being recieved at the sink pad, media will be trnascoded, resulting in a higher CPU load. For instance, when recording a VP8 encoded video from a WebRTC endpoint in MP4, the load is higher that when recording in WEBM.",
"type": "MediaProfileSpecType",
"name": "sendonly",
"doc": "Single direction, send-only endpoint",
"type": "boolean",
"optional": true,
"defaultValue": "WEBM"
"defaultValue": false
},
{
"name": "stopOnEndOfStream",
"doc": "Forces the recorder endpoint to finish processing data when an :term:`EOS` is detected in the stream",
"name": "useDataChannels",
"doc": "Activate data channels support",
"type": "boolean",
"optional": true,
"defaultValue": false
},
{
"name": "certificateKeyType",
"doc": "Define the type of the certificate used in dtls",
"type": "CertificateKeyType",
"optional": true,
"defaultValue": "RSA"
}
],
"doc": ""
"doc": "Builder for the :rom:cls:`WebRtcEndpoint`"
},
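With the JavaScript client, these constructor parameters are passed as an options object to create(). A minimal sketch, assuming an existing pipeline object:

// Receive-only endpoint with data channel support enabled at creation time
pipeline.create('WebRtcEndpoint', {
  recvonly: true,
  useDataChannels: true
}, function(error, webRtc) {
  if (error) return console.error(error);
  // SDP negotiation and candidate gathering follow from here
});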
"methods": [
"properties": [
{
"params": [],
"name": "record",
"doc": "Starts storing media received through the sink pad."
"name": "stunServerAddress",
"doc": "address of the STUN server (Only IP address are supported)",
"type": "String"
},
{
"params": [],
"name": "stopAndWait",
"doc": "Stops recording and does not return until all the content has been written to the selected uri. This can cause timeouts on some clients if there is too much content to write, or the transport is slow"
"name": "stunServerPort",
"doc": "port of the STUN server",
"type": "int"
},
{
"name": "turnUrl",
"doc": "TURN server URL with this format: <code>user:password@address:port(?transport=[udp|tcp|tls])</code>.</br><code>address</code> must be an IP (not a domain).</br><code>transport</code> is optional (UDP by default).",
"type": "String"
},
{
"name": "ICECandidatePairs",
"doc": "the ICE candidate pair (local and remote candidates) used by the ice library for each stream.",
"type": "IceCandidatePair[]",
"readOnly": true
},
{
"name": "IceConnectionState",
"doc": "the ICE connection state for all the connections.",
"type": "IceConnection[]",
"readOnly": true
}
],
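These properties surface in the JavaScript client as generated getter/setter pairs; the sketch below assumes that convention (setStunServerAddress, setStunServerPort, setTurnUrl) and uses placeholder server addresses. They should be configured before the SDP negotiation starts.

// Configure STUN before negotiating.
// Note: stunServerAddress expects an IP address, not a hostname.
webRtc.setStunServerAddress('193.147.51.12', function(error) {
  if (error) return console.error(error);
});
webRtc.setStunServerPort(3478, function(error) {
  if (error) return console.error(error);
});

// Or a TURN relay, following the documented URL format
webRtc.setTurnUrl('user:password@193.147.51.12:3478?transport=udp',
    function(error) {
  if (error) return console.error(error);
});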
"events": [
"Recording",
"Paused",
"Stopped"
]
},
{
"name": "Dispatcher",
"doc": "A :rom:cls:`Hub` that allows routing between arbitrary port pairs",
"extends": "Hub",
"constructor": {
"params": [
{
"name": "mediaPipeline",
"doc": "the :rom:cls:`MediaPipeline` to which the dispatcher belongs",
"type": "MediaPipeline"
}
],
"doc": "Create a :rom:cls:`Dispatcher` belonging to the given pipeline."
},
"methods": [
{
"params": [],
"name": "gatherCandidates",
"doc": "Start the gathering of ICE candidates.</br>It must be called after SdpEndpoint::generateOffer or SdpEndpoint::processOffer for Trickle ICE. If invoked before generating or processing an SDP offer, the candidates gathered will be added to the SDP processed."
},
{
"params": [
{
"name": "source",
"doc": "Source port to be connected",
"type": "HubPort"
"name": "candidate",
"doc": "Remote ICE candidate",
"type": "IceCandidate"
}
],
"name": "addIceCandidate",
"doc": "Process an ICE candidate sent by the remote peer of the connection."
},
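The ordering constraint noted above (bind the IceCandidateFound listener before gathering) could look like this. A sketch assuming an already-created webRtc endpoint, an sdpOffer received from the remote peer, and a signaling object standing in for the application's own signaling channel.

var kurento = require('kurento-client');

// Subscribe BEFORE gatherCandidates(), or early candidates may be lost
webRtc.on('IceCandidateFound', function(event) {
  signaling.send({id: 'iceCandidate', candidate: event.candidate});
});

webRtc.processOffer(sdpOffer, function(error, sdpAnswer) {
  if (error) return console.error(error);
  signaling.send({id: 'sdpAnswer', sdpAnswer: sdpAnswer});

  webRtc.gatherCandidates(function(error) {
    if (error) return console.error(error);
  });
});

// Candidates arriving from the remote peer are wrapped in the
// IceCandidate complex type before being handed to the endpoint
signaling.on('iceCandidate', function(message) {
  var candidate = kurento.getComplexType('IceCandidate')(message.candidate);
  webRtc.addIceCandidate(candidate, function(error) {
    if (error) return console.error(error);
  });
});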
{
"params": [
{
"name": "label",
"doc": "Channel's label",
"type": "String",
"optional": true,
"defaultValue": ""
},
{
"name": "sink",
"doc": "Sink port to be connected",
"type": "HubPort"
"name": "ordered",
"doc": "If the data channel should guarantee order or not. If true, and maxPacketLifeTime and maxRetransmits have not been provided, reliable mode is activated.",
"type": "boolean",
"optional": true,
"defaultValue": true
},
{
"name": "maxPacketLifeTime",
"doc": "The time window (in milliseconds) during which transmissions and retransmissions may take place in unreliable mode.</br>\n .. note:: This forces unreliable mode, even if <code>ordered</code> has been activated",
"type": "int",
"optional": true,
"defaultValue": -1
},
{
"name": "maxRetransmits",
"doc": "maximum number of retransmissions that are attempted in unreliable mode.</br>\n .. note:: This forces unreliable mode, even if <code>ordered</code> has been activated",
"type": "int",
"optional": true,
"defaultValue": -1
},
{
"name": "protocol",
"doc": "Name of the subprotocol used for data communication",
"type": "String",
"optional": true,
"defaultValue": ""
}
],
"name": "connect",
"doc": "Connects each corresponding :rom:enum:`MediaType` of the given source port with the sink port."
"name": "createDataChannel",
"doc": "Create a new data channel, if data channels are supported. If they are not supported, this method throws an exception.\n Being supported means that the WebRtcEndpoint has been created with data channel support, the client also supports data channels, and they have been negotaited in the SDP exchange.\n Otherwise, the method throws an exception, indicating that the operation is not possible.</br>\n Data channels can work in either unreliable mode (analogous to User Datagram Protocol or UDP) or reliable mode (analogous to Transmission Control Protocol or TCP).\n The two modes have a simple distinction:\n <ul>\n <li>Reliable mode guarantees the transmission of messages and also the order in which they are delivered. This takes extra overhead, thus potentially making this mode slower.</li>\n <li>Unreliable mode does not guarantee every message will get to the other side nor what order they get there. This removes the overhead, allowing this mode to work much faster.</li>\n </ul>"
},
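A sketch of creating channels from the JavaScript client, assuming the endpoint was created with useDataChannels enabled and that the generated method takes the optional parameters positionally in the order listed above (label, ordered, maxPacketLifeTime, maxRetransmits, protocol):

// Default channel: reliable and ordered
webRtc.createDataChannel(function(error) {
  if (error) return console.error(error);
});

// Partially reliable channel: unordered, retransmit for at most 3000 ms.
// Passing maxPacketLifeTime forces unreliable mode.
webRtc.createDataChannel('telemetry', false, 3000, -1, '', function(error) {
  if (error) return console.error(error);
});

webRtc.on('DataChannelOpen', function(event) {
  console.log('Data channel open, id:', event.channelId);
});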
{
"params": [
{
"name": "channelId",
"doc": "The channel identifier",
"type": "int"
}
],
"name": "closeDataChannel",
"doc": "Closes an open data channel"
}
],
"events": [
"OnIceCandidate",
"IceCandidateFound",
"OnIceGatheringDone",
"IceGatheringDone",
"OnIceComponentStateChanged",
"IceComponentStateChange",
"OnDataChannelOpened",
"DataChannelOpen",
"OnDataChannelClosed",
"DataChannelClose",
"NewCandidatePairSelected"
]
},
{
"name": "Composite",
"doc": "A :rom:cls:`Hub` that mixes the :rom:attr:`MediaType.AUDIO` stream of its connected sources and constructs a grid with the :rom:attr:`MediaType.VIDEO` streams of its connected sources into its sink",
"extends": "Hub",
"constructor": {
"params": [
{
"name": "mediaPipeline",
"doc": "the :rom:cls:`MediaPipeline` to which the dispatcher belongs",
"type": "MediaPipeline"
}
],
"doc": "Create for the given pipeline"
}
}

@@ -693,35 +713,11 @@ ],

{
"typeFormat": "REGISTER",
"properties": [
{
"name": "isSeekable",
"doc": "Seek is possible in video source",
"type": "boolean"
},
{
"name": "seekableInit",
"doc": "First video position to do seek in ms",
"type": "int64"
},
{
"name": "seekableEnd",
"doc": "Last video position to do seek in ms",
"type": "int64"
},
{
"name": "duration",
"doc": "Video duration in ms",
"type": "int64"
}
],
"name": "VideoInfo",
"doc": ""
},
{
"typeFormat": "ENUM",
"values": [
"WEBM",
"MKV",
"MP4",
"WEBM_VIDEO_ONLY",
"WEBM_AUDIO_ONLY",
"MKV_VIDEO_ONLY",
"MKV_AUDIO_ONLY",
"MP4_VIDEO_ONLY",

@@ -733,3 +729,3 @@ "MP4_AUDIO_ONLY",

"name": "MediaProfileSpecType",
"doc": "Media Profile.\n\nCurrently WEBM, MP4 and JPEG are supported."
"doc": "Media Profile.\n\nCurrently WEBM, MKV, MP4 and JPEG are supported."
},

@@ -752,3 +748,3 @@ {

"name": "key",
"doc": " A string representing the cryptographic key used. The length varies depending on the cryptographic method used (30 bytes length for AES_128_CM, or 46 bytes length for AES_256_CM). If no key is provided a random one will be generated using the `getrandom` system call",
"doc": "<p>Master key and salt (plain text)</p>\n <p>\n This field provides the the cryptographic master key appended with the master salt, in plain text format. This allows to provide a key that is composed of readable ASCII characters.\n </p>\n <p>\n The expected length of the key (as provided to this parameter) is determined by the crypto-suite for which the key applies (30 characters for AES_CM_128, 46 characters for AES_CM_256). If the length does not match the expected value, the key will be considered invalid.\n </p>\n <p>\n If no key is provided, a random one will be generated using the `getrandom` system call.\n </p>",
"type": "String",

@@ -758,2 +754,8 @@ "optional": true

{
"name": "keyBase64",
"doc": "<p>Master key and salt (base64 encoded)</p>\n <p>\n This field provides the cryptographic master key appended with the master salt, encoded in base64. This allows to provide a binary key that is not limited to the ASCII character set.\n </p>\n <p>\n The expected length of the key (after being decoded from base64) is determined by the crypto-suite for which the key applies (30 bytes for AES_CM_128, 46 bytes for AES_CM_256). If the length does not match the expected value, the key will be considered invalid.\n </p>\n <p>\n If no key is provided, a random one will be generated using the `getrandom` system call.\n </p>",
"type": "String",
"optional": true
},
{
"name": "crypto",

@@ -767,2 +769,29 @@ "doc": "Selects the cryptographic suite to be used. For available values, please see the CryptoSuite enum.",

"doc": "Security Descriptions for Media Streams"
},
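For illustration, an SRTP-capable RtpEndpoint could be created with such a description along these lines. A sketch, assuming the SDES complex type is exposed through getComplexType, that the RtpEndpoint constructor accepts a crypto parameter, and that pipeline already exists. Note the length rule: for AES_128_CM the 30-byte master key plus salt becomes 40 characters once base64-encoded.

var kurento = require('kurento-client');

// 30 random bytes (master key + salt for AES_128_CM), base64-encoded
var keyBase64 = require('crypto').randomBytes(30).toString('base64');

var sdes = kurento.getComplexType('SDES')({
  keyBase64: keyBase64,
  crypto: 'AES_128_CM_HMAC_SHA1_80'  // a CryptoSuite enum value
});

pipeline.create('RtpEndpoint', {crypto: sdes}, function(error, srtpEndpoint) {
  if (error) return console.error(error);
  // SDP offers generated by this endpoint will describe SRTP media
});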
{
"typeFormat": "REGISTER",
"properties": [
{
"name": "isSeekable",
"doc": "Seek is possible in video source",
"type": "boolean"
},
{
"name": "seekableInit",
"doc": "First video position to do seek in ms",
"type": "int64"
},
{
"name": "seekableEnd",
"doc": "Last video position to do seek in ms",
"type": "int64"
},
{
"name": "duration",
"doc": "Video duration in ms",
"type": "int64"
}
],
"name": "VideoInfo",
"doc": ""
}
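This register is what a PlayerEndpoint reports about its media source. A sketch of reading it, assuming the generated getVideoInfo accessor for the read-only videoInfo property of an existing player object:

player.getVideoInfo(function(error, videoInfo) {
  if (error) return console.error(error);
  if (videoInfo.isSeekable) {
    // All positions are reported in milliseconds
    console.log('Seekable range:', videoInfo.seekableInit,
                '-', videoInfo.seekableEnd);
    console.log('Duration:', videoInfo.duration, 'ms');
  }
});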

@@ -912,2 +941,8 @@ ],

{
"properties": [],
"extends": "Media",
"name": "EndOfStream",
"doc": "Event raised when the stream that the element sends out is finished."
},
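A typical pattern is to stop a recording cleanly when the source signals this event. A sketch assuming existing player, recorder and pipeline objects:

player.on('EndOfStream', function(event) {
  // The source has finished; flush the recording, then release
  recorder.stopAndWait(function(error) {
    if (error) return console.error(error);
    pipeline.release();
  });
});
player.play(function(error) {
  if (error) return console.error(error);
});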
{
"properties": [

@@ -927,8 +962,2 @@ {

"extends": "Media",
"name": "EndOfStream",
"doc": "Event raised when the stream that the element sends out is finished."
},
{
"properties": [],
"extends": "Media",
"name": "Recording",

@@ -935,0 +964,0 @@ "doc": "Fired when the recording effectively starts, i.e. media is received by the recorder and the record method has been called."
