导入数据库表和音视频demo

This commit is contained in:
2023-09-14 14:59:57 +08:00
parent c0ca154d31
commit 736c5376e0
157 changed files with 11044 additions and 0 deletions

40
media-server-demo-node/.gitignore vendored Normal file
View File

@@ -0,0 +1,40 @@
# Logs
logs
*.log
npm-debug.log*
# Runtime data
pids
*.pid
*.seed
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
# nyc test coverage
.nyc_output
# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# node-waf configuration
.lock-wscript
# Compiled binary addons (http://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules
jspm_packages
# Optional npm cache directory
.npm
# Optional REPL history
.node_repl_history
/nbproject/private/
/recordings/*.mp4
/recordings/*.pcap

View File

@@ -0,0 +1,4 @@
{
"esversion":6,
"esnext": true
}

View File

@@ -0,0 +1,16 @@
# Use the current Long Term Support (LTS) version of Node.js
FROM node:16
# Copy the signalling server source code from the build context
COPY ./ /opt/media-server
# Install the dependencies for the signalling server
WORKDIR /opt/media-server
RUN npm install --registry=https://registry.npmmirror.com
EXPOSE 28000
# Set the signalling server as the container's entrypoint
CMD ["sh","/opt/media-server/entrypoint.sh"]

View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2017 Sergio Garcia Murillo
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@@ -0,0 +1,36 @@
# media-server-demo-node
Demo application for the Medooze Media Server for Node.js
## Installation
```
npm install
```
## Run
You need to run the demo passing as argument the public IP address of the media server that will be included in the SDP. This IP address is the one facing your clients.
```
node index.js <ip>
```
The demo will open an HTTPS/WSS server at port 8000.
## Demos
### SVC Layer selection
To run this demo just open `https://ip:8000/svc/` on a Chrome browser and follow instructions.
### Recording
To run this demo just open `https://ip:8000/rec/` with Chrome or Firefox.
### Broadcasting
To run this demo just open `https://ip:8000/broadcast/` with Chrome or Firefox and follow instructions.
### Simulcast
To run this demo just open `https://ip:8000/simulcast/` with Chrome or Firefox and follow instructions.

View File

@@ -0,0 +1,6 @@
#!/bin/sh
docker stop ue
docker rm ue
docker build -t ue:node .
docker run -it -d -p 8081:8081 -p 8888:8888 -p 8889:8889 --name ue ue:node

View File

@@ -0,0 +1,14 @@
#!/bin/sh
# entrypoint.sh
# read the file that sets variables
/usr/local/bin/node /opt/media-server/index.js 43.139.191.204
# /usr/local/bin/node /opt/SignallingWebServer/cirrus.js --peerConnectionOptions="{ \""iceServers\"": [{\""urls\"": [\""stun:stun4.l.google.com:19302\""]}]}"
# run the main container command
exec "$@"

View File

@@ -0,0 +1,128 @@
const https = require('https');
const url = require('url');
const fs = require('fs');
const path = require('path');
const WebSocketServer = require('websocket').server;
//Get the Medooze Media Server interface
const MediaServer = require("medooze-media-server");
//The public IP address to advertise in the SDP is a mandatory argument
if (process.argv.length != 3)
	throw new Error("Missing IP address\nUsage: node index.js <ip>");
//Get ip
const ip = process.argv[2];
//Create UDP server endpoint
const endpoint = MediaServer.createEndpoint(ip);
//Root directory of the static demo pages
const base = 'www';
//TLS material (generated by the "npm install" script in package.json)
const options = {
	key: fs.readFileSync('server.key'),
	cert: fs.readFileSync('server.cert')
};
//Enable debug
MediaServer.enableDebug(false);
MediaServer.enableUltraDebug(false);
//Restrict port range
MediaServer.setPortRange(10000, 20000);
//Maps file extension to MIME type
const map = {
	'.ico': 'image/x-icon',
	'.html': 'text/html',
	'.js': 'text/javascript',
	'.json': 'application/json',
	'.css': 'text/css',
	'.png': 'image/png',
	'.jpg': 'image/jpeg',
	'.wav': 'audio/wav',
	'.mp3': 'audio/mpeg',
	'.svg': 'image/svg+xml',
	'.pdf': 'application/pdf',
	'.doc': 'application/msword'
};
//Create HTTPS server serving the static demo pages
const server = https.createServer(options, (req, res) => {
	//Parse URL
	const parsedUrl = url.parse(req.url);
	//Resolve the requested path against the static root and refuse anything
	//that escapes it ("../" path traversal on untrusted request paths)
	let pathname = path.normalize(path.join(base, parsedUrl.pathname));
	if (pathname !== base && !pathname.startsWith(base + path.sep))
	{
		res.statusCode = 403;
		res.end('Forbidden');
		return;
	}
	//Static file handling; fs.stat replaces the deprecated fs.exists
	fs.stat(pathname, (err, stats) => {
		if (err)
		{
			//If the file is not found, return 404
			res.statusCode = 404;
			res.end(`File ${pathname} not found!`);
			return;
		}
		//If it is a directory, serve its index file
		if (stats.isDirectory())
			pathname = path.join(pathname, 'index.html');
		//Extract the file extension (after index resolution) for the MIME map
		const ext = path.parse(pathname).ext;
		//Read file from file system
		fs.readFile(pathname, (err, data) => {
			if (err)
			{
				//Error
				res.statusCode = 500;
				res.end(`Error getting the file: ${err}.`);
			} else {
				//If the file is found, set Content-type and send data
				res.setHeader('Content-type', map[ext] || 'text/html');
				res.end(data);
			}
		});
	});
}).listen(28000);
//WebSocket signalling server on top of the HTTPS server
const wsServer = new WebSocketServer({
	httpServer: server,
	autoAcceptConnections: false
});
//Load the demo handlers; the websocket subprotocol selects the handler
const handlers = {
	"svc"          : require("./lib/svc.js"),
	"rec"          : require("./lib/recording.js"),
	"broadcast"    : require("./lib/broadcast.js"),
	"simulcast"    : require("./lib/simulcast.js"),
	"playback"     : require("./lib/playback.js"),
	"datachannels" : require("./lib/datachannels.js"),
};
wsServer.on('request', (request) => {
	//Get protocol for demo
	var protocol = request.requestedProtocols[0];
	console.log("-Got request for: " + protocol);
	//If not found
	if (!handlers.hasOwnProperty(protocol))
		//Reject connection
		return request.reject();
	//Process it
	handlers[protocol](request, protocol, endpoint);
});
//Try to clean up on exit
const onExit = (e) => {
	if (e) console.error(e);
	MediaServer.terminate();
	process.exit();
};
process.on("uncaughtException", onExit);
process.on("SIGINT", onExit);
process.on("SIGTERM", onExit);
process.on("SIGQUIT", onExit);

View File

@@ -0,0 +1,147 @@
//Broadcast demo handler: an external h264 RTP feed (sent to local port 5004,
//e.g. by VLC) is relayed to every WebRTC viewer that connects.
//Get the Medooze Media Server interface
const MediaServer = require("medooze-media-server");
//Get Semantic SDP objects
const SemanticSDP = require("semantic-sdp");
const SDPInfo = SemanticSDP.SDPInfo;
const MediaInfo = SemanticSDP.MediaInfo;
const CandidateInfo = SemanticSDP.CandidateInfo;
const DTLSInfo = SemanticSDP.DTLSInfo;
const ICEInfo = SemanticSDP.ICEInfo;
const StreamInfo = SemanticSDP.StreamInfo;
const TrackInfo = SemanticSDP.TrackInfo;
const Direction = SemanticSDP.Direction;
const CodecInfo = SemanticSDP.CodecInfo;
//Create new streamer that receives the external feed
const streamer = MediaServer.createStreamer();
//Create new video session codecs
const video = new MediaInfo("video","video");
//Add h264 codec (payload type 96; must match what the RTP sender streams)
video.addCodec(new CodecInfo("h264",96));
//Create session for video, listening for plain RTP on local port 5004
const session = streamer.createSession(video,{
	local : {
		port: 5004
	}
});
/**
 * Answers each viewer's SDP offer and attaches the shared broadcast video
 * track to a new outgoing stream on the viewer's transport.
 * @param request  websocket connection request
 * @param protocol accepted websocket subprotocol ("broadcast")
 * @param endpoint shared MediaServer UDP endpoint
 */
module.exports = function(request,protocol,endpoint)
{
	const connection = request.accept(protocol);
	connection.on('message', (frame) =>
	{
		//Parse the signalling message
		var msg = JSON.parse(frame.utf8Data);
		//Only the OFFER command is handled
		if (msg.cmd==="OFFER")
		{
			//Process the sdp
			var offer = SDPInfo.process(msg.offer);
			//Create an DTLS ICE transport in that endpoint
			const transport = endpoint.createTransport({
				dtls : offer.getDTLS(),
				ice : offer.getICE()
			});
			//Set RTP remote properties
			transport.setRemoteProperties({
				audio : offer.getMedia("audio"),
				video : offer.getMedia("video")
			});
			//Get local DTLS and ICE info
			const dtls = transport.getLocalDTLSInfo();
			const ice = transport.getLocalICEInfo();
			//Get local candidates
			const candidates = endpoint.getLocalCandidates();
			//Build the answer SDP by hand (other demos use offer.answer())
			let answer = new SDPInfo();
			//Add ice and dtls info
			answer.setDTLS(dtls);
			answer.setICE(ice);
			//For each local candidate
			for (let i=0;i<candidates.length;++i)
				//Add candidate to media info
				answer.addCandidate(candidates[i]);
			//Get remote audio m-line info
			let audioOffer = offer.getMedia("audio");
			//If offer had audio
			if (audioOffer)
			{
				//Create audio media for the answer
				let audio = new MediaInfo(audioOffer.getId(), "audio");
				//Mark audio inactive: this demo relays video only
				audio.setDirection(Direction.INACTIVE);
				//Add it to answer
				answer.addMedia(audio);
			}
			//Get remote video m-line info
			let videoOffer = offer.getMedia("video");
			//If offer had video
			if (videoOffer)
			{
				//Create video media
				let video = new MediaInfo(videoOffer.getId(), "video");
				//Get codec types
				let h264 = videoOffer.getCodec("h264");
				//Add video codecs
				video.addCodec(h264);
				//Set recv only
				video.setDirection(Direction.RECVONLY);
				//Add it to answer
				answer.addMedia(video);
			}
			//Set RTP local properties
			transport.setLocalProperties({
				audio : answer.getMedia("audio"),
				video : answer.getMedia("video")
			});
			//Create new local stream with only video
			const outgoingStream = transport.createOutgoingStream({
				audio: false,
				video: true
			});
			//Feed the shared broadcast track into this viewer's stream
			outgoingStream.getVideoTracks()[0].attachTo(session.getIncomingStreamTrack());
			//Get local stream info
			const info = outgoingStream.getStreamInfo();
			//Add local stream info it to the answer
			answer.addStream(info);
			//Send response
			connection.sendUTF(JSON.stringify({
				answer : answer.toString()
			}));
			//Close on disconnect
			connection.on("close",() => {
				//Stop
				transport.stop();
			});
		}
	});
};

View File

@@ -0,0 +1,74 @@
//Datachannels demo handler.
//Get the Medooze Media Server interface
const MediaServer = require("medooze-media-server");
//Get Semantic SDP objects
const SemanticSDP = require("semantic-sdp");
const {
	SDPInfo,
	MediaInfo,
	CandidateInfo,
	DTLSInfo,
	ICEInfo,
	StreamInfo,
	TrackInfo,
	Direction,
	CodecInfo
} = SemanticSDP;
//Local capabilities: only the datachannel section is negotiated
const Capabilities = {
	data : {
	}
};
/**
 * Accepts the websocket connection and answers the client's SDP offer,
 * creating a DTLS/ICE transport for the datachannel session.
 * @param request  websocket connection request
 * @param protocol accepted websocket subprotocol
 * @param endpoint shared MediaServer UDP endpoint
 */
module.exports = function(request,protocol,endpoint)
{
	const connection = request.accept(protocol);
	connection.on('message', (frame) =>
	{
		//Decode the signalling command
		const message = JSON.parse(frame.utf8Data);
		//Ignore everything except offers
		if (message.cmd!=="OFFER")
			return;
		//Parse the remote session description
		const remoteOffer = SDPInfo.process(message.offer);
		//Create the DTLS/ICE transport on the shared endpoint
		const transport = endpoint.createTransport(remoteOffer);
		//transport.dump("/tmp/svc.pcap");
		//Apply the remote RTP parameters
		transport.setRemoteProperties({
			audio : remoteOffer.getMedia("audio"),
			video : remoteOffer.getMedia("video")
		});
		//Build the local answer from our capabilities
		const localAnswer = remoteOffer.answer({
			dtls         : transport.getLocalDTLSInfo(),
			ice          : transport.getLocalICEInfo(),
			candidates   : endpoint.getLocalCandidates(),
			capabilities : Capabilities
		});
		//Apply the local RTP parameters
		transport.setLocalProperties({
			video : localAnswer.getMedia("video")
		});
		//Reply with the answer sdp
		connection.sendUTF(JSON.stringify({
			answer : localAnswer.toString()
		}));
		//Tear down the transport when the client disconnects
		connection.on("close",() => {
			//Stop
			transport.stop();
		});
	});
};

View File

@@ -0,0 +1,134 @@
//Get the Medooze Media Server interface
const MediaServer = require("medooze-media-server");
const FileSystem = require("fs");
const Path = require("path");
//Get Semantic SDP objects
const SemanticSDP = require("semantic-sdp");
const SDPInfo = SemanticSDP.SDPInfo;
const MediaInfo = SemanticSDP.MediaInfo;
const CandidateInfo = SemanticSDP.CandidateInfo;
const DTLSInfo = SemanticSDP.DTLSInfo;
const ICEInfo = SemanticSDP.ICEInfo;
const StreamInfo = SemanticSDP.StreamInfo;
const TrackInfo = SemanticSDP.TrackInfo;
const Direction = SemanticSDP.Direction;
const CodecInfo = SemanticSDP.CodecInfo;
//Local media capabilities offered back in the SDP answer
const Capabilities = {
	audio : {
		codecs : ["opus"],
		extensions : [ "urn:ietf:params:rtp-hdrext:ssrc-audio-level", "http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01"]
	},
	video : {
		codecs : ["vp9","vp8","h264;packetization-mode=1"],
		rtx : true,
		rtcpfbs : [
			{ "id": "transport-cc"},
			{ "id": "ccm", "params": ["fir"]},
			{ "id": "nack"},
			{ "id": "nack", "params": ["pli"]}
		],
		extensions : [ "http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01"]
	}
};
/**
 * Playback demo handler: streams the first mp4 found in ./recordings back to
 * the connecting client.
 * @param request  websocket connection request
 * @param protocol accepted websocket subprotocol
 * @param endpoint shared MediaServer UDP endpoint
 */
module.exports = function(request,protocol,endpoint)
{
	const connection = request.accept(protocol);
	//Created on OFFER, controlled by the later PLAY command
	let player;
	connection.on('message', (frame) =>
	{
		//Get cmd
		var msg = JSON.parse(frame.utf8Data);
		//Get cmd
		if (msg.cmd==="OFFER")
		{
			//Pick the first mp4 recording available
			const mp4 = FileSystem.readdirSync('recordings')
				.find((file) => Path.extname(file)===".mp4");
			//Check
			if (!mp4)
			{
				console.error("no mp4 found");
				return connection.close();
			}
			//Create player
			player = MediaServer.createPlayer(Path.join("recordings",mp4));
			//Process the sdp
			var offer = SDPInfo.process(msg.offer);
			//Create an DTLS ICE transport in that endpoint
			const transport = endpoint.createTransport(offer);
			//Set RTP remote properties (exactly once; a duplicated call was removed)
			transport.setRemoteProperties(offer);
			//Dump outgoing rtp/rtcp traffic for debugging
			transport.dump("recordings/play-"+ Date.now()+".pcap",{
				outgoing : true,
				rtcp : true
			});
			//Create local SDP info
			const answer = offer.answer({
				dtls         : transport.getLocalDTLSInfo(),
				ice          : transport.getLocalICEInfo(),
				candidates   : endpoint.getLocalCandidates(),
				capabilities : Capabilities
			});
			//Set RTP local properties
			transport.setLocalProperties(answer);
			//Create new local stream with audio and video
			const outgoingStream = transport.createOutgoingStream({
				audio: true,
				video: true
			});
			//Feed the player tracks into the outgoing stream
			outgoingStream.attachTo(player);
			//Get local stream info
			const info = outgoingStream.getStreamInfo();
			//Add local stream info it to the answer
			answer.addStream(info);
			//Send response
			connection.sendUTF(JSON.stringify({
				answer : answer.toString()
			}));
			//Close on disconnect
			connection.on("close",() => {
				//Stop
				transport.stop();
				//Stop playback too
				player.stop();
			});
		} else if (msg.cmd==="PLAY") {
			//Guard: a PLAY arriving before OFFER would crash on undefined player
			if (player)
				player.play({
					repeat : true
				});
		}
	});
};

View File

@@ -0,0 +1,134 @@
//Get the Medooze Media Server interface
const MediaServer = require("medooze-media-server");
const FileSystem = require("fs");
const Path = require("path");
//Get Semantic SDP objects
const SemanticSDP = require("semantic-sdp");
const SDPInfo = SemanticSDP.SDPInfo;
const MediaInfo = SemanticSDP.MediaInfo;
const CandidateInfo = SemanticSDP.CandidateInfo;
const DTLSInfo = SemanticSDP.DTLSInfo;
const ICEInfo = SemanticSDP.ICEInfo;
const StreamInfo = SemanticSDP.StreamInfo;
const TrackInfo = SemanticSDP.TrackInfo;
const Direction = SemanticSDP.Direction;
const CodecInfo = SemanticSDP.CodecInfo;
//Local media capabilities offered back in the SDP answer
const Capabilities = {
	audio : {
		codecs : ["opus"],
		extensions : [ "urn:ietf:params:rtp-hdrext:ssrc-audio-level", "http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01"]
	},
	video : {
		codecs : ["vp9","vp8","h264;packetization-mode=1"],
		rtx : true,
		rtcpfbs : [
			{ "id": "transport-cc"},
			{ "id": "ccm", "params": ["fir"]},
			{ "id": "nack"},
			{ "id": "nack", "params": ["pli"]}
		],
		extensions : [ "http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01"]
	}
};
/**
 * Playback demo handler: streams the first mp4 found in ./recordings back to
 * the connecting client.
 * @param request  websocket connection request
 * @param protocol accepted websocket subprotocol
 * @param endpoint shared MediaServer UDP endpoint
 */
module.exports = function(request,protocol,endpoint)
{
	const connection = request.accept(protocol);
	//Created on OFFER, controlled by the later PLAY command
	let player;
	connection.on('message', (frame) =>
	{
		//Get cmd
		var msg = JSON.parse(frame.utf8Data);
		//Get cmd
		if (msg.cmd==="OFFER")
		{
			//Pick the first mp4 recording available
			const mp4 = FileSystem.readdirSync('recordings')
				.find((file) => Path.extname(file)===".mp4");
			//Check
			if (!mp4)
			{
				console.error("no mp4 found");
				return connection.close();
			}
			//Create player
			player = MediaServer.createPlayer(Path.join("recordings",mp4));
			//Process the sdp
			var offer = SDPInfo.process(msg.offer);
			//Create an DTLS ICE transport in that endpoint
			const transport = endpoint.createTransport(offer);
			//Set RTP remote properties (exactly once; a duplicated call was removed)
			transport.setRemoteProperties(offer);
			//Dump outgoing rtp/rtcp traffic for debugging
			transport.dump("recordings/play-"+ Date.now()+".pcap",{
				outgoing : true,
				rtcp : true
			});
			//Create local SDP info
			const answer = offer.answer({
				dtls         : transport.getLocalDTLSInfo(),
				ice          : transport.getLocalICEInfo(),
				candidates   : endpoint.getLocalCandidates(),
				capabilities : Capabilities
			});
			//Set RTP local properties
			transport.setLocalProperties(answer);
			//Create new local stream with audio and video
			const outgoingStream = transport.createOutgoingStream({
				audio: true,
				video: true
			});
			//Feed the player tracks into the outgoing stream
			outgoingStream.attachTo(player);
			//Get local stream info
			const info = outgoingStream.getStreamInfo();
			//Add local stream info it to the answer
			answer.addStream(info);
			//Send response
			connection.sendUTF(JSON.stringify({
				answer : answer.toString()
			}));
			//Close on disconnect
			connection.on("close",() => {
				//Stop
				transport.stop();
				//Stop playback too
				player.stop();
			});
		} else if (msg.cmd==="PLAY") {
			//Guard: a PLAY arriving before OFFER would crash on undefined player
			if (player)
				player.play({
					repeat : true
				});
		}
	});
};

View File

@@ -0,0 +1,121 @@
//Get the Medooze Media Server interface
const MediaServer = require("medooze-media-server");
//Get Semantic SDP objects
const SemanticSDP = require("semantic-sdp");
const SDPInfo = SemanticSDP.SDPInfo;
const MediaInfo = SemanticSDP.MediaInfo;
const CandidateInfo = SemanticSDP.CandidateInfo;
const DTLSInfo = SemanticSDP.DTLSInfo;
const ICEInfo = SemanticSDP.ICEInfo;
const StreamInfo = SemanticSDP.StreamInfo;
const TrackInfo = SemanticSDP.TrackInfo;
const Direction = SemanticSDP.Direction;
const CodecInfo = SemanticSDP.CodecInfo;
const Capabilities = {
audio : {
codecs : ["opus"],
extensions : [ "urn:ietf:params:rtp-hdrext:ssrc-audio-level"]
},
video : {
codecs : ["h264;packetization-mode=1"],
rtx : true,
rtcpfbs : [
{ "id": "transport-cc"},
{ "id": "ccm", "params": ["fir"]},
{ "id": "nack"},
{ "id": "nack", "params": ["pli"]}
],
extensions : [ "http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01"]
}
};
module.exports = function(request,protocol,endpoint)
{
const connection = request.accept(protocol);
connection.on('message', (frame) =>
{
//Get cmd
var msg = JSON.parse(frame.utf8Data);
//Get cmd
if (msg.cmd==="OFFER")
{
console.log(msg.offer);
//Process the sdp
var offer = SDPInfo.process(msg.offer);
//Create recoreder
const recorder = MediaServer.createRecorder ("recordings/"+ Date.now() +".mp4",{
// refresh : 15000,
// timeShift : 60000,
disableHints : true,
});
//Create an DTLS ICE transport in that enpoint
const transport = endpoint.createTransport(offer);
transport.setBandwidthProbing(true);
transport.setMaxProbingBitrate(1024000);
transport.on("targetbitrate", bitrate=>console.log("targetbitrate " + bitrate));
transport.on("dtlsstate", state=>console.log("dtlsstate:"+state));
transport.dump("recordings/rec-"+ Date.now()+".pcap");
//Set RTP remote properties
transport.setRemoteProperties(offer);
//Create local SDP info
const answer = offer.answer({
dtls : transport.getLocalDTLSInfo(),
ice : transport.getLocalICEInfo(),
candidates : endpoint.getLocalCandidates(),
capabilities : Capabilities
});
//Set RTP local properties
transport.setLocalProperties(answer);
//For each stream offered
for (let offered of offer.getStreams().values())
{
//Create the remote stream into the transport
const incomingStream = transport.createIncomingStream(offered);
//Create new local stream with only audio
const outgoingStream = transport.createOutgoingStream({
audio: false,
video: true
});
//Get local stream info
const info = outgoingStream.getStreamInfo();
//Copy incoming data from the remote stream to the local one
outgoingStream.attachTo(incomingStream);
//Add local stream info it to the answer
answer.addStream(info);
//Record it
recorder.record(incomingStream);
}
//setTimeout(()=>recorder.flush(),60000);
//Send response
connection.sendUTF(JSON.stringify({
answer : answer.toString()
}));
//Close on disconnect
connection.on("close",() => {
console.log("close");
//Stop
recorder.stop();
transport && transport.stop();
});
}
});
};

View File

@@ -0,0 +1,141 @@
//Get the Medooze Media Server interface
const MediaServer = require("medooze-media-server");
//Get Semantic SDP objects
const SemanticSDP = require("semantic-sdp");
const SDPInfo = SemanticSDP.SDPInfo;
const MediaInfo = SemanticSDP.MediaInfo;
const CandidateInfo = SemanticSDP.CandidateInfo;
const DTLSInfo = SemanticSDP.DTLSInfo;
const ICEInfo = SemanticSDP.ICEInfo;
const StreamInfo = SemanticSDP.StreamInfo;
const TrackInfo = SemanticSDP.TrackInfo;
const Direction = SemanticSDP.Direction;
const CodecInfo = SemanticSDP.CodecInfo;
//Local capabilities answered back: h264 with RTX, simulcast enabled
const Capabilities = {
	video : {
		codecs : ["h264;packetization-mode=1"],
		rtx : true,
		rtcpfbs : [
			{ "id": "goog-remb"},
			{ "id": "transport-cc"},
			{ "id": "ccm", "params": ["fir"]},
			{ "id": "nack"},
			{ "id": "nack", "params": ["pli"]}
		],
		extensions : [
			"urn:3gpp:video-orientation",
			"http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01",
			"urn:ietf:params:rtp-hdrext:sdes:mid",
			"urn:ietf:params:rtp-hdrext:sdes:rtp-stream-id",
			"urn:ietf:params:rtp-hdrext:sdes:repaired-rtp-stream-id",
			"http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time"
		],
		simulcast : true
	}
};
/**
 * Simulcast demo handler: loops the client's simulcast video back and lets
 * the client pick the forwarded encoding (rid) and spatial/temporal layer.
 * @param request  websocket connection request
 * @param protocol accepted websocket subprotocol ("simulcast")
 * @param endpoint shared MediaServer UDP endpoint
 */
module.exports = function(request,protocol,endpoint)
{
	const connection = request.accept(protocol);
	connection.on('message', (frame) =>
	{
		//Get cmd
		var msg = JSON.parse(frame.utf8Data);
		//Get cmd
		if (msg.cmd==="OFFER")
		{
			//Process the sdp
			var offer = SDPInfo.process(msg.offer);
			//Create an DTLS ICE transport in that endpoint
			const transport = endpoint.createTransport(offer);
			//Set RTP remote properties
			transport.setRemoteProperties(offer);
			//Enable bandwidth probing
			transport.setBandwidthProbing(true);
			transport.setMaxProbingBitrate(300*1000);
			//Create local SDP info
			const answer = offer.answer({
				dtls		: transport.getLocalDTLSInfo(),
				ice		: transport.getLocalICEInfo(),
				candidates	: endpoint.getLocalCandidates(),
				capabilities	: Capabilities
			});
			//Set RTP local properties
			transport.setLocalProperties({
				video : answer.getMedia("video")
			});
			//Get timestamp
			const ts = Date.now();
			//Dump contents
			transport.dump("recordings/simulcast-"+ts+".pcap");
			//Create recorder
			//const recorder = MediaServer.createRecorder ("recordings/simulcast"+ts +".mp4");
			//For each stream offered
			for (let offered of offer.getStreams().values())
			{
				//Create the remote stream into the transport
				const incomingStream = transport.createIncomingStream(offered);
				//Create new local stream
				const outgoingStream = transport.createOutgoingStream({
					audio: false,
					video: true
				});
				//Get local stream info
				const info = outgoingStream.getStreamInfo();
				//Loop the incoming stream back and keep the video transponder so
				//layer/encoding selection commands can be applied later
				connection.transponder = outgoingStream.attachTo(incomingStream)[0];
				//Add local stream info it to the answer
				answer.addStream(info);
				//Record it
				//recorder.record(incomingStream);
			}
			//Send response (browser-side code expects the uppercase codec name)
			connection.sendUTF(JSON.stringify({
				answer : answer.toString().replace("h264","H264")
			}));
			console.log("OFFER");
			console.log(msg.offer);
			console.log("ANSWER");
			console.log(answer.toString().replace("h264","H264"));
			//Close on disconnect
			connection.on("close",() => {
				//Stop transport an recorder
				transport.stop();
				//recorder.stop();
			});
		} else {
			//Forward only the requested simulcast encoding
			connection.transponder.selectEncoding(msg.rid);
			//Select layer
			connection.transponder.selectLayer(parseInt(msg.spatialLayerId),parseInt(msg.temporalLayerId));
		}
	});
};

View File

@@ -0,0 +1,153 @@
//SVC demo handler: loops the client's VP9 SVC video back and adapts the
//forwarded spatial/temporal layer to the estimated available bandwidth.
//Get the Medooze Media Server interface
const MediaServer = require("medooze-media-server");
//Get Semantic SDP objects
const SemanticSDP = require("semantic-sdp");
const SDPInfo = SemanticSDP.SDPInfo;
const MediaInfo = SemanticSDP.MediaInfo;
const CandidateInfo = SemanticSDP.CandidateInfo;
const DTLSInfo = SemanticSDP.DTLSInfo;
const ICEInfo = SemanticSDP.ICEInfo;
const StreamInfo = SemanticSDP.StreamInfo;
const TrackInfo = SemanticSDP.TrackInfo;
const Direction = SemanticSDP.Direction;
const CodecInfo = SemanticSDP.CodecInfo;
//Local capabilities: VP9 only, with RTX and transport-wide CC feedback
const Capabilities = {
	video : {
		codecs : ["vp9"],
		rtx : true,
		rtcpfbs : [
			{ "id": "transport-cc"},
			{ "id": "ccm", "params": ["fir"]},
			{ "id": "nack"},
			{ "id": "nack", "params": ["pli"]}
		],
		extensions : [ "http://www.ietf.org/id/draft-holmer-rmcat-transport-wide-cc-extensions-01"]
	}
};
/**
 * Accepts the websocket connection, answers the offer, loops the first
 * offered stream back and drives layer selection from "targetbitrate" events.
 * @param request  websocket connection request
 * @param protocol accepted websocket subprotocol ("svc")
 * @param endpoint shared MediaServer UDP endpoint
 */
module.exports = function(request,protocol,endpoint)
{
	const connection = request.accept(protocol);
	connection.on('message', (frame) =>
	{
		//Get cmd
		var msg = JSON.parse(frame.utf8Data);
		//Get cmd
		if (msg.cmd==="OFFER")
		{
			//NOTE(review): populated below but never read — candidate for removal
			const streams = {};
			//Process the sdp
			var offer = SDPInfo.process(msg.offer);
			//Create an DTLS ICE transport in that enpoint
			const transport = endpoint.createTransport(offer);
			//Enable probing
			transport.setBandwidthProbing(true);
			transport.setMaxProbingBitrate(512000);
			//DUMP
			//transport.dump("recordings/svc-"+ts+".pcap",{incoming:true,rtcp:true,rtpHeadersOnly:true,bwe:true});
			//Set RTP remote properties
			transport.setRemoteProperties({
				audio : offer.getMedia("audio"),
				video : offer.getMedia("video")
			});
			//Create local SDP info
			const answer = offer.answer({
				dtls		: transport.getLocalDTLSInfo(),
				ice		: transport.getLocalICEInfo(),
				candidates	: endpoint.getLocalCandidates(),
				capabilities	: Capabilities
			});
			//Set RTP local properties
			transport.setLocalProperties({
				video : answer.getMedia("video")
			});
			//Get offered stream info
			const offered = offer.getFirstStream();
			//Create the remote stream into the transports
			const incomingStream = transport.createIncomingStream(offered);
			//Create new local stream
			const outgoingStream = transport.createOutgoingStream({
				audio: false,
				video: true
			});
			//Get local stream info
			const info = outgoingStream.getStreamInfo();
			//Loop the stream back; keep the video transponder for layer control
			//(note: the connection property is misspelled "transporder" but is
			//read back with the same spelling below, so it works)
			const transponder = connection.transporder = outgoingStream.attachTo(incomingStream)[0];
			//Start on the lowest spatial/temporal layer
			transponder.selectLayer(0,0);
			//Listen for bwe events
			transport.on("targetbitrate", bitrate=>{
				//Get previous layer ids
				const sid = transponder.getSelectedSpatialLayerId();
				const tid = transponder.getSelectedTemporalLayerId();
				//Select stream layer from bitrate
				const rate = transponder.setTargetBitrate(bitrate);
				//Get next layer
				const next = rate.layers[rate.layerIndex-1];
				//Probing
				let probing = false;
				//If there is a higher layer to probe towards
				if (next)
				{
					//NOTE(review): "next.bitrate-rate" subtracts the value
					//returned by setTargetBitrate, which is indexed as an object
					//(rate.layers/rate.layerIndex) above — if it is not also
					//numeric this yields NaN; verify against the media-server API
					probing = next.bitrate-rate;
					//Set it on transport
					transport.setMaxProbingBitrate(next.bitrate-rate);
					//Enable
					transport.setBandwidthProbing(true);
				} else
					//Disable
					transport.setBandwidthProbing(false);
				//Log
				console.log("targetbitrate :" + bitrate + " probing:" + probing +" sid:" + transponder.getSelectedSpatialLayerId() + " tid:" +transponder.getSelectedTemporalLayerId());
				//If the selected layer changed, notify the client
				if (sid!=transponder.getSelectedSpatialLayerId() || tid!=transponder.getSelectedTemporalLayerId())
					//Send response
					connection.sendUTF(JSON.stringify({
						sid : transponder.getSelectedSpatialLayerId(),
						tid : transponder.getSelectedTemporalLayerId()
					}));
			});
			//Add local stream info it to the answer
			answer.addStream(info);
			//Add to streams
			streams[incomingStream.getId()] = incomingStream;
			//Send response
			connection.sendUTF(JSON.stringify({
				answer : answer.toString()
			}));
			//Close on disconnect
			connection.on("close",() => {
				//Stop
				transport.stop();
			});
		} else {
			//Select layer (manual override requested by the client)
			connection.transporder.selectLayer(parseInt(msg.spatialLayerId),parseInt(msg.temporalLayerId));
		}
	});
};

View File

@@ -0,0 +1,24 @@
{
"name": "medooze-media-server-demo",
"version": "1.1.0",
"description": "Demo application for the Medooze Media Server for Node.js",
"main": "index.js",
"scripts": {
"install": "openssl req -nodes -new -x509 -keyout server.key -out server.cert -subj \"/CN=localhost\""
},
"repository": {
"type": "git",
"url": "git+https://github.com/murillo128/media-server-demo-node.git"
},
"author": "Sergio Garcia Murillo",
"license": "MIT",
"bugs": {
"url": "https://github.com/murillo128/media-server-demo-node/issues"
},
"homepage": "https://github.com/murillo128/media-server-demo-node#readme",
"dependencies": {
"medooze-media-server": "^0",
"semantic-sdp": "^3",
"websocket": "^1.0.26"
}
}

View File

@@ -0,0 +1,200 @@
<html>
<head>
<link rel="stylesheet" href="https://fonts.googleapis.com/css?family=Roboto:300,400,500,700" type="text/css">
<link rel="stylesheet" href="https://fonts.googleapis.com/icon?family=Material+Icons">
<link rel="stylesheet" href="https://code.getmdl.io/1.3.0/material.indigo-pink.min.css">
<script defer src="https://code.getmdl.io/1.3.0/material.min.js"></script>
<style>
html {
zoom: 90%;
}
body {
background: #e2e1e0;
text-align: center;
margin: 0px;
padding: 0px;
font-size: 9px;
color: #555;
font-family: Roboto;
text-align: -webkit-center;
}
table {
margin: 10px;
position: relative;
left: -40px;
}
video {
object-fit: cover;
float: left;
background: #fff;
border-radius: 2px;
display: inline-block;
margin: 1rem;
position: relative;
width: 420px;
box-shadow: 0 1px 3px rgba(0,0,0,0.12), 0 1px 2px rgba(0,0,0,0.24);
transition: all 0.5s cubic-bezier(.25,.8,.25,1);
padding:1px;
bottom: 0px;
height: 315px;
}
#container {
top: 10px;
left: 10px;
margin: 0px;
padding: 0px;
width: 900px;
}
.container-video {
width: 50%;
float: left;
display: inline-block;
position: relative;
font-size: 24px;
top: 5px;
margin-bottom: 10px;
}
.gaugeChartContainer {
position: relative;
width: 190px;
height: 120px;
float: left;
padding: 10px;
}
.gaugeChart {
position: relative;
text-align: center;
}
.gaugeChart canvas {
position: absolute;
top: 0;
left: 0;
z-index: 0;
}
.gaugeChartLabel {
display: inline-block;
position: absolute;
float: left;
left: 0;
top: 55px;
width: 100%;
text-align: center;
color: #FFFFFF;
font-size: 24px;
font-weight: bold;
z-index: 1;
text-shadow: #333 0px 0px 2px;
}
.gaugeChartContainer {
position: relative;
font-size: 9px;
}
.gaugeChartTitle {
display: inline-block;
position: absolute;
float: left;
top: 0px;
left: 0;
width: 100%;
text-align: center;
color: #888;
font-weight: bold;
font-size: 12px;
}
.gaugeChartMin {
display: inline-block;
position: absolute;
float: left;
left: 0;
bottom: 10%;
width: 92%;
margin-left: 8%;
text-align: left;
color: #888;
font-weight: bold;
}
.gaugeChartMax {
display: inline-block;
position: absolute;
float: left;
left: 0;
bottom: 10%;
width: 95%;
margin-right: 5%;
text-align: right;
color: #888;
font-weight: bold;
}
td {
margin: 5px;
padding: 5px;
text-align: center;
}
.ready-dialog
{
width: 780px;
text-align: left;
}
.ready-dialog p
{
color: black;
font-size: 12pt;
}
.ready-dialog code
{
font-size: 12pt;
}
</style>
<script src="../js/gauge.min.js" type="text/javascript"></script>
</head>
<body>
<div id="container">
<div class="container-video">REMOTE
<video id="remote"></video>
</div>
<dialog class="ready-dialog mdl-dialog">
<h4 class="mdl-dialog__title">Ready to test H264 broadcasting?</h4>
<div class="mdl-dialog__content">
<p>
This demo allows you to test the streaming capabilities of the Medooze Media Server.
</p>
<p>
You will need to open an mp4 file with VLC and start streaming it with:
<pre>
<code>
:sout=#duplicate{dst=rtp{dst=your_ip_address,port=5004,sap,name=sergio},dst=display}
:no-sout-audio
:sout-vide
:sout-keep
</code>
</pre>
</p>
</div>
<div class="mdl-dialog__actions">
<button type="button" class="ready mdl-button mdl-button mdl-button--raised mdl-button--accent">Ready!</button>
</div>
</dialog>
</body>
</html>
<script src="js/broadcast.js" type="text/javascript"></script>

View File

@@ -0,0 +1,121 @@
const url = "wss://"+window.location.hostname+":"+window.location.port;
const roomId = (new Date()).getTime() + "-" + Math.random();
/**
 * Attach a MediaStream to one of the page's fixed <video> elements.
 *
 * @param {MediaStream} stream to display
 * @param {boolean} muted true selects the "#local" element and mutes playback,
 *     otherwise the "#remote" element is used
 */
function addVideoForStream(stream,muted)
{
	//Pick the pre-existing video element for this role
	const video = document.querySelector(muted ? "#local" : "#remote");
	//Keep the element findable by stream id (removeVideoForStream relies on it)
	video.id = stream.id;
	//Attach the stream. srcObject is the standard API; URL.createObjectURL(MediaStream)
	//has been removed from modern browsers, so only fall back to it when needed.
	if ('srcObject' in video)
		video.srcObject = stream;
	else
		video.src = URL.createObjectURL(stream);
	//Start playback; the local preview is muted to avoid audio feedback
	video.autoplay = true;
	video.muted = muted;
}
/**
 * Fade out and then remove the <video> element bound to a stream.
 *
 * @param {MediaStream} stream whose element (id === stream.id) should go away
 */
function removeVideoForStream(stream)
{
	const video = document.getElementById(stream.id);
	//Once the CSS transition triggered below finishes, drop the node from the DOM
	video.addEventListener('webkitTransitionEnd', function() {
		video.parentElement.removeChild(video);
	});
	//Kick off the fade-out transition
	video.className = "disabled";
}
var sdp;
var pc;
/**
 * Open the signalling WebSocket (sub-protocol "broadcast") and negotiate a
 * receive-only RTCPeerConnection with the media server.
 *
 * Side effects: assigns the module-level `pc` and `sdp`, sends an OFFER
 * message over the socket, and renders/removes remote streams via
 * addVideoForStream / removeVideoForStream.
 *
 * NOTE(review): uses the legacy onaddstream/onremovestream events and the
 * callback form of setRemoteDescription; modern browsers expect
 * ontrack + promises — confirm target browser support.
 */
function connect()
{
if (window.RTCPeerConnection)
pc = new RTCPeerConnection({
bundlePolicy: "max-bundle",
rtcpMuxPolicy : "require"
});
else
pc = new webkitRTCPeerConnection(null);
var ws = new WebSocket(url,"broadcast");
pc.onaddstream = function(event) {
var prev = 0;
console.debug("onAddStream",event);
//Render the incoming remote stream
addVideoForStream(event.stream);
};
pc.onremovestream = function(event) {
console.debug("onRemoveStream",event);
//Tear down the video element for the departed stream
removeVideoForStream(event.stream);
};
ws.onopen = function(){
console.log("opened");
//Create new recv-only offer once signalling is up
pc.createOffer({
offerToReceiveVideo: true
})
.then(function(offer){
console.debug("createOffer sucess",offer);
//Keep the SDP for the signalling message
sdp = offer.sdp;
//Apply it locally so ICE/DTLS can start
pc.setLocalDescription(offer);
console.log(sdp);
//Send the offer to the server
ws.send(JSON.stringify({
cmd : "OFFER",
offer : sdp
}));
})
.catch(function(error){
console.error("Error",error);
});
};
ws.onmessage = function(event){
console.log(event);
//Server replies with the SDP answer as JSON
const msg = JSON.parse(event.data);
console.log(msg.answer);
pc.setRemoteDescription(new RTCSessionDescription({
type:'answer',
sdp: msg.answer
}), function () {
console.log("JOINED");
}, function (err) {
console.error("Error joining",err);
}
);
};
}
//Grab the intro <dialog>. When showModal is available (natively or via the
//polyfill), wait for the user to press "Ready!" before connecting, so the
//connection starts from a user gesture.
var dialog = document.querySelector('dialog');
if (dialog.showModal)
{
dialog.showModal();
dialog.querySelector('.ready').addEventListener('click', function() {
dialog.close();
connect();
});
} else {
//No modal support at all: start immediately.
connect();
}

View File

@@ -0,0 +1,16 @@
<html>
<head>
<link rel="stylesheet" href="https://fonts.googleapis.com/css?family=Roboto:300,400,500,700" type="text/css">
<link rel="stylesheet" href="https://fonts.googleapis.com/icon?family=Material+Icons">
<link rel="stylesheet" href="https://code.getmdl.io/1.3.0/material.indigo-pink.min.css">
<script defer src="https://code.getmdl.io/1.3.0/material.min.js"></script>
</head>
<body>
</body>
</html>
<script src="js/datachannels.js" type="text/javascript"></script>

View File

@@ -0,0 +1,50 @@
const url = "wss://"+window.location.hostname+":"+window.location.port;
var sdp;
var pc;
/**
 * Negotiate a DataChannel-only RTCPeerConnection with the media server over
 * the "datachannels" WebSocket sub-protocol.
 *
 * Side effects: assigns the module-level `pc` and `sdp` and sends an OFFER
 * message over the socket.
 */
function connect()
{
//Create PC
pc = new RTCPeerConnection();
//Creating a channel before the offer makes the offer include an m=application section
const dc = pc.createDataChannel("aaaaaaaaaaaaaaaa");
var ws = new WebSocket(url,"datachannels");
ws.onopen = async function() {
//Create new offer
const offer = await pc.createOffer();
//We have sdp
sdp = offer.sdp;
console.log("offer",sdp);
//Set it
await pc.setLocalDescription(offer);
//Create room
ws.send(JSON.stringify({
cmd : "OFFER",
offer : sdp
}));
};
ws.onmessage = function(event){
//Get protocol message
const msg = JSON.parse(event.data);
//Munge the server answer: rewrite the m-line to the legacy DTLS/SCTP syntax
//and append an sctpmap attribute. NOTE(review): this assumes the server emits
//"m=application 9 UDP/TLS/RTP/SAVPF" — verify against the server's SDP.
const answer = msg.answer.replace("m=application 9 UDP/TLS/RTP/SAVPF","m=application 9 DTLS/SCTP 5000") + "a=sctpmap:5000 webrtc-datachannel 1024\r\n"
console.log("answer",answer);
pc.setRemoteDescription(new RTCSessionDescription({
type:'answer',
sdp: answer
}), function () {
console.log("JOINED");
}, function (err) {
console.error("Error joining",err);
}
);
};
}
connect();

View File

@@ -0,0 +1,738 @@
(function() {
// nb. This is for IE10 and lower _only_.
var supportCustomEvent = window.CustomEvent;
if (!supportCustomEvent || typeof supportCustomEvent === 'object') {
supportCustomEvent = function CustomEvent(event, x) {
x = x || {};
var ev = document.createEvent('CustomEvent');
ev.initCustomEvent(event, !!x.bubbles, !!x.cancelable, x.detail || null);
return ev;
};
supportCustomEvent.prototype = window.Event.prototype;
}
/**
* @param {Element} el to check for stacking context
* @return {boolean} whether this el or its parents creates a stacking context
*/
function createsStackingContext(el) {
//Walk ancestors up to (but not including) <body>
while (el && el !== document.body) {
var s = window.getComputedStyle(el);
//True when computed property k is present and differs from its
//non-stacking default value `ok`
var invalid = function(k, ok) {
return !(s[k] === undefined || s[k] === ok);
}
//Any of these computed values forces a new stacking context
if (s.opacity < 1 ||
invalid('zIndex', 'auto') ||
invalid('transform', 'none') ||
invalid('mixBlendMode', 'normal') ||
invalid('filter', 'none') ||
invalid('perspective', 'none') ||
s['isolation'] === 'isolate' ||
s.position === 'fixed' ||
s.webkitOverflowScrolling === 'touch') {
return true;
}
el = el.parentElement;
}
return false;
}
/**
* Finds the nearest <dialog> from the passed element.
*
* @param {Element} el to search from
* @return {HTMLDialogElement} dialog found
*/
/**
 * Walks up the tree from el looking for the closest enclosing <dialog>
 * (including el itself).
 *
 * @param {Element} el to search from
 * @return {HTMLDialogElement} nearest dialog, or null when none encloses el
 */
function findNearestDialog(el) {
  for (var node = el; node; node = node.parentElement) {
    if (node.localName === 'dialog') {
      return /** @type {HTMLDialogElement} */ (node);
    }
  }
  return null;
}
/**
* Blur the specified element, as long as it's not the HTML body element.
* This works around an IE9/10 bug - blurring the body causes Windows to
* blur the whole application.
*
* @param {Element} el to blur
*/
/**
 * Blur el unless it is the document body. Works around an IE9/10 bug where
 * blurring <body> blurs the whole application window.
 *
 * @param {Element} el to blur
 */
function safeBlur(el) {
  // Nothing to do without an element that can blur.
  if (!el || !el.blur) { return; }
  // Never blur the body itself (IE9/10 bug — see above).
  if (el === document.body) { return; }
  el.blur();
}
/**
* @param {!NodeList} nodeList to search
* @param {Node} node to find
* @return {boolean} whether node is inside nodeList
*/
/**
 * Reports whether node appears in nodeList (identity comparison).
 *
 * @param {!NodeList} nodeList to search
 * @param {Node} node to find
 * @return {boolean} whether node is inside nodeList
 */
function inNodeList(nodeList, node) {
  // NodeList has no .includes; borrow Array.prototype.some for the scan.
  return Array.prototype.some.call(nodeList, function(candidate) {
    return candidate === node;
  });
}
/**
* @param {HTMLFormElement} el to check
* @return {boolean} whether this form has method="dialog"
*/
/**
 * Reports whether a form element carries method="dialog" (any letter case).
 *
 * @param {HTMLFormElement} el to check
 * @return {boolean} whether this form has method="dialog"
 */
function isFormMethodDialog(el) {
  if (!el) { return false; }
  if (!el.hasAttribute('method')) { return false; }
  var method = el.getAttribute('method');
  return method.toLowerCase() === 'dialog';
}
/**
* @param {!HTMLDialogElement} dialog to upgrade
* @constructor
*/
function dialogPolyfillInfo(dialog) {
this.dialog_ = dialog;
this.replacedStyleTop_ = false;
this.openAsModal_ = false;
// Set a11y role. Browsers that support dialog implicitly know this already.
if (!dialog.hasAttribute('role')) {
dialog.setAttribute('role', 'dialog');
}
// Replace the element's stubs with polyfill implementations bound to this info object.
dialog.show = this.show.bind(this);
dialog.showModal = this.showModal.bind(this);
dialog.close = this.close.bind(this);
if (!('returnValue' in dialog)) {
dialog.returnValue = '';
}
// Watch the `open` attribute so external changes can downgrade modality.
if ('MutationObserver' in window) {
var mo = new MutationObserver(this.maybeHideModal.bind(this));
mo.observe(dialog, {attributes: true, attributeFilter: ['open']});
} else {
// IE10 and below support. Note that DOMNodeRemoved etc fire _before_ removal. They also
// seem to fire even if the element was removed as part of a parent removal. Use the removed
// events to force downgrade (useful if removed/immediately added).
var removed = false;
var cb = function() {
removed ? this.downgradeModal() : this.maybeHideModal();
removed = false;
}.bind(this);
var timeout;
// Debounce: mutation events for one change can fire several times in a row.
var delayModel = function(ev) {
if (ev.target !== dialog) { return; } // not for a child element
var cand = 'DOMNodeRemoved';
removed |= (ev.type.substr(0, cand.length) === cand);
window.clearTimeout(timeout);
timeout = window.setTimeout(cb, 0);
};
['DOMAttrModified', 'DOMNodeRemoved', 'DOMNodeRemovedFromDocument'].forEach(function(name) {
dialog.addEventListener(name, delayModel);
});
}
// Note that the DOM is observed inside DialogManager while any dialog
// is being displayed as a modal, to catch modal removal from the DOM.
Object.defineProperty(dialog, 'open', {
set: this.setOpen.bind(this),
get: dialog.hasAttribute.bind(dialog, 'open')
});
// Fake ::backdrop element, inserted as a sibling while the dialog is modal.
this.backdrop_ = document.createElement('div');
this.backdrop_.className = 'backdrop';
this.backdrop_.addEventListener('click', this.backdropClick_.bind(this));
}
dialogPolyfillInfo.prototype = {
get dialog() {
return this.dialog_;
},
/**
* Maybe remove this dialog from the modal top layer. This is called when
* a modal dialog may no longer be tenable, e.g., when the dialog is no
* longer open or is no longer part of the DOM.
*/
maybeHideModal: function() {
if (this.dialog_.hasAttribute('open') && document.body.contains(this.dialog_)) { return; }
this.downgradeModal();
},
/**
* Remove this dialog from the modal top layer, leaving it as a non-modal.
*/
downgradeModal: function() {
if (!this.openAsModal_) { return; }
this.openAsModal_ = false;
this.dialog_.style.zIndex = '';
// This won't match the native <dialog> exactly because if the user set top on a centered
// polyfill dialog, that top gets thrown away when the dialog is closed. Not sure it's
// possible to polyfill this perfectly.
if (this.replacedStyleTop_) {
this.dialog_.style.top = '';
this.replacedStyleTop_ = false;
}
// Clear the backdrop and remove from the manager.
this.backdrop_.parentNode && this.backdrop_.parentNode.removeChild(this.backdrop_);
dialogPolyfill.dm.removeDialog(this);
},
/**
* @param {boolean} value whether to open or close this dialog
*/
setOpen: function(value) {
if (value) {
this.dialog_.hasAttribute('open') || this.dialog_.setAttribute('open', '');
} else {
this.dialog_.removeAttribute('open');
this.maybeHideModal(); // nb. redundant with MutationObserver
}
},
/**
* Handles clicks on the fake .backdrop element, redirecting them as if
* they were on the dialog itself.
*
* @param {!Event} e to redirect
*/
backdropClick_: function(e) {
if (!this.dialog_.hasAttribute('tabindex')) {
// Clicking on the backdrop should move the implicit cursor, even if dialog cannot be
// focused. Create a fake thing to focus on. If the backdrop was _before_ the dialog, this
// would not be needed - clicks would move the implicit cursor there.
var fake = document.createElement('div');
this.dialog_.insertBefore(fake, this.dialog_.firstChild);
fake.tabIndex = -1;
fake.focus();
this.dialog_.removeChild(fake);
} else {
this.dialog_.focus();
}
var redirectedEvent = document.createEvent('MouseEvents');
redirectedEvent.initMouseEvent(e.type, e.bubbles, e.cancelable, window,
e.detail, e.screenX, e.screenY, e.clientX, e.clientY, e.ctrlKey,
e.altKey, e.shiftKey, e.metaKey, e.button, e.relatedTarget);
this.dialog_.dispatchEvent(redirectedEvent);
e.stopPropagation();
},
/**
* Focuses on the first focusable element within the dialog. This will always blur the current
* focus, even if nothing within the dialog is found.
*/
focus_: function() {
// Find element with `autofocus` attribute, or fall back to the first form/tabindex control.
var target = this.dialog_.querySelector('[autofocus]:not([disabled])');
if (!target && this.dialog_.tabIndex >= 0) {
target = this.dialog_;
}
if (!target) {
// Note that this is 'any focusable area'. This list is probably not exhaustive, but the
// alternative involves stepping through and trying to focus everything.
var opts = ['button', 'input', 'keygen', 'select', 'textarea'];
var query = opts.map(function(el) {
return el + ':not([disabled])';
});
// TODO(samthor): tabindex values that are not numeric are not focusable.
query.push('[tabindex]:not([disabled]):not([tabindex=""])'); // tabindex != "", not disabled
target = this.dialog_.querySelector(query.join(', '));
}
safeBlur(document.activeElement);
target && target.focus();
},
/**
* Sets the zIndex for the backdrop and dialog.
*
* @param {number} dialogZ
* @param {number} backdropZ
*/
updateZIndex: function(dialogZ, backdropZ) {
if (dialogZ < backdropZ) {
throw new Error('dialogZ should never be < backdropZ');
}
this.dialog_.style.zIndex = dialogZ;
this.backdrop_.style.zIndex = backdropZ;
},
/**
* Shows the dialog. If the dialog is already open, this does nothing.
*/
show: function() {
if (!this.dialog_.open) {
this.setOpen(true);
this.focus_();
}
},
/**
* Show this dialog modally.
*/
showModal: function() {
if (this.dialog_.hasAttribute('open')) {
throw new Error('Failed to execute \'showModal\' on dialog: The element is already open, and therefore cannot be opened modally.');
}
if (!document.body.contains(this.dialog_)) {
throw new Error('Failed to execute \'showModal\' on dialog: The element is not in a Document.');
}
if (!dialogPolyfill.dm.pushDialog(this)) {
throw new Error('Failed to execute \'showModal\' on dialog: There are too many open modal dialogs.');
}
if (createsStackingContext(this.dialog_.parentElement)) {
console.warn('A dialog is being shown inside a stacking context. ' +
'This may cause it to be unusable. For more information, see this link: ' +
'https://github.com/GoogleChrome/dialog-polyfill/#stacking-context');
}
this.setOpen(true);
this.openAsModal_ = true;
// Optionally center vertically, relative to the current viewport.
if (dialogPolyfill.needsCentering(this.dialog_)) {
dialogPolyfill.reposition(this.dialog_);
this.replacedStyleTop_ = true;
} else {
this.replacedStyleTop_ = false;
}
// Insert backdrop.
this.dialog_.parentNode.insertBefore(this.backdrop_, this.dialog_.nextSibling);
// Focus on whatever inside the dialog.
this.focus_();
},
/**
* Closes this HTMLDialogElement. This is optional vs clearing the open
* attribute, however this fires a 'close' event.
*
* @param {string=} opt_returnValue to use as the returnValue
*/
close: function(opt_returnValue) {
if (!this.dialog_.hasAttribute('open')) {
throw new Error('Failed to execute \'close\' on dialog: The element does not have an \'open\' attribute, and therefore cannot be closed.');
}
this.setOpen(false);
// Leave returnValue untouched in case it was set directly on the element
if (opt_returnValue !== undefined) {
this.dialog_.returnValue = opt_returnValue;
}
// Triggering "close" event for any attached listeners on the <dialog>.
var closeEvent = new supportCustomEvent('close', {
bubbles: false,
cancelable: false
});
this.dialog_.dispatchEvent(closeEvent);
}
};
var dialogPolyfill = {};
/**
 * Vertically centers the element within the current viewport, clamped so it
 * never starts above the scrolled-to top of the page. Sets inline style.top.
 *
 * @param {!Element} element dialog to position
 */
dialogPolyfill.reposition = function(element) {
  var scrollTop = document.body.scrollTop || document.documentElement.scrollTop;
  var spare = (window.innerHeight - element.offsetHeight) / 2;
  var centeredTop = scrollTop + spare;
  element.style.top = Math.max(scrollTop, centeredTop) + 'px';
};
/**
 * Checks whether any active stylesheet rule matching the element sets a
 * non-auto top or bottom, i.e. whether the author has vertically positioned
 * it via CSS.
 *
 * @param {!Element} element to check
 * @return {boolean} whether a stylesheet positions the element vertically
 */
dialogPolyfill.isInlinePositionSetByStylesheet = function(element) {
for (var i = 0; i < document.styleSheets.length; ++i) {
var styleSheet = document.styleSheets[i];
var cssRules = null;
// Some browsers throw on cssRules.
try {
cssRules = styleSheet.cssRules;
} catch (e) {}
if (!cssRules) { continue; }
for (var j = 0; j < cssRules.length; ++j) {
var rule = cssRules[j];
var selectedNodes = null;
// Ignore errors on invalid selector texts.
try {
selectedNodes = document.querySelectorAll(rule.selectorText);
} catch(e) {}
// Only rules that actually match this element matter.
if (!selectedNodes || !inNodeList(selectedNodes, element)) {
continue;
}
var cssTop = rule.style.getPropertyValue('top');
var cssBottom = rule.style.getPropertyValue('bottom');
if ((cssTop && cssTop !== 'auto') || (cssBottom && cssBottom !== 'auto')) {
return true;
}
}
}
return false;
};
/**
 * Decides whether the polyfill should vertically center this dialog: only
 * absolutely-positioned dialogs with no author-specified top/bottom qualify.
 *
 * @param {!Element} dialog being shown
 * @return {boolean} whether reposition() should run
 */
dialogPolyfill.needsCentering = function(dialog) {
var computedStyle = window.getComputedStyle(dialog);
if (computedStyle.position !== 'absolute') {
return false;
}
// We must determine whether the top/bottom specified value is non-auto. In
// WebKit/Blink, checking computedStyle.top == 'auto' is sufficient, but
// Firefox returns the used value. So we do this crazy thing instead: check
// the inline style and then go through CSS rules.
if ((dialog.style.top !== 'auto' && dialog.style.top !== '') ||
(dialog.style.bottom !== 'auto' && dialog.style.bottom !== '')) {
return false;
}
return !dialogPolyfill.isInlinePositionSetByStylesheet(dialog);
};
/**
* @param {!Element} element to force upgrade
*/
dialogPolyfill.forceRegisterDialog = function(element) {
// Warn (but proceed) when the browser already has native <dialog> support.
if (window.HTMLDialogElement || element.showModal) {
console.warn('This browser already supports <dialog>, the polyfill ' +
'may not work correctly', element);
}
if (element.localName !== 'dialog') {
throw new Error('Failed to register dialog: The element is not a dialog.');
}
// Upgrade in place: wires show/showModal/close and `open` onto the element.
new dialogPolyfillInfo(/** @type {!HTMLDialogElement} */ (element));
};
/**
* @param {!Element} element to upgrade, if necessary
*/
dialogPolyfill.registerDialog = function(element) {
// Idempotent: skip elements that already have native or polyfilled showModal.
if (!element.showModal) {
dialogPolyfill.forceRegisterDialog(element);
}
};
/**
* @constructor
*/
dialogPolyfill.DialogManager = function() {
/** @type {!Array<!dialogPolyfillInfo>} */
// Newest-first stack of currently-open modal dialogs.
this.pendingDialogStack = [];
var checkDOM = this.checkDOM_.bind(this);
// The overlay is used to simulate how a modal dialog blocks the document.
// The blocking dialog is positioned on top of the overlay, and the rest of
// the dialogs on the pending dialog stack are positioned below it. In the
// actual implementation, the modal dialog stacking is controlled by the
// top layer, where z-index has no effect.
this.overlay = document.createElement('div');
this.overlay.className = '_dialog_overlay';
this.overlay.addEventListener('click', function(e) {
this.forwardTab_ = undefined;
e.stopPropagation();
checkDOM([]); // sanity-check DOM
}.bind(this));
this.handleKey_ = this.handleKey_.bind(this);
this.handleFocus_ = this.handleFocus_.bind(this);
// z-index budget; each open dialog consumes two layers (see pushDialog).
this.zIndexLow_ = 100000;
this.zIndexHigh_ = 100000 + 150;
// Tab direction recorded by handleKey_, consumed by handleFocus_.
this.forwardTab_ = undefined;
// Watch for modal dialogs being removed from the DOM while open.
if ('MutationObserver' in window) {
this.mo_ = new MutationObserver(function(records) {
var removed = [];
records.forEach(function(rec) {
for (var i = 0, c; c = rec.removedNodes[i]; ++i) {
if (!(c instanceof Element)) {
continue;
} else if (c.localName === 'dialog') {
removed.push(c);
}
// A removed subtree may contain dialogs too.
removed = removed.concat(c.querySelectorAll('dialog'));
}
});
removed.length && checkDOM(removed);
});
}
};
/**
* Called on the first modal dialog being shown. Adds the overlay and related
* handlers.
*/
dialogPolyfill.DialogManager.prototype.blockDocument = function() {
// Capture-phase focus trap keeps focus inside the top-most modal.
document.documentElement.addEventListener('focus', this.handleFocus_, true);
// ESC (cancel) and TAB (direction tracking) handling for modal dialogs.
document.addEventListener('keydown', this.handleKey_);
// Observe dialog removal from the DOM (no-op where MutationObserver is absent).
this.mo_ && this.mo_.observe(document, {childList: true, subtree: true});
};
/**
* Called on the first modal dialog being removed, i.e., when no more modal
* dialogs are visible.
*/
dialogPolyfill.DialogManager.prototype.unblockDocument = function() {
// Mirror of blockDocument: detach the focus trap, key handler and observer.
document.documentElement.removeEventListener('focus', this.handleFocus_, true);
document.removeEventListener('keydown', this.handleKey_);
this.mo_ && this.mo_.disconnect();
};
/**
* Updates the stacking of all known dialogs.
*/
dialogPolyfill.DialogManager.prototype.updateStacking = function() {
// Hand out z-indexes downward from the top of the budget; the stack is
// newest-first, so index 0 (the active modal) gets the highest layers.
var zIndex = this.zIndexHigh_;
for (var i = 0, dpi; dpi = this.pendingDialogStack[i]; ++i) {
dpi.updateZIndex(--zIndex, --zIndex);
// The blocking overlay sits just below the top-most dialog's backdrop.
if (i === 0) {
this.overlay.style.zIndex = --zIndex;
}
}
// Make the overlay a sibling of the dialog itself.
var last = this.pendingDialogStack[0];
if (last) {
var p = last.dialog.parentNode || document.body;
p.appendChild(this.overlay);
} else if (this.overlay.parentNode) {
this.overlay.parentNode.removeChild(this.overlay);
}
};
/**
* @param {Element} candidate to check if contained or is the top-most modal dialog
* @return {boolean} whether candidate is contained in top dialog
*/
dialogPolyfill.DialogManager.prototype.containedByTopDialog_ = function(candidate) {
// Walk outward through (possibly nested) enclosing <dialog> elements.
while (candidate = findNearestDialog(candidate)) {
for (var i = 0, dpi; dpi = this.pendingDialogStack[i]; ++i) {
if (dpi.dialog === candidate) {
return i === 0; // only valid if top-most
}
}
// Not a pending modal; keep searching above this dialog.
candidate = candidate.parentElement;
}
return false;
};
// Capture-phase focus handler: cancels any focus that escapes the top-most
// modal and, for Tab navigation, wraps focus back toward the dialog.
dialogPolyfill.DialogManager.prototype.handleFocus_ = function(event) {
if (this.containedByTopDialog_(event.target)) { return; }
event.preventDefault();
event.stopPropagation();
safeBlur(/** @type {Element} */ (event.target));
if (this.forwardTab_ === undefined) { return; } // move focus only from a tab key
var dpi = this.pendingDialogStack[0];
var dialog = dpi.dialog;
// Did focus land before or after the dialog in document order?
var position = dialog.compareDocumentPosition(event.target);
if (position & Node.DOCUMENT_POSITION_PRECEDING) {
if (this.forwardTab_) { // forward
dpi.focus_();
} else { // backwards
document.documentElement.focus();
}
} else {
// TODO: Focus after the dialog, is ignored.
}
return false;
};
dialogPolyfill.DialogManager.prototype.handleKey_ = function(event) {
this.forwardTab_ = undefined;
// Escape: fire a cancelable 'cancel' event on the top modal; close it
// unless a listener calls preventDefault.
if (event.keyCode === 27) {
event.preventDefault();
event.stopPropagation();
var cancelEvent = new supportCustomEvent('cancel', {
bubbles: false,
cancelable: true
});
var dpi = this.pendingDialogStack[0];
if (dpi && dpi.dialog.dispatchEvent(cancelEvent)) {
dpi.dialog.close();
}
// Tab: remember the direction for the focus trap in handleFocus_.
} else if (event.keyCode === 9) {
this.forwardTab_ = !event.shiftKey;
}
};
/**
* Finds and downgrades any known modal dialogs that are no longer displayed. Dialogs that are
* removed and immediately readded don't stay modal, they become normal.
*
* @param {!Array<!HTMLDialogElement>} removed that have definitely been removed
*/
dialogPolyfill.DialogManager.prototype.checkDOM_ = function(removed) {
// This operates on a clone because it may cause it to change. Each change also calls
// updateStacking, which only actually needs to happen once. But who removes many modal dialogs
// at a time?!
var clone = this.pendingDialogStack.slice();
clone.forEach(function(dpi) {
// Definitely-removed dialogs are force-downgraded; the rest are re-checked.
if (removed.indexOf(dpi.dialog) !== -1) {
dpi.downgradeModal();
} else {
dpi.maybeHideModal();
}
});
};
/**
* @param {!dialogPolyfillInfo} dpi
* @return {boolean} whether the dialog was allowed
*/
dialogPolyfill.DialogManager.prototype.pushDialog = function(dpi) {
// Each open dialog consumes two z-index layers (dialog + backdrop), hence
// the cap derived from the configured z-index budget.
var allowed = (this.zIndexHigh_ - this.zIndexLow_) / 2 - 1;
if (this.pendingDialogStack.length >= allowed) {
return false;
}
// unshift: the stack is newest-first. The first modal installs the
// document-level handlers.
if (this.pendingDialogStack.unshift(dpi) === 1) {
this.blockDocument();
}
this.updateStacking();
return true;
};
/**
* @param {!dialogPolyfillInfo} dpi
*/
dialogPolyfill.DialogManager.prototype.removeDialog = function(dpi) {
var index = this.pendingDialogStack.indexOf(dpi);
// Tolerate dialogs that were never pushed (or already removed).
if (index === -1) { return; }
this.pendingDialogStack.splice(index, 1);
// Last modal gone: tear down the document-level handlers.
if (this.pendingDialogStack.length === 0) {
this.unblockDocument();
}
this.updateStacking();
};
dialogPolyfill.dm = new dialogPolyfill.DialogManager();
dialogPolyfill.formSubmitter = null;
dialogPolyfill.useValue = null;
/**
* Installs global handlers, such as click listers and native method overrides. These are needed
* even if a no dialog is registered, as they deal with <form method="dialog">.
*/
if (window.HTMLDialogElement === undefined) {
/**
* If HTMLFormElement translates method="DIALOG" into 'get', then replace the descriptor with
* one that returns the correct value.
*/
var testForm = document.createElement('form');
testForm.setAttribute('method', 'dialog');
if (testForm.method !== 'dialog') {
var methodDescriptor = Object.getOwnPropertyDescriptor(HTMLFormElement.prototype, 'method');
if (methodDescriptor) {
// nb. Some older iOS and older PhantomJS fail to return the descriptor. Don't do anything
// and don't bother to update the element.
var realGet = methodDescriptor.get;
methodDescriptor.get = function() {
if (isFormMethodDialog(this)) {
return 'dialog';
}
return realGet.call(this);
};
var realSet = methodDescriptor.set;
methodDescriptor.set = function(v) {
if (typeof v === 'string' && v.toLowerCase() === 'dialog') {
return this.setAttribute('method', v);
}
return realSet.call(this, v);
};
Object.defineProperty(HTMLFormElement.prototype, 'method', methodDescriptor);
}
}
/**
* Global 'click' handler, to capture the <input type="submit"> or <button> element which has
* submitted a <form method="dialog">. Needed as Safari and others don't report this inside
* document.activeElement.
*/
document.addEventListener('click', function(ev) {
dialogPolyfill.formSubmitter = null;
dialogPolyfill.useValue = null;
if (ev.defaultPrevented) { return; } // e.g. a submit which prevents default submission
var target = /** @type {Element} */ (ev.target);
if (!target || !isFormMethodDialog(target.form)) { return; }
var valid = (target.type === 'submit' && ['button', 'input'].indexOf(target.localName) > -1);
if (!valid) {
if (!(target.localName === 'input' && target.type === 'image')) { return; }
// this is a <input type="image">, which can submit forms
dialogPolyfill.useValue = ev.offsetX + ',' + ev.offsetY;
}
var dialog = findNearestDialog(target);
if (!dialog) { return; }
dialogPolyfill.formSubmitter = target;
}, false);
/**
* Replace the native HTMLFormElement.submit() method, as it won't fire the
* submit event and give us a chance to respond.
*/
var nativeFormSubmit = HTMLFormElement.prototype.submit;
var replacementFormSubmit = function () {
if (!isFormMethodDialog(this)) {
return nativeFormSubmit.call(this);
}
var dialog = findNearestDialog(this);
dialog && dialog.close();
};
HTMLFormElement.prototype.submit = replacementFormSubmit;
/**
* Global form 'dialog' method handler. Closes a dialog correctly on submit
* and possibly sets its return value.
*/
document.addEventListener('submit', function(ev) {
var form = /** @type {HTMLFormElement} */ (ev.target);
if (!isFormMethodDialog(form)) { return; }
ev.preventDefault();
var dialog = findNearestDialog(form);
if (!dialog) { return; }
// Forms can only be submitted via .submit() or a click (?), but anyway: sanity-check that
// the submitter is correct before using its value as .returnValue.
var s = dialogPolyfill.formSubmitter;
if (s && s.form === form) {
dialog.close(dialogPolyfill.useValue || s.value);
} else {
dialog.close();
}
dialogPolyfill.formSubmitter = null;
}, true);
}
dialogPolyfill['forceRegisterDialog'] = dialogPolyfill.forceRegisterDialog;
dialogPolyfill['registerDialog'] = dialogPolyfill.registerDialog;
if (typeof define === 'function' && 'amd' in define) {
// AMD support
define(function() { return dialogPolyfill; });
} else if (typeof module === 'object' && typeof module['exports'] === 'object') {
// CommonJS support
module['exports'] = dialogPolyfill;
} else {
// all others
window['dialogPolyfill'] = dialogPolyfill;
}
})();

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,536 @@
(function(f){if(typeof exports==="object"&&typeof module!=="undefined"){module.exports=f()}else if(typeof define==="function"&&define.amd){define([],f)}else{var g;if(typeof window!=="undefined"){g=window}else if(typeof global!=="undefined"){g=global}else if(typeof self!=="undefined"){g=self}else{g=this}g.TransactionManager = f()}})(function(){var define,module,exports;return (function e(t,n,r){function s(o,u){if(!n[o]){if(!t[o]){var a=typeof require=="function"&&require;if(!u&&a)return a(o,!0);if(i)return i(o,!0);var f=new Error("Cannot find module '"+o+"'");throw f.code="MODULE_NOT_FOUND",f}var l=n[o]={exports:{}};t[o][0].call(l.exports,function(e){var n=t[o][1][e];return s(n?n:e)},l,l.exports,e,t,n,r)}return n[o].exports}var i=typeof require=="function"&&require;for(var o=0;o<r.length;o++)s(r[o]);return s})({1:[function(require,module,exports){
// Copyright Joyent, Inc. and other Node contributors.
//
// Permission is hereby granted, free of charge, to any person obtaining a
// copy of this software and associated documentation files (the
// "Software"), to deal in the Software without restriction, including
// without limitation the rights to use, copy, modify, merge, publish,
// distribute, sublicense, and/or sell copies of the Software, and to permit
// persons to whom the Software is furnished to do so, subject to the
// following conditions:
//
// The above copyright notice and this permission notice shall be included
// in all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
// USE OR OTHER DEALINGS IN THE SOFTWARE.
function EventEmitter() {
this._events = this._events || {};
this._maxListeners = this._maxListeners || undefined;
}
module.exports = EventEmitter;
// Backwards-compat with node 0.10.x
EventEmitter.EventEmitter = EventEmitter;
EventEmitter.prototype._events = undefined;
EventEmitter.prototype._maxListeners = undefined;
// By default EventEmitters will print a warning if more than 10 listeners are
// added to it. This is a useful default which helps finding memory leaks.
EventEmitter.defaultMaxListeners = 10;
// Obviously not all Emitters should be limited to 10. This function allows
// that to be increased. Set to zero for unlimited.
EventEmitter.prototype.setMaxListeners = function(n) {
if (!isNumber(n) || n < 0 || isNaN(n))
throw TypeError('n must be a positive number');
this._maxListeners = n;
return this;
};
EventEmitter.prototype.emit = function(type) {
var er, handler, len, args, i, listeners;
if (!this._events)
this._events = {};
// If there is no 'error' event listener then throw.
if (type === 'error') {
if (!this._events.error ||
(isObject(this._events.error) && !this._events.error.length)) {
er = arguments[1];
if (er instanceof Error) {
throw er; // Unhandled 'error' event
} else {
// At least give some kind of context to the user
var err = new Error('Uncaught, unspecified "error" event. (' + er + ')');
err.context = er;
throw err;
}
}
}
handler = this._events[type];
if (isUndefined(handler))
return false;
if (isFunction(handler)) {
switch (arguments.length) {
// fast cases
case 1:
handler.call(this);
break;
case 2:
handler.call(this, arguments[1]);
break;
case 3:
handler.call(this, arguments[1], arguments[2]);
break;
// slower
default:
args = Array.prototype.slice.call(arguments, 1);
handler.apply(this, args);
}
} else if (isObject(handler)) {
args = Array.prototype.slice.call(arguments, 1);
listeners = handler.slice();
len = listeners.length;
for (i = 0; i < len; i++)
listeners[i].apply(this, args);
}
return true;
};
EventEmitter.prototype.addListener = function(type, listener) {
var m;
if (!isFunction(listener))
throw TypeError('listener must be a function');
if (!this._events)
this._events = {};
// To avoid recursion in the case that type === "newListener"! Before
// adding it to the listeners, first emit "newListener".
if (this._events.newListener)
this.emit('newListener', type,
isFunction(listener.listener) ?
listener.listener : listener);
if (!this._events[type])
// Optimize the case of one listener. Don't need the extra array object.
this._events[type] = listener;
else if (isObject(this._events[type]))
// If we've already got an array, just append.
this._events[type].push(listener);
else
// Adding the second element, need to change to array.
this._events[type] = [this._events[type], listener];
// Check for listener leak
if (isObject(this._events[type]) && !this._events[type].warned) {
if (!isUndefined(this._maxListeners)) {
m = this._maxListeners;
} else {
m = EventEmitter.defaultMaxListeners;
}
if (m && m > 0 && this._events[type].length > m) {
this._events[type].warned = true;
console.error('(node) warning: possible EventEmitter memory ' +
'leak detected. %d listeners added. ' +
'Use emitter.setMaxListeners() to increase limit.',
this._events[type].length);
if (typeof console.trace === 'function') {
// not supported in IE 10
console.trace();
}
}
}
return this;
};
EventEmitter.prototype.on = EventEmitter.prototype.addListener;
EventEmitter.prototype.once = function(type, listener) {
if (!isFunction(listener))
throw TypeError('listener must be a function');
var fired = false;
function g() {
this.removeListener(type, g);
if (!fired) {
fired = true;
listener.apply(this, arguments);
}
}
g.listener = listener;
this.on(type, g);
return this;
};
// emits a 'removeListener' event iff the listener was removed
// Handles both storage shapes (single bare listener or array of
// listeners) and also matches `once` wrappers via their `.listener`.
EventEmitter.prototype.removeListener = function(type, listener) {
  var list, position, length, i;
  if (!isFunction(listener))
    throw TypeError('listener must be a function');
  if (!this._events || !this._events[type])
    return this;
  list = this._events[type];
  length = list.length;
  position = -1;
  // Single-listener case: drop the whole entry
  if (list === listener ||
      (isFunction(list.listener) && list.listener === listener)) {
    delete this._events[type];
    if (this._events.removeListener)
      this.emit('removeListener', type, listener);
  } else if (isObject(list)) {
    // Array case: search backwards for the last matching listener
    for (i = length; i-- > 0;) {
      if (list[i] === listener ||
          (list[i].listener && list[i].listener === listener)) {
        position = i;
        break;
      }
    }
    if (position < 0)
      return this;
    if (list.length === 1) {
      list.length = 0;
      delete this._events[type];
    } else {
      list.splice(position, 1);
    }
    if (this._events.removeListener)
      this.emit('removeListener', type, listener);
  }
  return this; // chainable
};
// Remove every listener for `type`, or for all types when called with no
// arguments. Emits 'removeListener' per listener when someone listens for it.
EventEmitter.prototype.removeAllListeners = function(type) {
  var key, listeners;
  if (!this._events)
    return this;
  // not listening for removeListener, no need to emit
  if (!this._events.removeListener) {
    if (arguments.length === 0)
      this._events = {};
    else if (this._events[type])
      delete this._events[type];
    return this;
  }
  // emit removeListener for all listeners on all events
  if (arguments.length === 0) {
    for (key in this._events) {
      if (key === 'removeListener') continue;
      this.removeAllListeners(key);
    }
    // 'removeListener' itself goes last so earlier removals still emit
    this.removeAllListeners('removeListener');
    this._events = {};
    return this;
  }
  listeners = this._events[type];
  if (isFunction(listeners)) {
    this.removeListener(type, listeners);
  } else if (listeners) {
    // LIFO order
    while (listeners.length)
      this.removeListener(type, listeners[listeners.length - 1]);
  }
  delete this._events[type];
  return this; // chainable
};
// Return a snapshot array of the listeners registered for `type`
// (always a fresh array; mutating it does not affect the emitter).
EventEmitter.prototype.listeners = function(type) {
  var handler = this._events && this._events[type];
  if (!handler)
    return [];
  if (isFunction(handler))
    return [handler];
  return handler.slice();
};
// Number of listeners currently registered for `type`.
EventEmitter.prototype.listenerCount = function(type) {
  var handler = this._events && this._events[type];
  if (!handler)
    return 0;
  return isFunction(handler) ? 1 : handler.length;
};
// Deprecated static form, kept for compatibility:
// EventEmitter.listenerCount(emitter, type) delegates to the instance method.
EventEmitter.listenerCount = function(emitter, type) {
  return emitter.listenerCount(type);
};
// True when `arg` is callable.
function isFunction(arg) {
  var kind = typeof arg;
  return kind === 'function';
}
// True when `arg` is a primitive number (NaN included, per typeof).
function isNumber(arg) {
  var kind = typeof arg;
  return kind === 'number';
}
// True for any non-null object (arrays included); note typeof null
// is 'object', hence the explicit null check.
function isObject(arg) {
  if (arg === null)
    return false;
  return typeof arg === 'object';
}
// True only for the undefined value itself (null does not match).
function isUndefined(arg) {
  return arg === undefined;
}
},{}],2:[function(require,module,exports){
"use strict";
const EventEmitter = require('events');
// Scoped view over a TransactionManager: commands/events sent through a
// Namespace carry its name, and incoming messages for that name are
// emitted on it instead of on the manager.
class Namespace extends EventEmitter
{
	// namespace: string identifier; tm: owning TransactionManager
	constructor(namespace,tm)
	{
		super();
		this.namespace = namespace;
		this.tm = tm;
	}
	// Send a command scoped to this namespace; resolves with the response.
	cmd(name,data)
	{
		return this.tm.cmd(name,data,this.namespace);
	}
	// Send a fire-and-forget event scoped to this namespace.
	event(name,data)
	{
		return this.tm.event(name,data,this.namespace);
	}
	// Detach from the manager; returns true if it was registered.
	close()
	{
		return this.tm.namespaces.delete(this.namespace);
	}
};
// Request/response and event multiplexer on top of a message transport
// (Node ws-style emitter or browser WebSocket). Outgoing commands are
// tracked by incremental transId until the matching response/error arrives.
class TransactionManager extends EventEmitter
{
	// transport: object exposing send() plus either addListener/removeListener
	// or addEventListener/removeEventListener for "message" events.
	constructor(transport)
	{
		super();
		this.maxId = 0;                // incremental transaction id
		this.namespaces = new Map();   // namespace name -> Namespace
		this.transactions = new Map(); // transId -> pending cmd (with resolve/reject)
		this.transport = transport;
		//Message event listener
		this.listener = (msg) => {
			//Process message (msg.utf8Data for node ws, msg.data for browser)
			var message = JSON.parse(msg.utf8Data || msg.data);
			//Check type
			switch(message.type)
			{
				case "cmd" :
				{
					//Create command with accept/reject helpers that answer the peer
					const cmd = {
						name      : message.name,
						data      : message.data,
						namespace : message.namespace,
						accept    : (data) => {
							//Send response back
							transport.send(JSON.stringify ({
								type	: "response",
								transId	: message.transId,
								data	: data
							}));
						},
						reject    : (data) => {
							//Send error back
							transport.send(JSON.stringify ({
								type	: "error",
								transId	: message.transId,
								data	: data
							}));
						}
					};
					//Route to the namespace listeners if present, else to ourselves
					const namespace = cmd.namespace ? this.namespaces.get(cmd.namespace) : null;
					if (namespace)
						//trigger event only on namespace
						namespace.emit("cmd",cmd);
					else
						//Launch event on main event handler
						this.emit("cmd",cmd);
					break;
				}
				case "response":
				{
					//Get pending transaction
					const transaction = this.transactions.get(message.transId);
					if (!transaction)
						return;
					//delete transaction
					this.transactions.delete(message.transId);
					//Accept: resolve the pending promise
					transaction.resolve(message.data);
					break;
				}
				case "error":
				{
					//Get pending transaction
					const transaction = this.transactions.get(message.transId);
					if (!transaction)
						return;
					//delete transaction
					this.transactions.delete(message.transId);
					//Reject the pending promise
					transaction.reject(message.data);
					break;
				}
				case "event":
				{
					//Create event object
					const event = {
						name      : message.name,
						data      : message.data,
						namespace : message.namespace,
					};
					//Route to the namespace listeners if present, else to ourselves
					const namespace = event.namespace ? this.namespaces.get(event.namespace) : null;
					if (namespace)
						//trigger event
						namespace.emit("event",event);
					else
						//Launch event on main event handler
						this.emit("event",event);
					break;
				}
			}
		};
		//Attach to transport (EventEmitter-style or EventTarget-style)
		this.transport.addListener ? this.transport.addListener("message",this.listener) : this.transport.addEventListener("message",this.listener);
	}
	// Send a command and wait for the matching response/error.
	// Returns a Promise resolved with the response data or rejected with
	// the error data (or a send failure).
	cmd(name,data,namespace)
	{
		return new Promise((resolve,reject) => {
			//Check name is correct (throwing here rejects the returned promise)
			if (!name || name.length===0)
				throw new Error("Bad command name");
			//Create command
			const cmd = {
				type	: "cmd",
				transId	: this.maxId++,
				name	: name,
				data	: data
			};
			//Scope to namespace if given
			if (namespace)
				cmd.namespace = namespace;
			//Serialize before attaching the non-serializable callbacks
			const json = JSON.stringify(cmd);
			//Add callbacks
			cmd.resolve = resolve;
			cmd.reject  = reject;
			//Track the pending transaction
			this.transactions.set(cmd.transId,cmd);
			try {
				//Send json
				this.transport.send(json);
			} catch (e) {
				//delete transaction
				this.transactions.delete(cmd.transId);
				//rethrow (rejects the promise)
				throw e;
			}
		});
	}
	// Send a fire-and-forget event (no response expected).
	event(name,data,namespace)
	{
		//Check name is correct
		if (!name || name.length===0)
			throw new Error("Bad event name");
		//Create event
		const event = {
			type	: "event",
			name	: name,
			data	: data
		};
		//Scope to namespace if given
		if (namespace)
			event.namespace = namespace;
		//Serialize and send
		const json = JSON.stringify(event);
		return this.transport.send(json);
	}
	// Get (or lazily create and register) the Namespace object for `ns`.
	namespace(ns)
	{
		//Check if we already have it
		let namespace = this.namespaces.get(ns);
		//If already have it
		if (namespace) return namespace;
		//Create one instead
		namespace = new Namespace(ns,this);
		//Store it
		this.namespaces.set(ns, namespace);
		//ok
		return namespace;
	}
	// Detach from the transport and drop all namespaces.
	close()
	{
		//Erase namespaces — fix: iterate the `namespaces` Map (the original
		//called this.namespace.values(), i.e. the method, which always threw)
		for (const ns of this.namespaces.values())
			//terminate it
			ns.close();
		//remove listener (EventEmitter-style or EventTarget-style transport)
		this.transport.removeListener ? this.transport.removeListener("message",this.listener) : this.transport.removeEventListener("message",this.listener);
	}
};
module.exports = TransactionManager;
},{"events":1}]},{},[2])(2)
});

View File

@@ -0,0 +1,193 @@
<html>
<head>
<link rel="stylesheet" href="https://fonts.googleapis.com/css?family=Roboto:300,400,500,700" type="text/css">
<link rel="stylesheet" href="https://fonts.googleapis.com/icon?family=Material+Icons">
<link rel="stylesheet" href="https://code.getmdl.io/1.3.0/material.indigo-pink.min.css">
<script defer src="https://code.getmdl.io/1.3.0/material.min.js"></script>
<style>
html {
zoom: 90%;
}
body {
background: #e2e1e0;
text-align: center;
margin: 0px;
padding: 0px;
font-size: 9px;
color: #555;
font-family: Roboto;
text-align: -webkit-center;
}
table {
margin: 10px;
position: relative;
left: -40px;
}
video {
object-fit: cover;
float: left;
background: #fff;
border-radius: 2px;
display: inline-block;
margin: 1rem;
position: relative;
width: 420px;
box-shadow: 0 1px 3px rgba(0,0,0,0.12), 0 1px 2px rgba(0,0,0,0.24);
transition: all 0.5s cubic-bezier(.25,.8,.25,1);
padding:1px;
bottom: 0px;
height: 315px;
}
#container {
top: 10px;
left: 10px;
margin: 0px;
padding: 0px;
width: 900px;
}
.container-video {
width: 50%;
float: left;
display: inline-block;
position: relative;
font-size: 24px;
top: 5px;
margin-bottom: 10px;
}
.gaugeChartContainer {
position: relative;
width: 190px;
height: 120px;
float: left;
padding: 10px;
}
.gaugeChart {
position: relative;
text-align: center;
}
.gaugeChart canvas {
position: absolute;
top: 0;
left: 0;
z-index: 0;
}
.gaugeChartLabel {
display: inline-block;
position: absolute;
float: left;
left: 0;
top: 55px;
width: 100%;
text-align: center;
color: #FFFFFF;
font-size: 24px;
font-weight: bold;
z-index: 1;
text-shadow: #333 0px 0px 2px;
}
.gaugeChartContainer {
position: relative;
font-size: 9px;
}
.gaugeChartTitle {
display: inline-block;
position: absolute;
float: left;
top: 0px;
left: 0;
width: 100%;
text-align: center;
color: #888;
font-weight: bold;
font-size: 12px;
}
.gaugeChartMin {
display: inline-block;
position: absolute;
float: left;
left: 0;
bottom: 10%;
width: 92%;
margin-left: 8%;
text-align: left;
color: #888;
font-weight: bold;
}
.gaugeChartMax {
display: inline-block;
position: absolute;
float: left;
left: 0;
bottom: 10%;
width: 95%;
margin-right: 5%;
text-align: right;
color: #888;
font-weight: bold;
}
td {
margin: 5px;
padding: 5px;
text-align: center;
}
.ready-dialog
{
width: 780px;
text-align: left;
}
.ready-dialog p
{
color: black;
font-size: 12pt;
}
.ready-dialog code
{
font-size: 12pt;
}
</style>
<script src="../js/gauge.min.js" type="text/javascript"></script>
</head>
<body>
<div id="container">
<div class="container-video">REMOTE
<video id="remote"></video>
</div>
<dialog class="ready-dialog mdl-dialog">
<h4 class="mdl-dialog__title">Ready to test mp4 playback?</h4>
<div class="mdl-dialog__content">
<p>
This demo allows you to test the mp4 playback capabilities of the Medooze Media Server.
</p>
<p>
It will play back the latest mp4 file recorded with the recording demo and stored in the recordings directory.
</p>
</div>
<div class="mdl-dialog__actions">
<button type="button" class="ready mdl-button mdl-button mdl-button--raised mdl-button--accent">Ready!</button>
</div>
</dialog>
</div>
</body>
</html>
<script src="js/playback.js" type="text/javascript"></script>

View File

@@ -0,0 +1,134 @@
// Signalling endpoint: same host/port as the page, over secure WebSocket.
const url = "wss://"+window.location.hostname+":"+window.location.port;
// Unique (timestamp + random) room identifier for this session.
const roomId = (new Date()).getTime() + "-" + Math.random();
// Attach a MediaStream to the pre-created <video> element:
// #local when muted (the user's own capture), #remote otherwise.
function addVideoForStream(stream,muted)
{
	const selector = muted ? "#local" : "#remote";
	const video = document.querySelector(selector);
	// Mirror the stream id on the element so it can be looked up later
	video.id = stream.id;
	video.srcObject = stream;
	video.autoplay = true;
	video.muted = muted;
}
// Fade out and then remove the <video> element bound to `stream`.
function removeVideoForStream(stream)
{
	const video = document.getElementById(stream.id);
	// Once the CSS transition finishes, detach the element from the DOM
	video.addEventListener('webkitTransitionEnd', function () {
		video.parentElement.removeChild(video);
	});
	// Trigger the transition by disabling the element first
	video.className = "disabled";
}
var sdp; // local SDP offer (kept for logging)
var pc;  // the RTCPeerConnection for this session
// Open the signalling WebSocket ("playback" subprotocol), negotiate a
// receive-only peer connection and ask the server to PLAY once connected.
function connect()
{
	if (window.RTCPeerConnection)
		pc = new RTCPeerConnection({
			bundlePolicy: "max-bundle",
			rtcpMuxPolicy : "require"
		});
	else
		// Legacy prefixed fallback
		pc = new webkitRTCPeerConnection(null);
	var ws = new WebSocket(url,"playback");
	pc.onaddstream = function(event) {
		var prev = 0;
		console.debug("onAddStream",event);
		//Play it
		addVideoForStream(event.stream);
	};
	pc.onremovestream = function(event) {
		console.debug("onRemoveStream",event);
		//Remove its video element
		removeVideoForStream(event.stream);
	};
	ws.onopen = function(){
		console.log("opened");
		//Create new offer (recvonly: we only consume the played media)
		pc.createOffer({
			offerToReceiveAudio: true,
			offerToReceiveVideo: true
		})
		.then(function(offer){
			console.debug("createOffer sucess",offer);
			//We have sdp
			sdp = offer.sdp;
			//Set it
			pc.setLocalDescription(offer);
			console.log(sdp);
			//Send the offer over signalling
			ws.send(JSON.stringify({
				cmd		: "OFFER",
				offer		: sdp
			}));
		})
		.catch(function(error){
			console.error("Error",error);
		});
	};
	ws.onmessage = function(event){
		console.log(event);
		//Get protocol message
		const msg = JSON.parse(event.data);
		console.log(msg.answer);
		//Apply the server's answer (legacy callback form)
		pc.setRemoteDescription(new RTCSessionDescription({
				type:'answer',
				sdp: msg.answer
			}), function () {
				console.log("JOINED");
			}, function (err) {
				console.error("Error joining",err);
			}
		);
		//NOTE(review): this listener is registered per incoming message;
		//fine while the server sends a single answer — confirm protocol.
		pc.addEventListener("connectionstatechange",(event)=>{
			if (pc.connectionState=="connected")
			{
				console.log("CONNECTED");
				//Start playing
				ws.send(JSON.stringify({
					cmd		: "PLAY"
				}));
			}
		});
	};
}
// Show the intro dialog where <dialog> is supported; in browsers without
// showModal just connect immediately.
var dialog = document.querySelector('dialog');
if (!dialog.showModal)
{
	connect();
}
else
{
	dialog.showModal();
	// Start the session once the user clicks "Ready!"
	dialog.querySelector('.ready').addEventListener('click', function () {
		dialog.close();
		connect();
	});
}

View File

@@ -0,0 +1,197 @@
<html>
<head>
<link rel="stylesheet" href="https://fonts.googleapis.com/css?family=Roboto:300,400,500,700" type="text/css">
<link rel="stylesheet" href="https://fonts.googleapis.com/icon?family=Material+Icons">
<link rel="stylesheet" href="https://code.getmdl.io/1.3.0/material.indigo-pink.min.css">
<script defer src="https://code.getmdl.io/1.3.0/material.min.js"></script>
<script src="../js/dialog-polyfill.js" type="text/javascript"></script>
<style>
html {
zoom: 90%;
}
body {
background: #e2e1e0;
text-align: center;
margin: 0px;
padding: 0px;
font-size: 9px;
color: #555;
font-family: Roboto;
text-align: -webkit-center;
}
table {
margin: 10px;
position: relative;
left: -40px;
}
video {
object-fit: cover;
float: left;
background: #fff;
border-radius: 2px;
display: inline-block;
margin: 1rem;
position: relative;
width: 420px;
box-shadow: 0 1px 3px rgba(0,0,0,0.12), 0 1px 2px rgba(0,0,0,0.24);
transition: all 0.5s cubic-bezier(.25,.8,.25,1);
padding:1px;
bottom: 0px;
height: 315px;
}
#container {
top: 10px;
left: 10px;
margin: 0px;
padding: 0px;
width: 900px;
}
.container-video {
width: 50%;
float: left;
display: inline-block;
position: relative;
font-size: 24px;
top: 5px;
margin-bottom: 10px;
}
.gaugeChartContainer {
position: relative;
width: 190px;
height: 120px;
float: left;
padding: 10px;
}
.gaugeChart {
position: relative;
text-align: center;
}
.gaugeChart canvas {
position: absolute;
top: 0;
left: 0;
z-index: 0;
}
.gaugeChartLabel {
display: inline-block;
position: absolute;
float: left;
left: 0;
top: 55px;
width: 100%;
text-align: center;
color: #FFFFFF;
font-size: 24px;
font-weight: bold;
z-index: 1;
text-shadow: #333 0px 0px 2px;
}
.gaugeChartContainer {
position: relative;
font-size: 9px;
}
.gaugeChartTitle {
display: inline-block;
position: absolute;
float: left;
top: 0px;
left: 0;
width: 100%;
text-align: center;
color: #888;
font-weight: bold;
font-size: 12px;
}
.gaugeChartMin {
display: inline-block;
position: absolute;
float: left;
left: 0;
bottom: 10%;
width: 92%;
margin-left: 8%;
text-align: left;
color: #888;
font-weight: bold;
}
.gaugeChartMax {
display: inline-block;
position: absolute;
float: left;
left: 0;
bottom: 10%;
width: 95%;
margin-right: 5%;
text-align: right;
color: #888;
font-weight: bold;
}
td {
margin: 5px;
padding: 5px;
text-align: center;
}
.ready-dialog
{
width: 780px;
text-align: left;
}
.ready-dialog p
{
color: black;
font-size: 12pt;
}
.ready-dialog code
{
font-size: 12pt;
}
</style>
<script src="../js/gauge.min.js" type="text/javascript"></script>
</head>
<body>
<div id="container">
<div class="container-video">LOCAL
<video id="local"></video>
</div>
<div class="container-video">REMOTE
<video id="remote"></video>
</div>
<dialog class="ready-dialog mdl-dialog">
<h4 class="mdl-dialog__title">Ready to test MP4 recording?</h4>
<div class="mdl-dialog__content">
<p>
This demo allows you to test the MP4 Recording of the Medooze Media Server.
</p>
<p>
The call will be recorded on an mp4 file located at the /tmp folder of the server you are running the Medooze Media Server.
</p>
</div>
<div class="mdl-dialog__actions">
<button type="button" class="ready mdl-button mdl-button mdl-button--raised mdl-button--accent">Ready!</button>
</div>
</dialog>
</div>
</body>
</html>
<script src="js/rec.js" type="text/javascript"></script>

View File

@@ -0,0 +1,159 @@
// Signalling endpoint: same host/port as the page, over secure WebSocket.
const url = "wss://"+window.location.hostname+":"+window.location.port;
// getUserMedia video constraint: true (any camera), an exact-resolution
// preset chosen via the ?video= query parameter, or false (no video).
let videoResolution = true;
//Get our url
const href = new URL (window.location.href);
if (href.searchParams.has ("video"))
{
	// Query value -> exact-resolution constraint (or false to disable video)
	const presets = {
		"1080p"	: { width: {min: 1920, max: 1920}, height: {min: 1080, max: 1080} },
		"720p"	: { width: {min: 1280, max: 1280}, height: {min: 720,  max: 720 } },
		"576p"	: { width: {min: 720,  max: 720 }, height: {min: 576,  max: 576 } },
		"480p"	: { width: {min: 640,  max: 640 }, height: {min: 480,  max: 480 } },
		"no"	: false
	};
	const wanted = href.searchParams.get ("video").toLowerCase ();
	// Unknown values keep the default `true`
	if (wanted in presets)
		videoResolution = presets[wanted];
}
// Unique (timestamp + random) room identifier for this session.
const roomId = (new Date()).getTime() + "-" + Math.random();
var texts = document.querySelectorAll('.gaugeChartLabel');
// Attach a MediaStream to the pre-created <video> element:
// #local when muted (the user's own capture), #remote otherwise.
function addVideoForStream(stream,muted)
{
	const selector = muted ? "#local" : "#remote";
	const video = document.querySelector(selector);
	video.srcObject = stream;
	video.autoplay = true;
	video.muted = muted;
}
// Fade out and then remove the <video> element bound to `stream`.
function removeVideoForStream(stream)
{
	const video = document.getElementById(stream.id);
	// Once the CSS transition finishes, detach the element from the DOM
	video.addEventListener('webkitTransitionEnd', function () {
		video.parentElement.removeChild(video);
	});
	// Trigger the transition by disabling the element first
	video.className = "disabled";
}
var sdp; // local SDP offer (kept for logging)
var pc;  // the RTCPeerConnection for this session
// Open the signalling WebSocket ("rec" subprotocol), capture mic+camera,
// send our SDP offer and apply the server's answer; the server records
// the media to an mp4 file.
function connect()
{
	pc = new RTCPeerConnection(null);
	var ws = new WebSocket(url,"rec");
	pc.onaddstream = function(event) {
		var prev = 0;
		console.debug("onAddStream",event);
		//Play it
		addVideoForStream(event.stream);
	};
	pc.onremovestream = function(event) {
		console.debug("onRemoveStream",event);
		//Remove its video element
		removeVideoForStream(event.stream);
	};
	ws.onopen = function(){
		console.log("opened");
		//Capture local audio (+video per the chosen resolution constraint)
		navigator.mediaDevices.getUserMedia({
			audio: true,
			video: videoResolution
		})
		.then(function(stream){
			var prev = 0;
			console.debug("getUserMedia sucess",stream);
			//Play it locally (muted)
			addVideoForStream(stream,true);
			window.s = stream;
			//Add stream to peer connection
			pc.addStream(stream);
			//Create new offer
			return pc.createOffer(stream);
		})
		.then(function(offer){
			console.debug("createOffer sucess",offer);
			//We have sdp
			sdp = offer.sdp;
			//Set it
			pc.setLocalDescription(offer);
			console.log(sdp);
			//Send the offer over signalling
			ws.send(JSON.stringify({
				cmd		: "OFFER",
				offer		: sdp
			}));
		})
		.catch(function(error){
			console.error("Error",error);
			alert(error);
		});
	};
	ws.onmessage = function(event){
		console.log(event);
		//Get protocol message
		const msg = JSON.parse(event.data);
		console.log(msg.answer);
		//Apply the server's answer (legacy callback form)
		pc.setRemoteDescription(new RTCSessionDescription({
				type:'answer',
				sdp: msg.answer
			}), function () {
				console.log("JOINED");
			}, function (err) {
				console.error("Error joining",err);
			}
		);
	};
}
// Show the intro dialog where <dialog> is supported; in browsers without
// showModal just connect immediately.
var dialog = document.querySelector('dialog');
if (!dialog.showModal)
{
	connect();
}
else
{
	dialog.showModal();
	// Start the session once the user clicks "Ready!"
	dialog.querySelector('.ready').addEventListener('click', function () {
		dialog.close();
		connect();
	});
}

View File

@@ -0,0 +1,286 @@
<html>
<head>
<link rel="stylesheet" href="https://fonts.googleapis.com/css?family=Roboto:300,400,500,700" type="text/css">
<link rel="stylesheet" href="https://fonts.googleapis.com/icon?family=Material+Icons">
<link rel="stylesheet" href="https://code.getmdl.io/1.3.0/material.indigo-pink.min.css">
<script defer src="https://code.getmdl.io/1.3.0/material.min.js"></script>
<script src="../js/dialog-polyfill.js" type="text/javascript"></script>
<style>
html {
zoom: 90%;
}
body {
background: #e2e1e0;
text-align: center;
margin: 0px;
padding: 0px;
font-size: 9px;
color: #555;
font-family: Roboto;
text-align: -webkit-center;
}
table {
margin: 10px;
position: relative;
}
video {
object-fit: cover;
float: left;
background: #fff;
border-radius: 2px;
display: inline-block;
margin: 1rem;
position: relative;
width: 420px;
box-shadow: 0 1px 3px rgba(0,0,0,0.12), 0 1px 2px rgba(0,0,0,0.24);
transition: all 0.5s cubic-bezier(.25,.8,.25,1);
padding:1px;
bottom: 0px;
height: 315px;
}
#container {
top: 10px;
left: 10px;
margin: 0px;
padding: 0px;
width: 900px;
}
.container-video {
width: 50%;
float: left;
display: inline-block;
position: relative;
font-size: 24px;
top: 5px;
margin-bottom: 10px;
}
.gaugeChartContainer {
position: relative;
width: 190px;
height: 120px;
float: left;
padding: 10px;
}
.gaugeChart {
position: relative;
text-align: center;
}
.gaugeChart canvas {
position: absolute;
top: 0;
left: 0;
z-index: 0;
}
.gaugeChartLabel {
display: inline-block;
position: absolute;
float: left;
left: 0;
top: 55px;
width: 100%;
text-align: center;
color: #FFFFFF;
font-size: 24px;
font-weight: bold;
z-index: 1;
text-shadow: #333 0px 0px 2px;
}
.gaugeChartContainer {
position: relative;
font-size: 9px;
}
.gaugeChartTitle {
display: inline-block;
position: absolute;
float: left;
top: 0px;
left: 0;
width: 100%;
text-align: center;
color: #888;
font-weight: bold;
font-size: 12px;
}
.gaugeChartMin {
display: inline-block;
position: absolute;
float: left;
left: 0;
bottom: 10%;
width: 92%;
margin-left: 8%;
text-align: left;
color: #888;
font-weight: bold;
}
.gaugeChartMax {
display: inline-block;
position: absolute;
float: left;
left: 0;
bottom: 10%;
width: 95%;
margin-right: 5%;
text-align: right;
color: #888;
font-weight: bold;
}
td {
margin: 5px;
padding: 5px;
text-align: center;
}
.ready-dialog
{
width: 780px;
text-align: left;
}
.ready-dialog p
{
color: black;
font-size: 12pt;
}
.ready-dialog code
{
font-size: 12pt;
}
</style>
<script src="../js/gauge.min.js" type="text/javascript"></script>
</head>
<body>
<div id="container">
<div class="container-video">LOCAL
<video id="local"></video>
</div>
<div class="container-video">REMOTE
<video id="remote"></video>
</div>
<table>
<tr>
<td></td>
<td>FPS/4</td>
<td>FPS/2</td>
<td>FPS</td>
</tr>
<tr data-rid="a">
<td>High</td>
<td><button data-rid="a" data-tid="0" class="mdl-button mdl-js-button mdl-button--raised">Layer RID A T0</button></td>
<td><button data-rid="a" data-tid="1" class="mdl-button mdl-js-button mdl-button--raised">Layer RID A T1</button></td>
<td><button data-rid="a" data-tid="2" class="mdl-button mdl-js-button mdl-button--raised">Layer RID A T2</button></td>
</tr>
<tr data-rid="b">
<td>Medium</td>
<td><button data-rid="b" data-tid="0" class="mdl-button mdl-js-button mdl-button--raised">Layer RID B T0</button></td>
<td><button data-rid="b" data-tid="1" class="mdl-button mdl-js-button mdl-button--raised">Layer RID B T1</button></td>
<td><button data-rid="b" data-tid="2" class="mdl-button mdl-js-button mdl-button--raised">Layer RID B T2</button></td>
</tr>
<tr data-rid="c">
<td>Low</td>
<td><button data-rid="c" data-tid="0" class="mdl-button mdl-js-button mdl-button--raised">Layer RID C T0</button></td>
<td><button data-rid="c" data-tid="1" class="mdl-button mdl-js-button mdl-button--raised">Layer RID C T1</button></td>
<td><button data-rid="c" data-tid="2" class="mdl-button mdl-js-button mdl-button--raised mdl-button--colored">Layer RID C T2</button></td>
</tr>
</table>
<div class="gaugeChartContainer">
<canvas id="g_s_w" class="gaugeChart" width="192" height="135" style="width: 160px; height: 100px;"></canvas>
<span id="s_s_w" class="gaugeChartLabel">- -</span>
<span class="gaugeChartTitle">Sent Width</span>
<span class="gaugeChartMin">0</span>
<span class="gaugeChartMax">640</span>
</div>
<div class="gaugeChartContainer">
<canvas id="g_s_h" class="gaugeChart" width="192" height="135" style="width: 160px; height: 100px;"></canvas>
<span id="s_s_h" class="gaugeChartLabel">- -</span>
<span class="gaugeChartTitle">Sent Height</span>
<span class="gaugeChartMin">0</span>
<span class="gaugeChartMax">480</span>
</div>
<div class="gaugeChartContainer">
<canvas id="g_s_f" class="gaugeChart" width="192" height="135" style="width: 160px; height: 100px;"></canvas>
<span id="s_s_f" class="gaugeChartLabel">- -</span>
<span class="gaugeChartTitle">Sent FPS</span>
<span class="gaugeChartMin">0</span>
<span class="gaugeChartMax">30</span>
</div>
<div class="gaugeChartContainer">
<canvas id="g_s_b" class="gaugeChart" width="192" height="135" style="width: 160px; height: 100px;"></canvas>
<span id="s_s_b" class="gaugeChartLabel">- -</span>
<span class="gaugeChartTitle">Sent kbps</span>
<span class="gaugeChartMin">0</span>
<span class="gaugeChartMax">1024</span>
</div>
<div class="gaugeChartContainer">
<canvas id="r_s_w" class="gaugeChart" width="192" height="135" style="width: 160px; height: 100px;"></canvas>
<span id="s_s_w" class="gaugeChartLabel">- -</span>
<span class="gaugeChartTitle">Recv Width</span>
<span class="gaugeChartMin">0</span>
<span class="gaugeChartMax">640</span>
</div>
<div class="gaugeChartContainer">
<canvas id="r_s_h" class="gaugeChart" width="192" height="135" style="width: 160px; height: 100px;"></canvas>
<span id="s_s_h" class="gaugeChartLabel">- -</span>
<span class="gaugeChartTitle">Recv Height</span>
<span class="gaugeChartMin">0</span>
<span class="gaugeChartMax">480</span>
</div>
<div class="gaugeChartContainer">
<canvas id="r_s_f" class="gaugeChart" width="192" height="135" style="width: 160px; height: 100px;"></canvas>
<span id="s_s_f" class="gaugeChartLabel">- -</span>
<span class="gaugeChartTitle">Recv FPS</span>
<span class="gaugeChartMin">0</span>
<span class="gaugeChartMax">30</span>
</div>
<div class="gaugeChartContainer">
<canvas id="r_s_b" class="gaugeChart" width="192" height="135" style="width: 160px; height: 100px;"></canvas>
<span id="s_s_b" class="gaugeChartLabel">- -</span>
<span class="gaugeChartTitle">Recv kbps</span>
<span class="gaugeChartMin">0</span>
<span class="gaugeChartMax">1024</span>
</div>
</div>
<dialog class="ready-dialog mdl-dialog">
<h4 class="mdl-dialog__title">Ready to test VP8 Simulcast layer selection?</h4>
<div class="mdl-dialog__content">
<p>
This demo allows you to test the VP8 Simulcast layer selection.
</p>
<p>
This uses standard WebRTC rid/encodings api, so it only works on firefox currently
</p>
</div>
<div class="mdl-dialog__actions">
<button type="button" class="ready mdl-button mdl-button mdl-button--raised mdl-button--accent">Ready!</button>
</div>
</dialog>
</body>
</html>
<script src="js/simulcast.js" type="text/javascript"></script>

View File

@@ -0,0 +1,427 @@
// Signalling endpoint: same host/port as the page, over secure WebSocket.
const url = "wss://"+window.location.hostname+":"+window.location.port;
// getUserMedia video constraint: true (any camera), an exact-resolution
// preset chosen via the ?video= query parameter, or false (no video).
let videoResolution = true;
//Get our url
const href = new URL (window.location.href);
if (href.searchParams.has ("video"))
{
	// Query value -> exact-resolution constraint (or false to disable video)
	const presets = {
		"1080p"	: { width: {min: 1920, max: 1920}, height: {min: 1080, max: 1080} },
		"720p"	: { width: {min: 1280, max: 1280}, height: {min: 720,  max: 720 } },
		"576p"	: { width: {min: 720,  max: 720 }, height: {min: 576,  max: 576 } },
		"480p"	: { width: {min: 640,  max: 640 }, height: {min: 480,  max: 480 } },
		"no"	: false
	};
	const wanted = href.searchParams.get ("video").toLowerCase ();
	// Unknown values keep the default `true`
	if (wanted in presets)
		videoResolution = presets[wanted];
}
// Gauge.js rendering options shared by all eight charts.
var opts = {
	lines: 12, // The number of lines to draw
	angle: 0.15, // The length of each line
	lineWidth: 0.44, // 0.44 The line thickness
	pointer: {
		length: 0.8, // 0.9 The radius of the inner circle
		strokeWidth: 0.035, // The rotation offset
		color: '#A0A0A0' // Fill color
	},
	limitMax: true,
	colorStart: '#28c1d1', // Colors
	colorStop: '#28c1d1', // just experiment with them
	strokeColor: '#F0F0F0', // to see which ones work best for you
	generateGradient: false,
	gradientType: 0
};
var targets = document.querySelectorAll('.gaugeChart'); // your canvas element
var gauges = [];
// Instantiate one gauge per canvas, all starting at 0
for (var i=0;i<targets.length;++i)
{
	gauges[i] = new Gauge(targets[i]).setOptions (opts); // create sexy gauge!
	gauges[i].animationSpeed = 10000; // set animation speed (32 is default value)
	gauges[i].set (0); // set actual value
}
var texts = document.querySelectorAll('.gaugeChartLabel');
var max = document.querySelectorAll('.gaugeChartMax');
// Gauge maxima: indices 0-3 are the send side (width/height/fps/kbps),
// 4-7 the receive side; resolution maxima follow the chosen preset.
max[0].innerText = gauges[0].maxValue = videoResolution.width ? videoResolution.width.max : 640;
max[1].innerText = gauges[1].maxValue = videoResolution.height ? videoResolution.height.max : 480;
max[2].innerText = gauges[2].maxValue = 30;
max[3].innerText = gauges[3].maxValue = 2048;
max[4].innerText = gauges[4].maxValue = videoResolution.width ? videoResolution.width.max : 640;
max[5].innerText = gauges[5].maxValue = videoResolution.height ? videoResolution.height.max : 480;
max[6].innerText = gauges[6].maxValue = 30;
max[7].innerText = gauges[7].maxValue = 2048;
var ssrcs; // NOTE(review): appears unused in the visible code — confirm
// Attach a MediaStream to the pre-created <video> element:
// #local when muted (the user's own capture), #remote otherwise.
function addVideoForStream(stream,muted)
{
	const selector = muted ? "#local" : "#remote";
	const video = document.querySelector(selector);
	// Keep the stream id on a custom property (not the element id)
	video.streamid = stream.id;
	video.srcObject = stream;
	video.autoplay = true;
	video.muted = muted;
}
// Promise adapter around the legacy callback-based navigator.getUserMedia:
// resolves with the MediaStream, rejects with the capture error.
function getUserMedia(constrains)
{
	return new Promise(function (resolve, reject) {
		navigator.getUserMedia(constrains, resolve, reject);
	});
}
var pc; // the RTCPeerConnection for this session
// Set when the offer used the draft-03 "send rid=" simulcast syntax and
// was rewritten, so the answer must be converted back (see connect()).
let simulcast_03 = false;
// NOTE(review): presumably marks that ssrc-based SDP mungling was applied;
// not referenced in the visible code — confirm.
let sdpMungling = false;
function connect()
{
//Create PC
pc = new RTCPeerConnection({sdpSemantics : "plan-b"});
var ws = new WebSocket(url,"simulcast");
pc.ontrack = function(event) {
var prev = 0,prevFrames = 0,prevBytes = 0;
console.debug("ontrack",event);
const stream = event.streams[0];
//Play it
addVideoForStream(stream);
//Get track
var track = stream.getVideoTracks()[0];
//Update stats
setInterval(async function(){
var results;
try {
//For ff
results = await pc.getStats(track);
} catch(e) {
//For chrome
results = await pc.getStats();
}
var width = track.width || remote.videoWidth;
var height = track.height || remote.videoHeight;
//Get results
for (let result of results.values())
{
if (result.type==="inbound-rtp")
{
//Get timestamp delta
var delta = result.timestamp-prev;
//Store this ts
prev = result.timestamp;
//Get values
var fps = (result.framesDecoded-prevFrames)*1000/delta;
var kbps = (result.bytesReceived-prevBytes)*8/delta;
//Store last values
prevFrames = result.framesDecoded;
prevBytes = result.bytesReceived;
//If first
if (delta==result.timestamp || isNaN(fps) || isNaN (kbps))
return;
for (var i=4;i<targets.length;++i)
gauges[i].animationSpeed = 10000000; // set animation speed (32 is default value)
gauges[6].set(Math.min(Math.floor(fps) ,30));
gauges[7].set(Math.min(Math.floor(kbps) ,gauges[7].maxValue));
texts[6].innerText = Math.floor(fps);
texts[7].innerText = Math.floor(kbps);
} else if (result.type==="track") {
//Update stats
width = result.frameWidth;
height = result.frameHeight;
}
}
gauges[4].set(width);
gauges[5].set(height);
texts[4].innerText = width;
texts[5].innerText = height;
},1000);
};
//WebSocket connected: capture the local camera, publish it with simulcast
//enabled, munge the SDP into Chrome's ssrc-based simulcast when the browser
//offer has no native simulcast, and send the offer to the SFU.
ws.onopen = function(){
	console.log("opened");
	navigator.mediaDevices.getUserMedia({
		audio: false,
		video: videoResolution
	})
	.then(function(stream){
		var prev = 0;
		var prevFrames = 0;
		var prevBytes = 0;
		var track = stream.getVideoTracks()[0];
		console.debug("getUserMedia sucess",stream);
		//Play it
		addVideoForStream(stream,true);
		//Update sender stats once per second
		setInterval(async function(){
			var results;
			try {
				//For ff
				results = await pc.getStats(track);
			} catch(e) {
				//For chrome
				results = await pc.getStats();
			}
			var width = track.width || local.videoWidth;//result.stat("googFrameWidthReceived");
			var height = track.height || local.videoHeight;//result.stat("googFrameHeightReceived");
			//Get results
			for (let result of results.values())
			{
				if (result.type==="outbound-rtp")
				{
					//Get timestamp delta
					var delta = result.timestamp-prev;
					//Store this ts
					prev = result.timestamp;
					//Get values
					var fps = ((result.framesEncoded-prevFrames)*1000/delta);
					var kbps = (result.bytesSent-prevBytes)*8/delta;
					//Store last values
					prevFrames = result.framesEncoded;
					prevBytes = result.bytesSent;
					//If first sample there is nothing to diff against yet
					if (delta==result.timestamp || isNaN(fps) || isNaN (kbps))
						return;
					for (var i=0;i<4;++i)
						gauges[i].animationSpeed = 10000000; // set animation speed (32 is default value)
					gauges[2].set(Math.min(Math.floor(fps) ,30));
					gauges[3].set(Math.min(Math.floor(kbps) ,gauges[3].maxValue));
					texts[2].innerText = Math.floor(fps);
					texts[3].innerText = Math.floor(kbps);
				} else if (result.type==="track") {
					//Update stats
					width = result.frameWidth;
					height = result.frameHeight;
				}
			}
		},1000);
		window.s = stream;
		//Add stream tracks to peer connection
		stream.getTracks().forEach(track => pc.addTrack(track, stream));
		//Check API "compatibility"
		if (pc.getSenders()[0].setParameters)
		{
			try {
				//Enable simulcast
				//FIX: the RTCRtpEncodingParameters member is scaleResolutionDownBy,
				//not "scaleDownResolutionBy" — the misspelled key was silently ignored
				//so every layer was sent at full resolution.
				pc.getSenders()[0].setParameters({
					encodings: [
						{ rid: "a"},
						{ rid: "b" , scaleResolutionDownBy: 2.0 },
						{ rid: "c" , scaleResolutionDownBy: 4.0 }
					]
				});
			} catch(e) {
			}
		}
		//Create new offer
		return pc.createOffer();
	})
	.then(function(offer){
		console.debug("createOffer sucess",offer);
		//Get offer
		let sdp = offer.sdp;
		//Check simulcast 04 format
		//FIX: indexOf() returns -1 (truthy) when the marker is absent, so this
		//branch used to run — and set simulcast_03 — even without the draft-04
		//"send rid" syntax in the offer.
		if (sdp.indexOf(": send rid") !== -1)
		{
			//Convert from simulcast_03 to simulcast
			sdp = sdp.replace(": send rid=",":send ");
			//We need to modify answer too
			simulcast_03 = true;
		}
		//If offer doesn't have simulcast
		if (sdp.indexOf("simulcast")==-1)
			try {
				//OK, chrome way: pull ssrc attributes out of the video m-section
				const reg1 = RegExp("m=video.*\?a=ssrc:(\\d*) cname:(.+?)\\r\\n","s");
				const reg2 = RegExp("m=video.*\?a=ssrc:(\\d*) mslabel:(.+?)\\r\\n","s");
				const reg3 = RegExp("m=video.*\?a=ssrc:(\\d*) msid:(.+?)\\r\\n","s");
				const reg4 = RegExp("m=video.*\?a=ssrc:(\\d*) label:(.+?)\\r\\n","s");
				//Get ssrc and cname
				let res = reg1.exec(sdp);
				const ssrc = res[1];
				const cname = res[2];
				//Get other params
				const mslabel = reg2.exec(sdp)[2];
				const msid = reg3.exec(sdp)[2];
				const label = reg4.exec(sdp)[2];
				//Add simulcasts ssrcs (two extra layers on top of the original)
				const num = 2;
				const ssrcs = [ssrc];
				for (let i=0;i<num;++i)
				{
					//Create new ssrcs (even = media, odd = its RTX)
					const ssrc = 100+i*2;
					const rtx = ssrc+1;
					//Add to ssrc list
					ssrcs.push(ssrc);
					//Add sdp stuff
					sdp += "a=ssrc-group:FID " + ssrc + " " + rtx + "\r\n" +
						"a=ssrc:" + ssrc + " cname:" + cname + "\r\n" +
						"a=ssrc:" + ssrc + " msid:" + msid + "\r\n" +
						"a=ssrc:" + ssrc + " mslabel:" + mslabel + "\r\n" +
						"a=ssrc:" + ssrc + " label:" + label + "\r\n" +
						"a=ssrc:" + rtx + " cname:" + cname + "\r\n" +
						"a=ssrc:" + rtx + " msid:" + msid + "\r\n" +
						"a=ssrc:" + rtx + " mslabel:" + mslabel + "\r\n" +
						"a=ssrc:" + rtx + " label:" + label + "\r\n";
				}
				//Conference flag
				sdp += "a=x-google-flag:conference\r\n";
				//Add SIM group
				sdp += "a=ssrc-group:SIM " + ssrcs.join(" ") + "\r\n";
				//Update sdp in offer without the rid stuff
				offer.sdp = sdp;
				//Add RID equivalent to send it to the sfu (not set locally)
				sdp += "a=simulcast:send a;b;c\r\n";
				sdp += "a=rid:a send ssrc="+ssrcs[2]+"\r\n";
				sdp += "a=rid:b send ssrc="+ssrcs[1]+"\r\n";
				sdp += "a=rid:c send ssrc="+ssrcs[0]+"\r\n";
				//Disable third row
				//document.querySelector("tr[data-rid='c']").style.display = 'none';
				//Doing mungling
				sdpMungling = true;
			} catch(e) {
				console.error(e);
			}
		//Set it
		pc.setLocalDescription(offer);
		console.log(sdp);
		//Create room
		ws.send(JSON.stringify({
			cmd   : "OFFER",
			offer : sdp
		}));
		//Select simulcast layer
		ws.send(JSON.stringify({
			cmd             : "SELECT_LAYER",
			rid             : "b",
			spatialLayerId  : 0,
			temporalLayerId : 2
		}));
	})
	.catch(function(error){
		console.error("Error",error);
	});
};
//Handle the SFU answer: undo the simulcast format conversions applied to the
//offer, install the remote description and wire the layer-selection buttons
//to SELECT_LAYER signalling messages.
ws.onmessage = function(event){
console.log(event);
//Get protocol message
const msg = JSON.parse(event.data);
//Get sdp
let sdp = msg.answer;
//If offer was simulcast 04
if (simulcast_03)
//Convert it back from ":recv " to the draft-04 ": recv rid=" syntax
sdp = sdp.replace(": recv rid=",":recv ");
//if doing mungling
if (sdpMungling)
//Add custom flag and remove simulcast attribute (Chrome rejects it here)
sdp = sdp.replace(/a=sim.*\r\n/,"") + "a=x-google-flag:conference\r\n";
console.log(sdp);
//NOTE(review): legacy callback-based setRemoteDescription — kept as-is
pc.setRemoteDescription(new RTCSessionDescription({
type:'answer',
//Convert from simulcast to simulcast_03
sdp: sdp
}), function () {
console.log("JOINED");
}, function (err) {
console.error("Error joining",err);
}
);
//Currently highlighted layer button; presumably one button starts with the
//class in the markup — if none does, `old` is null and the first click will
//throw. TODO confirm against the page markup.
var old = document.querySelector ('.mdl-button--colored');
var listener = function(event)
{
//Get rid/temporal layer from the clicked button's data-* attributes
var rid = event.target.dataset["rid"];
var temporalLayerId = event.target.dataset["tid"];
//Select simulcast layer
ws.send(JSON.stringify({
cmd             : "SELECT_LAYER",
rid             : rid,
spatialLayerId  : 0,
temporalLayerId : temporalLayerId
}));
//Move the highlight to the clicked button
event.target.classList.add("mdl-button--colored");
old.classList.remove("mdl-button--colored");
old = event.target;
};
//Every button on the page selects a layer
var buttons = document.querySelectorAll('button');
for (var i = 0; i < buttons.length; i++)
buttons[i].addEventListener("click",listener);
};
}
//Gate the demo behind the intro <dialog> when the browser supports it;
//otherwise start connecting immediately.
var dialog = document.querySelector('dialog');
if (!dialog.showModal) {
	connect();
} else {
	dialog.showModal();
	var readyButton = dialog.querySelector('.ready');
	readyButton.addEventListener('click', function () {
		dialog.close();
		connect();
	});
}

View File

@@ -0,0 +1,297 @@
<html>
<head>
<!-- Material Design Lite + Roboto/Material icon assets, loaded from CDNs -->
<link rel="stylesheet" href="https://fonts.googleapis.com/css?family=Roboto:300,400,500,700" type="text/css">
<link rel="stylesheet" href="https://fonts.googleapis.com/icon?family=Material+Icons">
<link rel="stylesheet" href="https://code.getmdl.io/1.3.0/material.indigo-pink.min.css">
<script defer src="https://code.getmdl.io/1.3.0/material.min.js"></script>
<style>
html {
zoom: 90%;
}
body {
background: #e2e1e0;
text-align: center;
margin: 0px;
padding: 0px;
font-size: 9px;
color: #555;
font-family: Roboto;
text-align: -webkit-center;
}
table {
margin: 10px;
position: relative;
left: -40px;
}
video {
object-fit: cover;
float: left;
background: #fff;
border-radius: 2px;
display: inline-block;
margin: 1rem;
position: relative;
width: 420px;
box-shadow: 0 1px 3px rgba(0,0,0,0.12), 0 1px 2px rgba(0,0,0,0.24);
transition: all 0.5s cubic-bezier(.25,.8,.25,1);
padding:1px;
bottom: 0px;
height: 315px;
}
#container {
top: 10px;
left: 10px;
margin: 0px;
padding: 0px;
width: 900px;
}
.container-video {
width: 50%;
float: left;
display: inline-block;
position: relative;
font-size: 24px;
top: 5px;
margin-bottom: 10px;
}
/* Each gauge is an absolutely-positioned canvas with overlay label spans.
   NOTE: .gaugeChartContainer is declared a second time further down; the
   later declarations (position/font-size) override these where they overlap. */
.gaugeChartContainer {
position: relative;
width: 190px;
height: 120px;
float: left;
padding: 10px;
}
.gaugeChart {
position: relative;
text-align: center;
}
.gaugeChart canvas {
position: absolute;
top: 0;
left: 0;
z-index: 0;
}
.gaugeChartLabel {
display: inline-block;
position: absolute;
float: left;
left: 0;
top: 55px;
width: 100%;
text-align: center;
color: #FFFFFF;
font-size: 24px;
font-weight: bold;
z-index: 1;
text-shadow: #333 0px 0px 2px;
}
.gaugeChartContainer {
position: relative;
font-size: 9px;
}
.gaugeChartTitle {
display: inline-block;
position: absolute;
float: left;
top: 0px;
left: 0;
width: 100%;
text-align: center;
color: #888;
font-weight: bold;
font-size: 12px;
}
.gaugeChartMin {
display: inline-block;
position: absolute;
float: left;
left: 0;
bottom: 10%;
width: 92%;
margin-left: 8%;
text-align: left;
color: #888;
font-weight: bold;
}
.gaugeChartMax {
display: inline-block;
position: absolute;
float: left;
left: 0;
bottom: 10%;
width: 95%;
margin-right: 5%;
text-align: right;
color: #888;
font-weight: bold;
}
td {
margin: 5px;
padding: 5px;
text-align: center;
}
.ready-dialog
{
width: 780px;
text-align: left;
}
.ready-dialog p
{
color: black;
font-size: 12pt;
}
.ready-dialog code
{
font-size: 12pt;
}
</style>
<!-- gauge.js renders the bandwidth/fps/resolution dials -->
<script src="../js/gauge.min.js" type="text/javascript"></script>
</head>
<body>
<div id="container">
<!-- Local capture and the layer the SFU sends back -->
<div class="container-video">LOCAL
<video id="local"></video>
</div>
<div class="container-video">REMOTE
<video id="remote"></video>
</div>
<!-- Layer selection grid: data-sid/data-tid are read by the click listener -->
<table>
<tr>
<td></td>
<td>FPS/4</td>
<td>FPS/2</td>
<td>FPS</td>
</tr>
<tr>
<td>320x240</td>
<td><button data-sid="0" data-tid="0" class="mdl-button mdl-js-button mdl-button--raised">Layer S0 T0</button></td>
<td><button data-sid="0" data-tid="1" class="mdl-button mdl-js-button mdl-button--raised">Layer S0 T1</button></td>
<td><button data-sid="0" data-tid="2" class="mdl-button mdl-js-button mdl-button--raised">Layer S0 T2</button></td>
</tr>
<tr>
<td>640x480</td>
<td><button data-sid="1" data-tid="0" class="mdl-button mdl-js-button mdl-button--raised">Layer S1 T0</button></td>
<td><button data-sid="1" data-tid="1" class="mdl-button mdl-js-button mdl-button--raised">Layer S1 T1</button></td>
<td><button data-sid="1" data-tid="2" class="mdl-button mdl-js-button mdl-button--raised">Layer S1 T2</button></td>
</tr>
<tr>
<td>1280x960</td>
<td><button data-sid="2" data-tid="0" class="mdl-button mdl-js-button mdl-button--raised">Layer S2 T0</button></td>
<td><button data-sid="2" data-tid="1" class="mdl-button mdl-js-button mdl-button--raised">Layer S2 T1</button></td>
<td><button data-sid="2" data-tid="2" class="mdl-button mdl-js-button mdl-button--raised">Layer S2 T2</button></td>
</tr>
</table>
<!-- Sender gauges (the JS selects these by class, in document order) -->
<div class="gaugeChartContainer">
<canvas id="g_s_w" class="gaugeChart" width="192" height="135" style="width: 160px; height: 100px;"></canvas>
<span id="s_s_w" class="gaugeChartLabel">- -</span>
<span class="gaugeChartTitle">Sent Width</span>
<span class="gaugeChartMin">0</span>
<span class="gaugeChartMax"></span>
</div>
<div class="gaugeChartContainer">
<canvas id="g_s_h" class="gaugeChart" width="192" height="135" style="width: 160px; height: 100px;"></canvas>
<span id="s_s_h" class="gaugeChartLabel">- -</span>
<span class="gaugeChartTitle">Sent Height</span>
<span class="gaugeChartMin">0</span>
<span class="gaugeChartMax"></span>
</div>
<div class="gaugeChartContainer">
<canvas id="g_s_f" class="gaugeChart" width="192" height="135" style="width: 160px; height: 100px;"></canvas>
<span id="s_s_f" class="gaugeChartLabel">- -</span>
<span class="gaugeChartTitle">Sent FPS</span>
<span class="gaugeChartMin">0</span>
<span class="gaugeChartMax">30</span>
</div>
<div class="gaugeChartContainer">
<canvas id="g_s_b" class="gaugeChart" width="192" height="135" style="width: 160px; height: 100px;"></canvas>
<span id="s_s_b" class="gaugeChartLabel">- -</span>
<span class="gaugeChartTitle">Sent kbps</span>
<span class="gaugeChartMin">0</span>
<span class="gaugeChartMax">2048</span>
</div>
<!-- Receiver gauges; label ids made unique (they previously duplicated the
     sender-side ids, which is invalid HTML) -->
<div class="gaugeChartContainer">
<canvas id="r_s_w" class="gaugeChart" width="192" height="135" style="width: 160px; height: 100px;"></canvas>
<span id="s_r_w" class="gaugeChartLabel">- -</span>
<span class="gaugeChartTitle">Recv Width</span>
<span class="gaugeChartMin">0</span>
<span class="gaugeChartMax"></span>
</div>
<div class="gaugeChartContainer">
<canvas id="r_s_h" class="gaugeChart" width="192" height="135" style="width: 160px; height: 100px;"></canvas>
<span id="s_r_h" class="gaugeChartLabel">- -</span>
<span class="gaugeChartTitle">Recv Height</span>
<span class="gaugeChartMin">0</span>
<span class="gaugeChartMax"></span>
</div>
<div class="gaugeChartContainer">
<canvas id="r_s_f" class="gaugeChart" width="192" height="135" style="width: 160px; height: 100px;"></canvas>
<span id="s_r_f" class="gaugeChartLabel">- -</span>
<span class="gaugeChartTitle">Recv FPS</span>
<span class="gaugeChartMin">0</span>
<span class="gaugeChartMax">30</span>
</div>
<div class="gaugeChartContainer">
<canvas id="r_s_b" class="gaugeChart" width="192" height="135" style="width: 160px; height: 100px;"></canvas>
<span id="s_r_b" class="gaugeChartLabel">- -</span>
<span class="gaugeChartTitle">Recv kbps</span>
<span class="gaugeChartMin">0</span>
<span class="gaugeChartMax">2048</span>
</div>
</div>
<dialog class="ready-dialog mdl-dialog">
<h4 class="mdl-dialog__title">Ready to test VP9 SVC layer selection?</h4>
<div class="mdl-dialog__content">
<p>
This demo allows you to test the VP9 SVC layer selection which will be a key functionality of the <a href="https://github.com/medooze/sfu">Medooze SFU</a>.
</p>
<p>
When SVC is enabled on latest Chrome version, it will send 3 different Temporal Layers and 2 Spatial layers. Chrome will send a single media stream to our SFU containing all SVC layers, and you will be able to select which layers the SFU sends back to you.
</p>
<p>
By switching between the different layers you will be able to retrieve different sizes and fps from a single encoding, lowering the bandwidth requirements without requiring extra processing power.
</p>
<p>
<b>Important:</b> This demo only works on Google Canary and you must enable VP9 SVC by running it with the following command line
</p>
<pre>
<code>
chrome.exe --force-fieldtrials=WebRTC-SupportVP9SVC/EnabledByFlag_2SL3TL
</code>
</pre>
</div>
<div class="mdl-dialog__actions">
<button type="button" class="ready mdl-button mdl-button mdl-button--raised mdl-button--accent">Ready!</button>
</div>
</dialog>
</body>
</html>
<script src="js/svc.js" type="text/javascript"></script>

View File

@@ -0,0 +1,340 @@
//Signalling endpoint: same host/port as the page, over secure WebSocket
const url = "wss://" + window.location.hostname + ":" + window.location.port;
//getUserMedia video constraint; `true` means "any camera resolution"
let videoResolution = true;
//Pick an exact capture resolution from the "?video=" query parameter
const href = new URL(window.location.href);
if (href.searchParams.has("video"))
{
	//Build an exact width/height constraint
	const exact = (w, h) => ({
		width : { min: w, max: w },
		height: { min: h, max: h },
	});
	//Supported presets ("no" disables video capture entirely)
	const presets = {
		"1080p": exact(1920, 1080),
		"720p" : exact(1280, 720),
		"576p" : exact(720, 576),
		"480p" : exact(640, 480),
		"no"   : false,
	};
	const requested = href.searchParams.get("video").toLowerCase();
	//Unknown values keep the default (`true`)
	if (requested in presets)
		videoResolution = presets[requested];
}
//gauge.js rendering options shared by all eight dials
var opts = {
	lines: 12, // The number of lines to draw
	angle: 0.15, // The length of each line
	lineWidth: 0.44, // 0.44 The line thickness
	pointer: {
		length: 0.8, // 0.9 The radius of the inner circle
		strokeWidth: 0.035, // The rotation offset
		color: '#A0A0A0' // Fill color
	},
	limitMax: true,
	colorStart: '#28c1d1', // Colors
	colorStop: '#28c1d1', // just experiment with them
	strokeColor: '#F0F0F0', // to see which ones work best for you
	generateGradient: false,
	gradientType: 0
};
//One gauge per .gaugeChart canvas, in document order:
//[0..3] sent width/height/fps/kbps, [4..7] received width/height/fps/kbps
var targets = document.querySelectorAll('.gaugeChart'); // your canvas element
var gauges = [];
for (var i=0;i<targets.length;++i)
{
	gauges[i] = new Gauge(targets[i]).setOptions (opts); // create sexy gauge!
	gauges[i].animationSpeed = 10000; // set animation speed (32 is default value)
	gauges[i].set (0); // set actual value
}
//Overlay value labels and max-range captions for each gauge
var texts = document.querySelectorAll('.gaugeChartLabel');
var max = document.querySelectorAll('.gaugeChartMax');
//Scale the width/height gauges to the requested capture resolution
//(falls back to 640x480 when no exact resolution was requested)
max[0].innerText = gauges[0].maxValue = videoResolution.width ? videoResolution.width.max : 640;
max[1].innerText = gauges[1].maxValue = videoResolution.height ? videoResolution.height.max : 480;
max[2].innerText = gauges[2].maxValue = 30;
max[3].innerText = gauges[3].maxValue = 2048;
max[4].innerText = gauges[4].maxValue = videoResolution.width ? videoResolution.width.max : 640;
max[5].innerText = gauges[5].maxValue = videoResolution.height ? videoResolution.height.max : 480;
max[6].innerText = gauges[6].maxValue = 30;
max[7].innerText = gauges[7].maxValue = 2048;
//FIX: removed a duplicated `var texts = ...` declaration and a stray
//`var max = document.querySelectorAll('.gaugeChartLabel')` that silently
//repointed `max` at the label spans instead of the max captions.
//Attach a MediaStream to one of the page's <video> elements and start it.
//The local (muted) preview goes to #local, the remote stream to #remote.
function addVideoForStream(stream,muted)
{
	//Pick the target element by role
	const selector = muted ? "#local" : "#remote";
	const video = document.querySelector(selector);
	//Remember which stream is attached
	video.streamid = stream.id;
	//Feed the stream to the element and start playback
	video.srcObject = stream;
	video.autoplay = true;
	video.muted = muted;
}
//Promise-based wrapper around the legacy callback-style navigator.getUserMedia.
//Resolves with the captured stream, rejects with the capture error.
function getUserMedia(constrains)
{
	return new Promise(function(resolve,reject) {
		//Forward the success/failure callbacks straight to the promise
		navigator.getUserMedia(constrains, resolve, reject);
	});
}
//Last local SDP offer and the RTCPeerConnection (module-global for debugging)
var sdp;
var pc;
//Open the signalling WebSocket, publish the local camera as a VP9 SVC stream,
//render whatever layer the SFU sends back, and wire the S/T layer buttons to
//SELECT_LAYER signalling messages.
//NOTE(review): uses the legacy onaddstream/addStream stream-based APIs.
function connect()
{
	//Create PC
	pc = new RTCPeerConnection();
	var ws = new WebSocket(url,"svc");
	pc.onaddstream = function(event) {
		var prev = 0,prevFrames = 0,prevBytes = 0;
		console.debug("onAddStream",event);
		//Play it
		addVideoForStream(event.stream);
		//Get track
		var track = event.stream.getVideoTracks()[0];
		//Update receiver stats once per second
		setInterval(async function(){
			var results;
			try {
				//For ff
				results = await pc.getStats(track);
			} catch(e) {
				//For chrome
				results = await pc.getStats();
			}
			var width = track.width || remote.videoWidth;
			var height = track.height || remote.videoHeight;
			//Get results
			for (let result of results.values())
			{
				if (result.type==="inbound-rtp")
				{
					//Get timestamp delta
					var delta = result.timestamp-prev;
					//Store this ts
					prev = result.timestamp;
					//Get values
					var fps = (result.framesDecoded-prevFrames)*1000/delta;
					var kbps = (result.bytesReceived-prevBytes)*8/delta;
					//Store last values
					prevFrames = result.framesDecoded;
					prevBytes = result.bytesReceived;
					//If first sample there is nothing to diff against yet
					if (delta==result.timestamp || isNaN(fps) || isNaN (kbps))
						return;
					for (var i=4;i<targets.length;++i)
						gauges[i].animationSpeed = 10000000; // set animation speed (32 is default value)
					gauges[6].set(Math.min(Math.floor(fps) ,30));
					gauges[7].set(Math.min(Math.floor(kbps) ,gauges[7].maxValue));
					texts[6].innerText = Math.floor(fps);
					texts[7].innerText = Math.floor(kbps);
				} else if (result.type==="track") {
					//Update stats
					width = result.frameWidth;
					height = result.frameHeight;
				}
			}
			gauges[4].set(width);
			gauges[5].set(height);
			texts[4].innerText = width;
			texts[5].innerText = height;
		},1000);
	};
	ws.onopen = function(){
		console.log("opened");
		navigator.mediaDevices.getUserMedia({
			audio: false,
			video: videoResolution
		})
		.then(function(stream){
			var prev = 0;
			var prevFrames = 0;
			var prevBytes = 0;
			var track = stream.getVideoTracks()[0];
			console.debug("getUserMedia sucess",stream);
			//Play it
			addVideoForStream(stream,true);
			//Update sender stats once per second
			setInterval(async function(){
				var results;
				try {
					//For ff
					results = await pc.getStats(track);
				} catch(e) {
					//For chrome
					results = await pc.getStats();
				}
				var width = track.width || local.videoWidth;//result.stat("googFrameWidthReceived");
				var height = track.height || local.videoHeight;//result.stat("googFrameHeightReceived");
				//Get results
				for (let result of results.values())
				{
					if (result.type==="outbound-rtp")
					{
						//Get timestamp delta
						var delta = result.timestamp-prev;
						//Store this ts
						prev = result.timestamp;
						//Get values
						var fps = ((result.framesEncoded-prevFrames)*1000/delta);
						var kbps = (result.bytesSent-prevBytes)*8/delta;
						//Store last values
						prevFrames = result.framesEncoded;
						prevBytes = result.bytesSent;
						//If first sample there is nothing to diff against yet
						if (delta==result.timestamp || isNaN(fps) || isNaN (kbps))
							return;
						for (var i=0;i<4;++i)
							gauges[i].animationSpeed = 10000000; // set animation speed (32 is default value)
						gauges[2].set(Math.min(Math.floor(fps) ,30));
						gauges[3].set(Math.min(Math.floor(kbps) ,gauges[3].maxValue));
						texts[2].innerText = Math.floor(fps);
						texts[3].innerText = Math.floor(kbps);
					} else if (result.type==="track") {
						//Update stats
						width = result.frameWidth;
						height = result.frameHeight;
					}
				}
				//Grow the width/height gauge ranges if the camera exceeds them
				gauges[0].maxValue = Math.max(gauges[0].maxValue,width);
				gauges[1].maxValue = Math.max(gauges[1].maxValue,height);
				gauges[4].maxValue = Math.max(gauges[4].maxValue,width);
				gauges[5].maxValue = Math.max(gauges[5].maxValue,height);
				gauges[0].set(width);
				gauges[1].set(height);
				texts[0].innerText = width;
				texts[1].innerText = height;
			},1000);
			//Add stream to peer connection
			pc.addStream(stream);
			//Create new offer
			return pc.createOffer();
		})
		.then(function(offer){
			console.debug("createOffer sucess",offer);
			//We have sdp
			sdp = offer.sdp;
			//Set it
			pc.setLocalDescription(offer);
			console.log(sdp);
			//Create room
			ws.send(JSON.stringify({
				cmd   : "OFFER",
				offer : sdp
			}));
			//Select simulcast layer
			ws.send(JSON.stringify({
				cmd             : "SELECT_LAYER",
				spatialLayerId  : 0,
				temporalLayerId : 0
			}));
		})
		.catch(function(error){
			console.error("Error",error);
		});
	};
	ws.onmessage = function(event){
		console.log(event);
		//Get protocol message
		const msg = JSON.parse(event.data);
		if (msg.answer)
		{
			//SFU answered our offer: install the remote description
			//(legacy callback form, kept as-is)
			console.log(msg.answer);
			pc.setRemoteDescription(new RTCSessionDescription({
				type:'answer',
				sdp: msg.answer
			}), function () {
				console.log("JOINED");
			}, function (err) {
				console.error("Error joining",err);
			}
			);
			var listener = function(event)
			{
				//Get previous selected
				var old = document.querySelector ('.mdl-button--colored');
				//Get layer ids from the clicked button's data-* attributes
				var spatialLayerId = event.target.dataset["sid"];
				var temporalLayerId = event.target.dataset["tid"];
				//Select simulcast layer
				ws.send(JSON.stringify({
					cmd             : "SELECT_LAYER",
					spatialLayerId  : spatialLayerId,
					temporalLayerId : temporalLayerId
				}));
				//Move the highlight to the clicked button
				event.target.classList.add("mdl-button--colored");
				//FIX: no button is highlighted before the first selection,
				//so guard against a null previous selection.
				if (old)
					old.classList.remove("mdl-button--colored");
			};
			var buttons = document.querySelectorAll('button');
			for (var i = 0; i < buttons.length; i++)
				buttons[i].addEventListener("click",listener);
		} else {
			//Server notified the currently active layer: sync the highlight
			var spatialLayerId = msg.sid;
			var temporalLayerId = msg.tid;
			//Get divs
			var old = document.querySelector (".mdl-button--colored");
			//FIX: the attribute selector was missing its closing "]", which
			//made querySelector throw a SyntaxError on every layer update.
			var selected = document.querySelector ("button[data-sid='"+spatialLayerId+"'][data-tid='"+temporalLayerId+"']");
			//Update (guard against unknown layers / no previous selection)
			if (selected)
				selected.classList.add("mdl-button--colored");
			if (old)
				old.classList.remove("mdl-button--colored");
		}
	};
}
//Show the "Ready" dialog first when <dialog> is supported, starting the demo
//on click; otherwise connect right away.
var dialog = document.querySelector('dialog');
var canShowDialog = Boolean(dialog.showModal);
if (canShowDialog) {
	dialog.showModal();
	dialog.querySelector('.ready').addEventListener('click', function onReady() {
		dialog.close();
		connect();
	});
} else {
	connect();
}