// P2P relay streaming client — WebRTC signaling via Socket.IO, with a
// WebCodecs "single-encode fanout" pipeline that ships encoded video
// chunks over RTCDataChannels down a relay chain.
// Socket.IO connection to the signaling server (same origin).
const socket = io();

// --- DOM element references ---
const localVideo = document.getElementById('localVideo');
const remoteVideo = document.getElementById('remoteVideo');
const statusDiv = document.getElementById('status');
const streamListUl = document.getElementById('streamList');
const lobbyView = document.getElementById('lobby-view');
const streamView = document.getElementById('stream-view');
const streamTitle = document.getElementById('currentStreamTitle');
const upstreamMetricsBox = document.getElementById('upstreamMetrics');
const downstreamMetricsBox = document.getElementById('downstreamMetrics');

// --- Global State ---
let localStream = null; // MediaStream when we are the broadcaster (head node)
let iceServers = []; // filled by init() from /api/get-turn-credentials
const useDataChannelVideo = true; // single-encode fanout
let isHeadNode = false; // set once we start broadcasting

// Datachannel video pipeline (WebCodecs + insertable streams)
let videoProcessor; // MediaStreamTrackProcessor over the local video track
let videoReader; // reader for videoProcessor.readable
let videoEncoder; // VideoEncoder (head node only)
let videoDecoder; // VideoDecoder (viewers/relays)
let videoGenerator; // MediaStreamTrackGenerator rendering decoded frames
let videoWriter; // writer for videoGenerator.writable
let videoDecoderConfigured = false; // true after videoDecoder.configure() succeeds
let decoderConfigCache = null; // last known decoder config, relayed to downstreams
let upstreamVideoChannel = null; // datachannel carrying video from our upstream
const downstreamVideoChannels = new Map(); // targetId -> open RTCDataChannel fanout
let lastVideoChunkTs = 0n; // timestamp of the last packed chunk (BigInt)
const CHUNK_HEADER_SIZE = 14; // 1(type)+1(chunk type)+8(ts)+4(duration)
let supportedEncoding = []; // {codec, hardwareAcceleration} pairs the encoder accepts
let supportedDecoding = []; // {codec, hardwareAcceleration} pairs the decoder accepts
let supportedCodecs = []; // intersection of the two lists (usable end-to-end)
let hasReceivedKeyframe = false; // gate: drop deltas until a keyframe arrives

// Datachannel metrics (bytes/frames based)
const dcStats = {
  video: {
    txBytes: 0,
    rxBytes: 0,
    txFrames: 0,
    rxFrames: 0,
    lastSampleTs: null, // previous sample time for rate computation
    lastTxBytes: 0,
    lastRxBytes: 0,
    lastTxFrames: 0,
    lastRxFrames: 0,
    bitrateKbpsTx: null, // derived rates; null until two samples exist
    bitrateKbpsRx: null,
    fpsTx: null,
    fpsRx: null,
  },
};

// Simple backpressure guard
let encoderBusy = false; // NOTE(review): never written in this file — confirm intended
let decodeQueue = 0; // decode() calls not yet emitted back by the decoder

// ACTIVE connections
let currentUpstreamPC = null; // RTCPeerConnection receiving the stream
let currentDownstreamPC = null; // RTCPeerConnection relaying to our downstream
let downstreamId = null; // socket id of the current downstream peer
// Track/stream triples for the RTP relay path: the original from upstream,
// the clone displayed locally, and the clone forwarded downstream.
let origVideoStream,
  displayedVideoStream,
  sentVideoStream,
  origAudioStream,
  displayedAudioStream,
  sentAudioStream,
  origVideoTrack,
  origAudioTrack,
  displayedVideoTrack,
  displayedAudioTrack,
  sentVideoTrack,
  sentAudioTrack;

// FADING connections (kept briefly during upstream/downstream handover)
let oldUpstreamPCs = [];
let oldDownstreamPCs = [];

// Debug metrics history for bitrate/fps deltas
const metricsHistory = {
  upstream: null,
  downstream: null,
};
// --- 1. Initialization ---
/**
 * Fetches ICE (TURN/STUN) servers and probes WebCodecs encoder/decoder
 * support for a fixed list of candidate codec configs. Populates the
 * module globals `iceServers`, `supportedEncoding`, `supportedDecoding`,
 * and `supportedCodecs` (the encode/decode intersection).
 */
async function init() {
  try {
    const response = await fetch('/api/get-turn-credentials');
    if (!response.ok) throw new Error(`TURN credential fetch failed: ${response.status}`);
    const data = await response.json();
    console.log('TURN data:', data);
    iceServers = data.iceServers;
  } catch (e) {
    // Fall back to a public STUN server when the credential API is unavailable.
    iceServers = [{ urls: 'stun:stun.l.google.com:19302' }];
  }

  // Candidate codecs (H.264, H.265, VP8, VP9, AV1), each probed with
  // hardware-preferred and software-preferred acceleration.
  const codecs = ['avc1.420034', 'hvc1.1.6.L123.00', 'vp8', 'vp09.00.10.08', 'av01.0.04M.08'];
  const accelerations = ['prefer-hardware', 'prefer-software'];

  const configs = [];
  for (const codec of codecs) {
    for (const acceleration of accelerations) {
      configs.push({
        codec,
        hardwareAcceleration: acceleration,
        width: 1920,
        height: 1080,
        bitrate: 4_000_000,
        bitrateMode: 'variable',
        framerate: 30,
      });
    }
  }

  for (const config of configs) {
    const support = await VideoEncoder.isConfigSupported(config);
    console.log(`VideoEncoder's config ${JSON.stringify(support.config)} support: ${support.supported}`);
    if (support.supported)
      supportedEncoding.push({ codec: config.codec, hardwareAcceleration: config.hardwareAcceleration });
  }

  for (const config of configs) {
    const support = await VideoDecoder.isConfigSupported(config);
    console.log(`VideoDecoder's config ${JSON.stringify(support.config)} support: ${support.supported}`);
    // BUG FIX: decoder results were previously pushed onto `supportedEncoding`,
    // leaving `supportedDecoding` empty and `supportedCodecs` always [] (which
    // forced the software-VP9 fallback in startHeadVideoEncoding).
    if (support.supported)
      supportedDecoding.push({ codec: config.codec, hardwareAcceleration: config.hardwareAcceleration });
  }

  // A codec is usable end-to-end only if both encode and decode support it
  // with the same acceleration preference.
  supportedCodecs = supportedEncoding.filter((enc) =>
    supportedDecoding.some((dec) => dec.codec === enc.codec && dec.hardwareAcceleration === enc.hardwareAcceleration)
  );
  console.log('Supported codecs for datachannel video:', supportedCodecs);
}
init();
|
|
|
// --- 2. Lobby Logic ---

// Re-render the lobby's stream list whenever the server pushes an update.
socket.on('stream_list_update', (streams) => {
  streamListUl.innerHTML = '';

  if (!streams.length) {
    streamListUl.innerHTML = "<li style='cursor:default; color:#777;'>No active streams. Start one!</li>";
    return;
  }

  for (const name of streams) {
    const entry = document.createElement('li');
    entry.innerText = `${name}`;
    entry.onclick = () => joinStream(name);
    streamListUl.appendChild(entry);
  }
});
|
/**
 * Become the head node: capture media from a chosen video file (looped)
 * or the webcam, switch the UI into broadcast mode, start the datachannel
 * encoder when enabled, and announce the stream to the server.
 */
async function startStream() {
  const name = document.getElementById('streamNameInput').value;
  const fileInput = document.getElementById('videoFileInput');
  const file = fileInput.files[0];

  if (!name) return alert('Please enter a name');

  try {
    if (file) {
      // --- OPTION A: VIDEO FILE MODE ---
      console.log('Starting stream from video file...');

      // 1. Create a URL for the local file
      const fileURL = URL.createObjectURL(file);

      // 2. Set it to the local video element
      localVideo.src = fileURL;
      localVideo.loop = true; // <--- HERE IS THE LOOP LOGIC
      localVideo.muted = false; // Must be unmuted to capture audio, use headphones!
      localVideo.volume = 1.0;

      // 3. Play the video (required before capturing)
      await localVideo.play();

      // 4. Capture the stream from the video element
      // (Chrome/Edge use captureStream, Firefox uses mozCaptureStream)
      if (localVideo.captureStream) {
        localStream = localVideo.captureStream();
      } else {
        return alert('Your browser does not support capturing video from files.');
      }

      // Note: captureStream() sometimes doesn't capture audio if the element is muted.
      // If you want to stream audio, you must hear it locally too.
    } else {
      // --- OPTION B: WEBCAM MODE ---
      console.log('Starting stream from Webcam...');
      localStream = await navigator.mediaDevices.getUserMedia({
        video: { width: { ideal: 4096 } },
        audio: true,
      });
      // Preview a clone so the broadcast stream itself stays untouched.
      localVideo.srcObject = localStream.clone();
      localVideo.muted = true; // Mute local webcam to avoid feedback loop
    }

    // --- COMMON LOGIC ---

    // UI Switch
    lobbyView.style.display = 'none';
    streamView.style.display = 'block';
    remoteVideo.style.display = 'none'; // Broadcaster doesn't see remote
    streamTitle.innerText = `Broadcasting: ${name}`;

    isHeadNode = true;
    if (useDataChannelVideo) {
      // Spin up the WebCodecs encode/fanout pipeline before announcing.
      await startHeadVideoEncoding();
    }

    socket.emit('start_stream', name);
    statusDiv.innerText = `Status: Broadcasting (Head) | ID: ${socket.id}`;
  } catch (err) {
    console.error(err);
    alert('Failed to start stream: ' + err.message);
  }
}
|
|
|
// Switch the UI into viewer mode and ask the server to attach us to the
// relay chain for stream `name`.
function joinStream(name) {
  const viewerLayout = [
    [lobbyView, 'none'],
    [streamView, 'block'],
    [localVideo, 'none'], // viewers don't see themselves
  ];
  for (const [el, display] of viewerLayout) el.style.display = display;

  streamTitle.innerText = `Watching: ${name}`;

  socket.emit('join_stream', name);
  statusDiv.innerText = `Status: Joining chain... | ID: ${socket.id}`;
}
|
|
|
// Tear everything down by reloading the page, which resets all state and
// returns the user to the lobby.
function leaveStream() {
  location.reload();
}
|
|
|
// --- Datachannel Video Helpers ---
/**
 * Head-node encode setup: pulls raw frames off the local video track with a
 * MediaStreamTrackProcessor, configures a VideoEncoder using the first
 * supported config from `supportedCodecs` (else a software-VP9 fallback),
 * seeds `decoderConfigCache` for downstream peers, and starts the
 * encode loop.
 */
async function startHeadVideoEncoding() {
  if (typeof VideoEncoder === 'undefined') {
    alert('VideoEncoder API not supported in this browser.');
    return;
  }
  if (!localStream) return;
  const videoTrack = localStream.getVideoTracks()[0];
  if (!videoTrack) return;

  // Reset any previous pipeline state before building a new one.
  cleanupDataChannelVideo();

  videoProcessor = new MediaStreamTrackProcessor({ track: videoTrack });
  videoReader = videoProcessor.readable.getReader();

  // Encode at the track's native geometry/rate, with defaults when unknown.
  const settings = videoTrack.getSettings();
  const width = settings.width || 1920;
  const height = settings.height || 1080;
  const framerate = settings.frameRate || 30;

  const candidates = supportedCodecs.map((s) => ({
    codec: s.codec,
    hardwareAcceleration: s.hardwareAcceleration,
    width,
    height,
    bitrate: 4_000_000,
    bitrateMode: 'variable',
    framerate,
  }));

  // Probe candidates in order and take the first supported config.
  let chosenConfig = null;
  for (const cfg of candidates) {
    const support = await VideoEncoder.isConfigSupported(cfg);
    console.log('Encoder config support check', JSON.stringify(cfg.codec, null, 2), JSON.stringify(support, null, 2));
    if (support.supported) {
      chosenConfig = support.config;
      break;
    }
  }

  if (!chosenConfig) {
    // fall back to vp09.00.10.08 software
    chosenConfig = {
      codec: 'vp09.00.10.08',
      hardwareAcceleration: 'prefer-software',
      width,
      height,
      bitrate: 4_000_000,
      bitrateMode: 'variable',
      framerate,
    };
  }

  videoEncoder = new VideoEncoder({
    output: handleEncodedChunk,
    error: (e) => console.error('VideoEncoder error', e),
  });

  videoEncoder.configure(chosenConfig);
  // Provisional decoder config for downstreams; the encoder's own
  // decoderConfig (from the first chunk's metadata in handleEncodedChunk)
  // replaces it once available.
  decoderConfigCache = {
    codec: chosenConfig.codec,
    codedWidth: chosenConfig.width,
    codedHeight: chosenConfig.height,
    hardwareAcceleration: 'prefer-hardware',
  };

  // Fire-and-forget: the loop runs until the reader or encoder shuts down.
  pumpVideoEncodeLoop();
}
|
|
/**
 * Reads VideoFrames from the track processor and feeds them to the
 * encoder until the reader ends or the encoder leaves the
 * 'configured' state.
 */
async function pumpVideoEncodeLoop() {
  if (!videoReader || !videoEncoder) return;
  let frameCount = 0;
  while (videoReader && videoEncoder && videoEncoder.state === 'configured') {
    const { value: frame, done } = await videoReader.read();
    if (done || !frame) break;
    const keyFrame = frameCount % 60 === 0; // force a keyframe roughly every 2s at 30fps
    try {
      videoEncoder.encode(frame, { keyFrame });
    } catch (err) {
      console.warn('encode failed', err);
      frame.close();
      break;
    }
    // Frames must be closed promptly or the capture pipeline stalls.
    frame.close();
    frameCount += 1;
  }
}
|
|
|
// VideoEncoder output callback: pack each encoded chunk and fan it out to
// all downstream datachannels, distributing the decoder config whenever
// the encoder hands us a fresh one in the chunk metadata.
function handleEncodedChunk(chunk, metadata) {
  const freshConfig = metadata?.decoderConfig;
  if (freshConfig) {
    decoderConfigCache = freshConfig;
    broadcastDecoderConfig();
    configureDecoderIfNeeded(decoderConfigCache);
  }

  const payload = packEncodedChunk(chunk);
  dcStats.video.txFrames += 1;
  dcStats.video.txBytes += payload.byteLength;
  broadcastVideoPayload(payload);
}
|
|
|
// Serialize an EncodedVideoChunk into one binary datachannel message:
// [u8 msgType=1][u8 isKey][u64 timestamp][u32 duration][payload bytes].
// Also records the chunk timestamp in `lastVideoChunkTs`.
function packEncodedChunk(chunk) {
  const buf = new ArrayBuffer(chunk.byteLength + CHUNK_HEADER_SIZE);
  const header = new DataView(buf);

  header.setUint8(0, 1); // message type 1 = video chunk
  header.setUint8(1, chunk.type === 'key' ? 1 : 0);
  header.setBigUint64(2, BigInt(chunk.timestamp));
  header.setUint32(10, chunk.duration ? Number(chunk.duration) : 0);

  chunk.copyTo(new Uint8Array(buf, CHUNK_HEADER_SIZE));
  lastVideoChunkTs = BigInt(chunk.timestamp);
  return buf;
}
|
|
|
// Push the cached decoder config (as a JSON control message) to every
// currently-open downstream video channel. No-op when no config is cached.
function broadcastDecoderConfig() {
  if (!decoderConfigCache) return;
  const message = JSON.stringify({ type: 'config', config: decoderConfigCache });
  for (const dc of downstreamVideoChannels.values()) {
    if (dc.readyState === 'open') dc.send(message);
  }
}
|
|
|
// Fan a binary video payload out to every open downstream channel,
// optionally skipping the channel the payload arrived on.
function broadcastVideoPayload(payload, exclude) {
  for (const dc of downstreamVideoChannels.values()) {
    if (dc !== exclude && dc.readyState === 'open') dc.send(payload);
  }
}
|
|
|
// Wire up the datachannel that carries video *from* our upstream peer.
function setupUpstreamVideoChannel(channel) {
  upstreamVideoChannel = channel;
  channel.binaryType = 'arraybuffer';
  channel.onmessage = (evt) => handleVideoDataMessage(evt.data, channel);
  channel.onclose = () => {
    upstreamVideoChannel = null;
  };
  channel.onerror = (e) => console.warn('Upstream video channel error', e);
}
|
|
|
// Wire up a datachannel that carries video *to* one downstream peer.
// The channel joins the fanout map only once it actually opens.
function setupDownstreamVideoChannel(targetId, channel) {
  channel.binaryType = 'arraybuffer';
  channel.onopen = () => {
    downstreamVideoChannels.set(targetId, channel);
    // Late joiners need the decoder config before any chunks make sense.
    if (decoderConfigCache) {
      channel.send(JSON.stringify({ type: 'config', config: decoderConfigCache }));
    }
  };
  channel.onclose = () => {
    downstreamVideoChannels.delete(targetId);
  };
  channel.onerror = (e) => console.warn('Downstream video channel error', e);
}
|
|
|
/**
 * Handles one message from the upstream video datachannel.
 * String messages carry JSON control data (currently only the decoder
 * config); binary messages carry packed encoded chunks, which are decoded
 * locally (after keyframe gating) and relayed unchanged to all downstream
 * channels.
 * @param {ArrayBuffer|Blob|string} data - raw datachannel payload
 * @param {RTCDataChannel} inboundChannel - excluded from the relay fanout
 */
async function handleVideoDataMessage(data, inboundChannel) {
  if (typeof data === 'string') {
    try {
      const msg = JSON.parse(data);
      if (msg.type === 'config') {
        decoderConfigCache = msg.config;
        configureDecoderIfNeeded(decoderConfigCache);
        // Pass the config on so our own downstreams can decode too.
        broadcastDecoderConfig();
      }
    } catch (err) {
      console.warn('Failed to parse video config', err);
    }
    return;
  }

  // Binary path: normalize Blob -> ArrayBuffer, then unpack the header.
  const buffer = data instanceof ArrayBuffer ? data : await data.arrayBuffer();
  const chunk = unpackEncodedChunk(buffer);
  if (!chunk) return;
  if (!hasReceivedKeyframe && chunk.type === 'delta') {
    // Ignore delta frames until we see a keyframe after configure/reset
    return;
  }
  if (chunk.type === 'key') {
    hasReceivedKeyframe = true;
  }
  dcStats.video.rxBytes += buffer.byteLength;
  dcStats.video.rxFrames += 1;
  // If we have downstreams, count this as outbound too (relay fanout)
  if (downstreamVideoChannels.size > 0) {
    dcStats.video.txBytes += buffer.byteLength;
    dcStats.video.txFrames += 1;
  }
  if (videoDecoder) {
    decodeQueue += 1;
    videoDecoder.decode(chunk);
  }
  // Relay the original packed bytes (single-encode fanout: no re-encode).
  broadcastVideoPayload(buffer, inboundChannel);
}
|
|
|
// Parse one binary datachannel message produced by packEncodedChunk back
// into an EncodedVideoChunk. Returns null for non-chunk messages or
// buffers too small to carry a header plus payload.
function unpackEncodedChunk(buffer) {
  if (!buffer || buffer.byteLength <= CHUNK_HEADER_SIZE) return null;

  const header = new DataView(buffer);
  if (header.getUint8(0) !== 1) return null; // not a video-chunk message

  return new EncodedVideoChunk({
    type: header.getUint8(1) === 1 ? 'key' : 'delta',
    timestamp: Number(header.getBigUint64(2)),
    duration: header.getUint32(10),
    data: new Uint8Array(buffer, CHUNK_HEADER_SIZE),
  });
}
|
|
|
/**
 * Lazily creates and configures the global VideoDecoder from `config`.
 * No-op when already configured, when `config` is falsy, or when the
 * WebCodecs decoder API is unavailable or rejects the config.
 * Resets the keyframe gate and attaches the render pipeline on success.
 * @param {VideoDecoderConfig} config - decoder config from the head node
 */
async function configureDecoderIfNeeded(config) {
  if (typeof VideoDecoder === 'undefined') {
    console.warn('VideoDecoder API not supported');
    return;
  }
  if (videoDecoderConfigured || !config) return;

  // BUG FIX: check support *before* constructing the decoder, so an
  // unsupported config no longer leaves an orphaned, unconfigured
  // VideoDecoder instance in the `videoDecoder` global.
  const support = await VideoDecoder.isConfigSupported(config).catch(() => ({ supported: false }));
  if (!support.supported) {
    console.warn('Decoder config unsupported', config);
    return;
  }

  videoDecoder = new VideoDecoder({
    output: handleDecodedFrame,
    error: (e) => console.error('VideoDecoder error', e),
  });
  videoDecoder.configure(config);
  videoDecoderConfigured = true;
  // Require a fresh keyframe after (re)configuration before decoding deltas.
  hasReceivedKeyframe = false;
  ensureRenderedStream();
}
|
|
|
// Lazily create the MediaStreamTrackGenerator that renders decoded frames
// into the remote <video> element, reusing the element's existing
// MediaStream when one is already attached (e.g. carrying relay audio).
function ensureRenderedStream() {
  if (videoGenerator && videoWriter) return;

  videoGenerator = new MediaStreamTrackGenerator({ kind: 'video' });
  videoWriter = videoGenerator.writable.getWriter();

  const existing = remoteVideo.srcObject;
  let ms = existing instanceof MediaStream ? existing : new MediaStream();
  try {
    ms.addTrack(videoGenerator);
  } catch (e) {
    // addTrack can throw; fall back to a fresh stream with just our track.
    ms = new MediaStream([videoGenerator]);
  }
  remoteVideo.srcObject = ms;
  remoteVideo.style.display = 'block';
}
|
|
|
// VideoDecoder output callback: hand each decoded frame to the track
// generator's writer (which takes ownership of it), tracking queue depth.
// Frames are closed explicitly whenever the writer can't take them.
function handleDecodedFrame(frame) {
  decodeQueue = Math.max(0, decodeQueue - 1);

  if (!videoWriter) {
    frame.close();
    return;
  }

  videoWriter.write(frame).catch((err) => {
    console.warn('Failed to write decoded frame', err);
    frame.close();
  });
}
|
|
|
/**
 * Tears down the entire datachannel video pipeline: reader/processor,
 * encoder, decoder, renderer, and all video datachannels. Safe to call
 * repeatedly; each step is individually guarded because WebCodecs and
 * stream objects throw when closed twice or in the wrong state.
 */
function cleanupDataChannelVideo() {
  if (videoReader) {
    try {
      videoReader.cancel();
    } catch (e) {}
  }
  videoReader = null;
  videoProcessor = null;
  if (videoEncoder) {
    try {
      // Best-effort flush; close immediately regardless of its outcome.
      videoEncoder.flush().catch(() => {});
      videoEncoder.close();
    } catch (e) {}
  }
  videoEncoder = null;
  if (videoDecoder) {
    try {
      videoDecoder.close();
    } catch (e) {}
  }
  videoDecoder = null;
  videoDecoderConfigured = false;
  decoderConfigCache = null;
  // Force delta-frame dropping until the next keyframe after a rebuild.
  hasReceivedKeyframe = false;
  if (videoWriter) {
    try {
      videoWriter.close();
    } catch (e) {}
  }
  videoWriter = null;
  videoGenerator = null;
  if (upstreamVideoChannel) {
    upstreamVideoChannel.close();
    upstreamVideoChannel = null;
  }
  downstreamVideoChannels.forEach((dc) => dc.close());
  downstreamVideoChannels.clear();
  lastVideoChunkTs = 0n;
}
|
|
|
// --- 3. Health Reporting ---
// Report a link-health score every 5s; poll debug metrics every 2s.
setInterval(calculateAndReportHealth, 5000);
setInterval(pollPeerMetrics, 2000);

/**
 * Computes a 0-100 health score from inbound video RTP stats (packet loss
 * and jitter) on the upstream connection and reports it via 'update_score'.
 * Head nodes (localStream set) and datachannel-video mode always report 100.
 */
async function calculateAndReportHealth() {
  if (useDataChannelVideo) {
    socket.emit('update_score', 100);
    return;
  }
  if (localStream) {
    // Broadcasters have no upstream link to measure.
    socket.emit('update_score', 100);
    return;
  }
  if (!currentUpstreamPC) return;

  try {
    const stats = await currentUpstreamPC.getStats();
    let packetsLost = 0;
    let packetsReceived = 0;
    let jitter = 0;

    stats.forEach((report) => {
      if (report.type === 'inbound-rtp' && report.kind === 'video') {
        packetsLost = report.packetsLost || 0;
        packetsReceived = report.packetsReceived || 0;
        jitter = report.jitter || 0;
      }
    });

    const totalPackets = packetsReceived + packetsLost;
    if (totalPackets === 0) return; // no media flowing yet

    const lossRate = packetsLost / totalPackets;
    // Loss is weighted 5x; jitter (seconds) becomes a percentage-like penalty.
    const score = 100 - lossRate * 100 * 5 - jitter * 100;
    const finalScore = Math.max(0, Math.min(100, score));
    console.log(`Health: ${finalScore.toFixed(0)} | Loss: ${(lossRate * 100).toFixed(2)}%`);

    socket.emit('update_score', finalScore);
  } catch (e) {
    // BUG FIX: failures were silently swallowed; getStats can reject during
    // connection teardown, which is expected but worth a log line.
    console.warn('Health report failed', e);
  }
}
|
|
|
// --- 4. Socket Events (Logic Updated for Smart Disconnect) ---
|
|
|
|
// Server assigned us a downstream peer: remember its id and open a relay
// connection to it.
socket.on('connect_to_downstream', async ({ downstreamId: targetId }) => {
  downstreamId = targetId;
  await setupDownstreamConnection(targetId);
});
|
|
|
// Server told us to drop our downstream: close the active relay connection
// plus any fading ones left over from handovers.
socket.on('disconnect_downstream', () => {
  if (currentDownstreamPC) {
    currentDownstreamPC.close();
    currentDownstreamPC = null;
    downstreamId = null;
  }
  for (const pc of oldDownstreamPCs) pc.close();
  oldDownstreamPCs = [];
});
|
|
|
// Central WebRTC signaling relay. Offers always come from our (possibly
// new) upstream; answers are matched to the current downstream; ICE
// candidates are spread across the live and fading connections.
socket.on('signal_msg', async ({ sender, type, sdp, candidate }) => {
  if (type === 'offer') {
    await handleUpstreamOffer(sender, sdp);
  } else if (type === 'answer') {
    if (currentDownstreamPC && sender === downstreamId) {
      await currentDownstreamPC.setRemoteDescription(new RTCSessionDescription(sdp));
    }
  } else if (type === 'candidate') {
    const ice = new RTCIceCandidate(candidate);
    // NOTE(review): candidates are applied to the upstream and fading PCs
    // regardless of `sender` (no upstream peer id is tracked to gate on),
    // relying on addIceCandidate rejecting mismatches with the errors
    // swallowed below. Confirm this is intentional.
    if (currentDownstreamPC && sender === downstreamId) currentDownstreamPC.addIceCandidate(ice).catch((e) => {});
    if (currentUpstreamPC) currentUpstreamPC.addIceCandidate(ice).catch((e) => {});
    oldUpstreamPCs.forEach((pc) => pc.addIceCandidate(ice).catch((e) => {}));
    oldDownstreamPCs.forEach((pc) => pc.addIceCandidate(ice).catch((e) => {}));
  }
});
|
|
|
// Fatal server-side error: surface it to the user, then reset to the lobby.
socket.on('error_msg', (msg) => {
  alert(msg);
  location.reload();
});
|
|
|
// The broadcaster stopped: notify the user and reset back to the lobby.
socket.on('stream_ended', () => {
  alert('Stream ended by host');
  location.reload();
});
|
|
|
// Keyframe request from the network. On the standard RTP path the browser
// answers PLI itself, so nothing beyond logging is needed here.
socket.on('request_keyframe', () => {
  console.log('Network requested keyframe');
});
|
|
|
// --- 5. WebRTC Logic (Merged Smart Disconnect) ---

/**
 * Accepts an SDP offer from a (possibly new) upstream peer. Builds a fresh
 * RTCPeerConnection, attaches the video datachannel / media tracks, and
 * answers. The previous upstream PC is only retired once media actually
 * arrives ("smart disconnect": make-before-break, with a 15s safety
 * timeout in case the new connection never produces a track).
 * @param {string} senderId - socket id of the offering upstream peer
 * @param {RTCSessionDescriptionInit} sdp - the remote offer
 */
async function handleUpstreamOffer(senderId, sdp) {
  const newPC = new RTCPeerConnection({ iceServers });

  // Safety: If connection hangs, kill old one eventually
  let safetyTimer = setTimeout(() => {
    if (currentUpstreamPC && currentUpstreamPC !== newPC) {
      currentUpstreamPC.close();
    }
  }, 15000);

  newPC.ondatachannel = (event) => {
    if (event.channel.label === 'video') {
      console.log('Upstream video datachannel attached');
      setupUpstreamVideoChannel(event.channel);
    }
  };

  newPC.ontrack = async (event) => {
    console.log('Received track from upstream:', event);
    clearTimeout(safetyTimer); // Success!

    if (event.track.kind === 'video') {
      if (useDataChannelVideo) {
        console.log('Ignoring incoming video track; datachannel video active');
        return;
      }
      // Keep three copies: the original, one to display, one to relay on.
      origVideoStream = event.streams[0];
      displayedVideoStream = origVideoStream.clone();
      sentVideoStream = origVideoStream.clone();

      origVideoTrack = event.track;
      displayedVideoTrack = origVideoTrack.clone();
      sentVideoTrack = origVideoTrack.clone();
    } else if (event.track.kind === 'audio') {
      origAudioStream = event.streams[0];
      displayedAudioStream = origAudioStream.clone();
      sentAudioStream = origAudioStream.clone();

      origAudioTrack = event.track;
      displayedAudioTrack = origAudioTrack.clone();
      sentAudioTrack = origAudioTrack.clone();
    }

    // Rebuild displayedStream
    const displayedStream = new MediaStream();
    if (displayedVideoTrack) displayedStream.addTrack(displayedVideoTrack);
    if (displayedAudioTrack) displayedStream.addTrack(displayedAudioTrack);
    if (!useDataChannelVideo) {
      remoteVideo.srcObject = displayedStream;
    } else if (displayedAudioTrack) {
      // Datachannel mode: only audio rides RTP; merge it into whatever
      // stream the video generator is already rendering into.
      if (!remoteVideo.srcObject) {
        remoteVideo.srcObject = new MediaStream([displayedAudioTrack]);
      } else {
        try {
          remoteVideo.srcObject.addTrack(displayedAudioTrack);
        } catch (e) {
          remoteVideo.srcObject = new MediaStream([displayedAudioTrack]);
        }
      }
    }
    statusDiv.innerText = `Status: Connected | ID: ${socket.id}`;

    if (currentDownstreamPC) {
      console.log('Relaying new upstream stream to downstream');
      // Swap our downstream's senders over to the freshly-cloned tracks.
      const videoSender = currentDownstreamPC.getSenders().find((s) => s.track && s.track.kind === 'video');
      const audioSender = currentDownstreamPC.getSenders().find((s) => s.track && s.track.kind === 'audio');
      if (videoSender) await videoSender.replaceTrack(sentVideoTrack);
      if (audioSender) await audioSender.replaceTrack(sentAudioTrack);
    }

    // Smart Disconnect: Old connection dies immediately upon success
    if (currentUpstreamPC && currentUpstreamPC !== newPC) {
      const oldPC = currentUpstreamPC;
      setTimeout(() => {
        oldPC.close();
        oldUpstreamPCs = oldUpstreamPCs.filter((pc) => pc !== oldPC);
      }, 1000);
    }
    currentUpstreamPC = newPC;
  };

  newPC.onicecandidate = (event) => {
    if (event.candidate) socket.emit('signal_msg', { target: senderId, type: 'candidate', candidate: event.candidate });
  };

  await newPC.setRemoteDescription(new RTCSessionDescription(sdp));
  const answer = await newPC.createAnswer();
  await newPC.setLocalDescription(answer);
  socket.emit('signal_msg', { target: senderId, type: 'answer', sdp: answer });
}
|
|
|
/**
 * Opens (or replaces) the relay connection to `targetId`. A previous
 * downstream PC is kept alive for 5s so the peer can switch seamlessly.
 * Head nodes send their own tracks; relays forward cloned upstream tracks.
 * Video travels over a datachannel instead of RTP when
 * `useDataChannelVideo` is set (audio always rides RTP).
 * @param {string} targetId - socket id of the downstream peer
 */
async function setupDownstreamConnection(targetId) {
  if (currentDownstreamPC) {
    const oldPC = currentDownstreamPC;
    oldDownstreamPCs.push(oldPC);
    setTimeout(() => {
      oldPC.close();
      oldDownstreamPCs = oldDownstreamPCs.filter((pc) => pc !== oldPC);
    }, 5000);
  }

  currentDownstreamPC = new RTCPeerConnection({ iceServers });

  if (useDataChannelVideo) {
    const videoDC = currentDownstreamPC.createDataChannel('video');
    setupDownstreamVideoChannel(targetId, videoDC);
  }

  if (localStream) {
    console.log('Sending local stream tracks to downstream');
    localStream.getAudioTracks().forEach((track) => currentDownstreamPC.addTrack(track.clone(), localStream));
    if (!useDataChannelVideo) {
      localStream.getVideoTracks().forEach((track) => currentDownstreamPC.addTrack(track.clone(), localStream));
    }
  } else if (currentUpstreamPC) {
    console.log('Relaying upstream stream tracks to downstream');
    currentUpstreamPC.getReceivers().forEach((receiver) => {
      if (!receiver.track) return;
      if (receiver.track.kind === 'audio') {
        const cloned = receiver.track.clone();
        const stream = new MediaStream([cloned]);
        currentDownstreamPC.addTrack(cloned, stream);
      } else if (!useDataChannelVideo) {
        // Prefer the pre-cloned relay track when handleUpstreamOffer made one.
        const sentTrack = sentVideoTrack || receiver.track.clone();
        const sentStream = sentVideoStream || new MediaStream([sentTrack]);
        currentDownstreamPC.addTrack(sentTrack, sentStream);
      }
    });
  }

  currentDownstreamPC.onicecandidate = (event) => {
    if (event.candidate) socket.emit('signal_msg', { target: targetId, type: 'candidate', candidate: event.candidate });
  };

  // Push video senders toward maximum quality: effectively uncapped
  // bitrate, 60fps, no downscaling, resolution-preserving degradation.
  await Promise.all(
    currentDownstreamPC
      .getSenders()
      .filter((sender) => sender.track && sender.track.kind === 'video')
      .map(async (sender) => {
        const params = sender.getParameters();
        params.encodings = params.encodings.map((enc) => {
          enc.maxBitrate = 200_000_000;
          enc.maxFramerate = 60;
          enc.scaleResolutionDownBy = 1.0;
          enc.priority = 'high';
          return enc;
        });
        params.degradationPreference = 'maintain-resolution';
        await sender.setParameters(params);
      })
  );

  const offer = await currentDownstreamPC.createOffer();
  // offer.sdp = offer.sdp.replace(/b=AS:([0-9]+)/g, 'b=AS:4000');
  // if (!offer.sdp.includes('b=AS:')) offer.sdp = offer.sdp.replace(/(m=video.*\r\n)/, '$1b=AS:4000\r\n');

  await currentDownstreamPC.setLocalDescription(offer);
  socket.emit('signal_msg', { target: targetId, type: 'offer', sdp: offer });
}
|
|
|
// --- 6. Debug Metrics (bitrate / loss / fps) ---

/**
 * Periodic (2s) metrics sampler. In datachannel mode it derives rates from
 * dcStats; otherwise it pulls RTC stats from the active peer connections.
 * Results go to the on-page metric boxes and to the monitor dashboard
 * via 'report_metrics'.
 */
async function pollPeerMetrics() {
  try {
    if (useDataChannelVideo) {
      const dcDisplay = computeDataChannelMetrics();
      renderDataChannelMetrics(dcDisplay);
      socket.emit('report_metrics', { inbound: dcDisplay.inbound, outbound: dcDisplay.outbound });
    } else {
      const upstreamResult = currentUpstreamPC
        ? await collectInboundMetrics(currentUpstreamPC, metricsHistory.upstream)
        : null;
      const downstreamResult = currentDownstreamPC
        ? await collectOutboundMetrics(currentDownstreamPC, metricsHistory.downstream)
        : null;

      // Keep the previous snapshot while a PC exists but yielded no stats
      // this round; drop it entirely once the PC is gone.
      metricsHistory.upstream = upstreamResult
        ? upstreamResult.snapshot
        : currentUpstreamPC
        ? metricsHistory.upstream
        : null;
      metricsHistory.downstream = downstreamResult
        ? downstreamResult.snapshot
        : currentDownstreamPC
        ? metricsHistory.downstream
        : null;

      renderMetrics(upstreamResult ? upstreamResult.display : null, downstreamResult ? downstreamResult.display : null);

      // Stream metrics to the monitor dashboard
      socket.emit('report_metrics', {
        inbound: upstreamResult ? upstreamResult.display : null,
        outbound: downstreamResult ? downstreamResult.display : null,
      });
    }
  } catch (err) {
    console.warn('Metrics poll failed', err);
  }
}
|
|
|
/**
 * Samples inbound video RTP stats from `pc` and derives display metrics.
 * Rates (bitrate/fps) require a prior snapshot; `previous` is only trusted
 * when its tag matches this PC (tags are lazily stamped per connection so
 * snapshots never cross connections after a handover).
 * @param {RTCPeerConnection} pc
 * @param {?object} previous - snapshot returned by the prior call, or null
 * @returns {?{display: object, snapshot: object}} null when no inbound video stats exist yet
 */
async function collectInboundMetrics(pc, previous) {
  const stats = await pc.getStats();
  let inboundVideo = null;
  let candidatePair = null;

  // Lazily stamp a random tag on the PC to tie snapshots to this connection.
  const tag = pc.__metricsTag || (pc.__metricsTag = Math.random().toString(36).slice(2));
  const prev = previous && previous.tag === tag ? previous : null;

  stats.forEach((report) => {
    if (report.type === 'inbound-rtp' && report.kind === 'video' && !report.isRemote) inboundVideo = report;
    if (report.type === 'candidate-pair' && report.state === 'succeeded' && report.nominated) candidatePair = report;
  });

  if (!inboundVideo) return null;

  const deltaMs = prev ? inboundVideo.timestamp - prev.timestamp : null;
  const bytesDelta = prev ? inboundVideo.bytesReceived - prev.bytesReceived : null;
  const bitrateKbps = deltaMs && deltaMs > 0 && bytesDelta >= 0 ? (bytesDelta * 8) / deltaMs : null; // timestamp is ms
  const framesDelta =
    prev && inboundVideo.framesDecoded !== undefined && prev.framesDecoded !== undefined
      ? inboundVideo.framesDecoded - prev.framesDecoded
      : null;
  const fps = framesDelta !== null && deltaMs && deltaMs > 0 ? (framesDelta * 1000) / deltaMs : null;

  const packetLossPct =
    inboundVideo.packetsLost !== undefined && inboundVideo.packetsReceived !== undefined
      ? (inboundVideo.packetsLost / (inboundVideo.packetsReceived + inboundVideo.packetsLost)) * 100
      : null;
  // jitter is reported in seconds; convert to ms for display.
  const jitterMs = inboundVideo.jitter !== undefined ? inboundVideo.jitter * 1000 : null;
  const rttMs =
    candidatePair && candidatePair.currentRoundTripTime !== undefined
      ? candidatePair.currentRoundTripTime * 1000
      : null;

  // Resolve a codec label from the referenced codec stats entry, if any.
  const codecReport = inboundVideo.codecId && stats.get ? stats.get(inboundVideo.codecId) : null;
  const codecLabel = codecReport ? codecReport.mimeType || codecReport.codecId || codecReport.sdpFmtpLine || '' : null;

  return {
    display: {
      bitrateKbps,
      fps,
      resolution:
        inboundVideo.frameWidth && inboundVideo.frameHeight
          ? `${inboundVideo.frameWidth}x${inboundVideo.frameHeight}`
          : null,
      packetLossPct,
      jitterMs,
      rttMs,
      pli: inboundVideo.pliCount,
      nack: inboundVideo.nackCount,
      fir: inboundVideo.firCount,
      framesDropped: inboundVideo.framesDropped,
      codec: codecLabel,
      state: pc.iceConnectionState || pc.connectionState,
    },
    snapshot: {
      timestamp: inboundVideo.timestamp,
      bytesReceived: inboundVideo.bytesReceived || 0,
      framesDecoded: inboundVideo.framesDecoded || 0,
      tag,
    },
  };
}
|
|
|
/**
 * Samples outbound video RTP stats from `pc` and derives display metrics.
 * Mirrors collectInboundMetrics: rates need a tag-matched prior snapshot;
 * packet loss comes from the remote-inbound-rtp report (receiver's view).
 * @param {RTCPeerConnection} pc
 * @param {?object} previous - snapshot returned by the prior call, or null
 * @returns {?{display: object, snapshot: object}} null when no outbound video stats exist yet
 */
async function collectOutboundMetrics(pc, previous) {
  const stats = await pc.getStats();
  let outboundVideo = null;
  let remoteInbound = null;
  let candidatePair = null;

  // Lazily stamp a random tag on the PC to tie snapshots to this connection.
  const tag = pc.__metricsTag || (pc.__metricsTag = Math.random().toString(36).slice(2));
  const prev = previous && previous.tag === tag ? previous : null;

  stats.forEach((report) => {
    if (report.type === 'outbound-rtp' && report.kind === 'video' && !report.isRemote) outboundVideo = report;
    if (report.type === 'remote-inbound-rtp' && report.kind === 'video') remoteInbound = report;
    if (report.type === 'candidate-pair' && report.state === 'succeeded' && report.nominated) candidatePair = report;
  });

  if (!outboundVideo) return null;

  const deltaMs = prev ? outboundVideo.timestamp - prev.timestamp : null;
  const bytesDelta = prev ? outboundVideo.bytesSent - prev.bytesSent : null;
  const bitrateKbps = deltaMs && deltaMs > 0 && bytesDelta >= 0 ? (bytesDelta * 8) / deltaMs : null;
  const framesDelta =
    prev && outboundVideo.framesEncoded !== undefined && prev.framesEncoded !== undefined
      ? outboundVideo.framesEncoded - prev.framesEncoded
      : null;
  const fps = framesDelta !== null && deltaMs && deltaMs > 0 ? (framesDelta * 1000) / deltaMs : null;

  // Loss as observed by the receiving peer (RTCP receiver reports).
  let packetLossPct = null;
  if (remoteInbound && remoteInbound.packetsLost !== undefined && remoteInbound.packetsReceived !== undefined) {
    packetLossPct = (remoteInbound.packetsLost / (remoteInbound.packetsReceived + remoteInbound.packetsLost)) * 100;
  }

  const rttMs =
    candidatePair && candidatePair.currentRoundTripTime !== undefined
      ? candidatePair.currentRoundTripTime * 1000
      : null;
  // Resolve a codec label from the referenced codec stats entry, if any.
  const codecReport = outboundVideo.codecId && stats.get ? stats.get(outboundVideo.codecId) : null;
  const codecLabel = codecReport ? codecReport.mimeType || codecReport.codecId || codecReport.sdpFmtpLine || '' : null;

  return {
    display: {
      bitrateKbps,
      fps,
      resolution:
        outboundVideo.frameWidth && outboundVideo.frameHeight
          ? `${outboundVideo.frameWidth}x${outboundVideo.frameHeight}`
          : null,
      packetLossPct,
      rttMs,
      qualityLimit: outboundVideo.qualityLimitationReason || 'none',
      nack: outboundVideo.nackCount,
      pli: outboundVideo.pliCount,
      fir: outboundVideo.firCount,
      retransmits: outboundVideo.retransmittedPacketsSent,
      codec: codecLabel,
      state: pc.iceConnectionState || pc.connectionState,
    },
    snapshot: {
      timestamp: outboundVideo.timestamp,
      bytesSent: outboundVideo.bytesSent || 0,
      framesEncoded: outboundVideo.framesEncoded || 0,
      tag,
    },
  };
}
|
|
|
function renderMetrics(inboundDisplay, outboundDisplay) {
  // DataChannel fanout mode renders through renderDataChannelMetrics instead.
  if (useDataChannelVideo) return;

  // --- Upstream (inbound) panel ---
  if (!currentUpstreamPC) {
    upstreamMetricsBox.innerHTML = 'No upstream peer (head broadcaster).';
  } else if (!inboundDisplay) {
    upstreamMetricsBox.innerHTML = 'Collecting inbound stats...';
  } else {
    const inboundRows = [
      ['State', inboundDisplay.state || '--'],
      ['Bitrate', formatBitrate(inboundDisplay.bitrateKbps)],
      ['FPS', formatNumber(inboundDisplay.fps)],
      ['Resolution', inboundDisplay.resolution || '--'],
      ['Loss', formatPercent(inboundDisplay.packetLossPct)],
      ['Jitter', formatMillis(inboundDisplay.jitterMs)],
      ['RTT', formatMillis(inboundDisplay.rttMs)],
      ['PLI/NACK/FIR', formatTriple(inboundDisplay.pli, inboundDisplay.nack, inboundDisplay.fir)],
      ['Frames Dropped', formatCount(inboundDisplay.framesDropped)],
      ['Codec', inboundDisplay.codec || '--'],
    ];
    upstreamMetricsBox.innerHTML = metricsLines(inboundRows);
  }

  // --- Downstream (outbound) panel ---
  if (!currentDownstreamPC) {
    downstreamMetricsBox.innerHTML = 'No downstream peer connected.';
  } else if (!outboundDisplay) {
    downstreamMetricsBox.innerHTML = 'Collecting outbound stats...';
  } else {
    const outboundRows = [
      ['State', outboundDisplay.state || '--'],
      ['Bitrate', formatBitrate(outboundDisplay.bitrateKbps)],
      ['FPS', formatNumber(outboundDisplay.fps)],
      ['Resolution', outboundDisplay.resolution || '--'],
      ['Loss (remote)', formatPercent(outboundDisplay.packetLossPct)],
      ['RTT', formatMillis(outboundDisplay.rttMs)],
      ['Quality Limit', outboundDisplay.qualityLimit || '--'],
      ['PLI/NACK/FIR', formatTriple(outboundDisplay.pli, outboundDisplay.nack, outboundDisplay.fir)],
      ['Retransmits', formatCount(outboundDisplay.retransmits)],
      ['Codec', outboundDisplay.codec || '--'],
    ];
    downstreamMetricsBox.innerHTML = metricsLines(outboundRows);
  }
}
|
|
|
|
function metricsLines(rows) {
  // Render [label, value] pairs as one flex row of HTML per metric.
  let html = '';
  for (const [label, value] of rows) {
    html += `<div class="metrics-line"><span>${label}</span><span>${value}</span></div>`;
  }
  return html;
}
|
|
|
|
function formatNumber(value, decimals = 1) {
  // '--' placeholder for null/undefined/NaN/Infinity.
  if (!Number.isFinite(value)) return '--';
  return value.toFixed(decimals);
}
|
|
|
|
function formatBitrate(kbps) {
  // Whole-number kbps, or '--' when the rate is unknown.
  if (!Number.isFinite(kbps)) return '--';
  return `${kbps.toFixed(0)} kbps`;
}
|
|
|
|
function formatPercent(value) {
  // One decimal place with a '%' suffix, or '--' when unknown.
  if (!Number.isFinite(value)) return '--';
  return `${value.toFixed(1)}%`;
}
|
|
|
|
function formatMillis(value) {
  // One decimal place with an ' ms' suffix, or '--' when unknown.
  if (!Number.isFinite(value)) return '--';
  return `${value.toFixed(1)} ms`;
}
|
|
|
|
function formatCount(value) {
  // Plain decimal rendering of a counter, or '--' when unknown.
  return Number.isFinite(value) ? String(value) : '--';
}
|
|
|
|
function formatTriple(a, b, c) {
  // Slash-joined counters (e.g. PLI/NACK/FIR); '-' stands in for unknowns.
  const show = (n) => (Number.isFinite(n) ? n : '-');
  return `${show(a)}/${show(b)}/${show(c)}`;
}
|
|
|
|
function computeDataChannelMetrics() {
  // Sample the DataChannel byte/frame counters and derive bitrate/FPS over
  // the window since the previous call. Mutates the dcStats.video sampler
  // state (last* fields) in place.
  const v = dcStats.video;
  const now = performance.now();

  // First invocation: seed the baseline so the next sample has a valid delta.
  if (v.lastSampleTs === null) {
    v.lastSampleTs = now;
    v.lastTxBytes = v.txBytes;
    v.lastRxBytes = v.rxBytes;
    v.lastTxFrames = v.txFrames;
    v.lastRxFrames = v.rxFrames;
  }

  const elapsedMs = now - v.lastSampleTs;
  if (elapsedMs > 0) {
    // Positive counter deltas become rates; non-positive deltas read as 0.
    const rate = (delta, scale) => (delta > 0 ? (delta * scale) / elapsedMs : 0);
    v.bitrateKbpsTx = rate(v.txBytes - v.lastTxBytes, 8);     // bytes -> kbps
    v.bitrateKbpsRx = rate(v.rxBytes - v.lastRxBytes, 8);
    v.fpsTx = rate(v.txFrames - v.lastTxFrames, 1000);        // frames/ms -> fps
    v.fpsRx = rate(v.rxFrames - v.lastRxFrames, 1000);

    // Roll the baseline forward for the next sample window.
    v.lastSampleTs = now;
    v.lastTxBytes = v.txBytes;
    v.lastRxBytes = v.rxBytes;
    v.lastTxFrames = v.txFrames;
    v.lastRxFrames = v.rxFrames;
  }

  // Resolution comes from the cached decoder config (same for both paths).
  const resolution = decoderConfigCache
    ? `${decoderConfigCache.codedWidth}x${decoderConfigCache.codedHeight}`
    : null;

  return {
    inbound: {
      bitrateKbps: v.bitrateKbpsRx,
      fps: v.fpsRx,
      resolution,
      state: 'datachannel',
      bytes: v.rxBytes,
      frames: v.rxFrames,
    },
    outbound: {
      bitrateKbps: v.bitrateKbpsTx,
      fps: v.fpsTx,
      resolution,
      state: 'datachannel',
      bytes: v.txBytes,
      frames: v.txFrames,
    },
  };
}
|
|
|
|
function renderDataChannelMetrics(display) {
  // Paint the DataChannel-path metrics panels: RX stats go in the upstream
  // box, TX stats in the downstream box.
  const { inbound, outbound } = display;

  const upstreamRows = [
    ['Path', 'DataChannel'],
    ['Bitrate RX', formatBitrate(inbound.bitrateKbps)],
    ['FPS RX', formatNumber(inbound.fps)],
    ['Resolution', inbound.resolution || '--'],
  ];
  upstreamMetricsBox.innerHTML = metricsLines(upstreamRows);

  const downstreamRows = [
    ['Path', 'DataChannel'],
    ['Bitrate TX', formatBitrate(outbound.bitrateKbps)],
    ['FPS TX', formatNumber(outbound.fps)],
    ['Resolution', outbound.resolution || '--'],
  ];
  downstreamMetricsBox.innerHTML = metricsLines(downstreamRows);
}
|