streamana/site/example.js

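// Example page script: capture the webcam, run it through a WebGL shader
// (greyscale, as a demo) on a canvas, and stream the canvas contents to the
// configured HLS ingestion URL using the ffmpeg library set below.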
import { GlCanvas } from './gl-canvas.js';
import { HLS } from './hls.js';
import shader from './greyscale-shader.js';
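
// restore the previously used ingestion URL (it is saved back to
// localStorage when streaming starts)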
const ingestion_url_el = document.getElementById('ingestion-url');
ingestion_url_el.value = localStorage.getItem('streamana-example-ingestion-url');
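
// the Go Live checkbox starts and stops streaming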
const go_live_el = document.getElementById('go-live');
go_live_el.disabled = false;
go_live_el.addEventListener('click', function () {
    if (this.checked) {
        start();
    } else {
        stop();
    }
});
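
// keep a pristine clone of the canvas element so a fresh canvas can be
// swapped in each time streaming starts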
let canvas_el = document.getElementById('canvas');
const canvas_proto = canvas_el.cloneNode();
const waiting_el = document.getElementById('waiting');
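
// the error alert is detached from the DOM now and re-inserted at its
// original position only if streaming fails (see cleanup() below)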
const error_alert_el = document.getElementById('error-alert');
const error_alert_el_parent = error_alert_el.parentNode;
const error_alert_el_nextSibling = error_alert_el.nextSibling;
error_alert_el_parent.removeChild(error_alert_el);
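
// the ffmpeg library URL is persisted to localStorage as the user edits it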
const ffmpeg_lib_url_el = document.getElementById('ffmpeg-lib-url');
ffmpeg_lib_url_el.value = localStorage.getItem('streamana-ffmpeg-lib-url');
ffmpeg_lib_url_el.addEventListener('input', function (e) {
    localStorage.setItem('streamana-ffmpeg-lib-url', this.value);
});
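
// the current HLS session, if any (created in start(), ended in stop())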
let hls;
async function start() {
    const ingestion_url = ingestion_url_el.value.trim();
    if (!ingestion_url) {
        go_live_el.checked = false;
        return;
    }
    localStorage.setItem('streamana-example-ingestion-url', ingestion_url);

    const ffmpeg_lib_url = ffmpeg_lib_url_el.value.trim() ||
                           ffmpeg_lib_url_el.placeholder.trim();

    go_live_el.disabled = true;
    waiting_el.classList.remove('d-none');
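
    // replace the canvas with a fresh, invisible clone for this session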
    const canvas_el_parent = canvas_el.parentNode;
    canvas_el_parent.removeChild(canvas_el);
    canvas_el = canvas_proto.cloneNode();
    canvas_el.classList.add('invisible');
    canvas_el_parent.appendChild(canvas_el);
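
    // hide any error alert left over from a previous run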
    if (error_alert_el.parentNode) {
        error_alert_el_parent.removeChild(error_alert_el);
    }

    let camera_stream, gl_canvas, canvas_stream, done = false;
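
    // clean up after an error or when streaming ends: stop the media tracks,
    // destroy the WebGL canvas, end the HLS session and reset the UI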
    function cleanup(err) {
        if (err) {
            console.error(err);
        }
        if (done) {
            return;
        }
        done = true;
        if (err) {
            error_alert_el_parent.insertBefore(error_alert_el, error_alert_el_nextSibling);
            error_alert_el.classList.add('show');
        }
        if (camera_stream) {
            for (let track of camera_stream.getTracks()) {
                track.stop();
            }
        }
        if (gl_canvas) {
            gl_canvas.destroy();
        }
        if (canvas_stream) {
            for (let track of canvas_stream.getTracks()) {
                track.stop();
            }
        }
        if (hls) {
            hls.end(!!err);
        }
        go_live_el.checked = false;
        go_live_el.disabled = false;
        waiting_el.classList.add('d-none');
        canvas_el.classList.add('d-none');
    }

    try {
        // create video element which will be used for grabbing the frames to
        // write to a canvas so we can apply webgl shaders
        // also used to get the native video dimensions
        const video_el = document.createElement('video');
        video_el.muted = true;
        video_el.playsInline = true;

        // Safari on iOS requires us to play() in the click handler and doesn't
        // track async calls. So we play a blank video first. After that, the video
        // element is blessed for script-driven playback.
        video_el.src = 'empty.mp4';
        await video_el.play();

        // capture video from webcam
        const video_constraints = {
            //width: 4096,
            //height: 2160,
            width: 1280,
            height: 720,
            frameRate: {
                ideal: 30,
                max: 30
            }
        };
        try {
            camera_stream = await navigator.mediaDevices.getUserMedia({
                audio: true,
                video: video_constraints
            });
        } catch (ex) {
            // retry in case audio isn't available
            console.warn("Failed to get user media, retrying without audio");
            camera_stream = await navigator.mediaDevices.getUserMedia({
                audio: false,
                video: video_constraints
            });
        }

        // use glsl-canvas to make managing webgl stuff easier
        // because it's not visible, client dimensions are zero so we
        // need to substitute actual dimensions instead
        gl_canvas = new GlCanvas(canvas_el, {
            // as an example, greyscale the stream
            fragmentString: shader
        });

        gl_canvas.on('error', cleanup);

        // tell canvas to use frames from video
        gl_canvas.setTexture('u_texture', video_el);

        // wait for video to load (must come after gl_canvas.setTexture() since it
        // registers a loadeddata handler which then registers a play handler)
        video_el.addEventListener('loadeddata', async function () {
            try {
                // make canvas same size as native video dimensions so every pixel is seen
                const portrait = this.videoHeight > this.videoWidth;
                if (portrait) {
                    canvas_el.width = this.videoHeight;
                    canvas_el.height = this.videoWidth;
                    canvas_el.classList.add('portrait');
                } else {
                    canvas_el.width = this.videoWidth;
                    canvas_el.height = this.videoHeight;
                }
                gl_canvas.setUniform('u_portrait', portrait);

                // start the camera video
                this.play();

                // capture video from the canvas
                // Note: Safari on iOS doesn't get any data, might be related to
                // https://bugs.webkit.org/show_bug.cgi?id=181663
                const frame_rate = camera_stream.getVideoTracks()[0].getSettings().frameRate;
                canvas_stream = canvas_el.captureStream(frame_rate);

                // add audio if present
                const audio_tracks = camera_stream.getAudioTracks();
                if (audio_tracks.length > 0) {
                    canvas_stream.addTrack(audio_tracks[0]);
                }
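
                // update() drives the canvas rendering via gl_canvas.onLoop(); in
                // portrait mode it also keeps the displayed height in step with the
                // parent's width; it runs on each HLS 'update' event and once on 'start'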
                function update() {
                    // update the canvas
                    if (gl_canvas.onLoop() && portrait) {
                        canvas_el.style.height = canvas_el_parent.clientWidth + 'px';
                    }
                }

                // start HLS from the canvas stream to the ingestion URL
                hls = new HLS(canvas_stream, ingestion_url, ffmpeg_lib_url, frame_rate, portrait);
                hls.addEventListener('run', () => console.log('HLS running'));
                hls.addEventListener('exit', ev => {
                    const msg = `HLS exited with status ${ev.detail.code}`;
                    if (ev.detail.code === 0) {
                        console.log(msg);
                        cleanup();
                    } else {
                        cleanup(msg);
                    }
                });
                hls.addEventListener('error', cleanup);
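
                // once streaming has started, hide the spinner, reveal the canvas
                // and re-enable the Go Live button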
                hls.addEventListener('start', () => {
                    waiting_el.classList.add('d-none');
                    canvas_el.classList.remove('invisible');
                    go_live_el.disabled = false;
                    update();
                });
                hls.addEventListener('update', update);

                await hls.start();
            } catch (ex) {
                cleanup(ex);
            }
        });

        // pass the stream from the camera to the video so it can render the frames
        video_el.srcObject = camera_stream;
    } catch (ex) {
        return cleanup(ex);
    }
}
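
// ending the HLS session should raise its 'exit' event (handled in start()),
// whose handler runs cleanup() and re-enables the Go Live toggle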
function stop() {
    go_live_el.disabled = true;
    hls.end();
}