streamana/test.html

<!DOCTYPE html>
<html>
<head>
<script type="text/javascript" src="https://unpkg.com/glsl-canvas-js/dist/umd/glsl-canvas.min.js"></script>
<script type="text/javascript" src="glsl-canvas.js"></script>
<script type="text/javascript">
async function init() {
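// the part of the URL after '#' is the base URL for the HLS output;
// it's forwarded to the ffmpeg worker as a 'base-url' message below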
const stream_url = window.location.hash.substring(1);
// capture video from webcam
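// 4096x2160 is treated as an ideal constraint, so the browser picks the
// closest resolution the camera actually supports; frame rate is capped at 30fps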
const stream = await navigator.mediaDevices.getUserMedia({
audio: true,
video: {
width: 4096,
height: 2160,
frameRate: {
ideal: 30,
max: 30
}
}
});
// create a video element which will be used to grab frames and write them
// to a canvas so we can apply WebGL shaders
// it's also used to get the native video dimensions
const video = document.createElement("video");
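// mute local playback so the captured microphone audio isn't heard twice;
// the audio track itself is still added to the outgoing stream below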
video.muted = true;
// create a canvas for doing webgl
const canvas = document.createElement('canvas');
// use glsl-canvas to make managing webgl stuff easier
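// the canvas never gets added to the DOM, so clientWidth, clientHeight and
// getBoundingClientRect() (which glsl-canvas appears to use to size its viewport)
// would all report zero; the Proxy reports the canvas's own width/height instead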
const gl_canvas = new glsl.Canvas(new Proxy(canvas, {
get: function (target, name, receiver) {
if (name === 'getBoundingClientRect') {
return () => new DOMRect(0, 0, target.width, target.height);
}
if (name === 'clientWidth') {
return target.width;
}
if (name === 'clientHeight') {
return target.height;
}
const r = target[name];
return typeof r === 'function' ? r.bind(target) : r;
},
set: function (target, name, value) {
target[name] = value;
return true;
}
}));
// sample greyscale fragment shader
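// converts each pixel to luma using the Rec. 601 weights (0.299, 0.587, 0.114);
// u_resolution is supplied by glsl-canvas, u_texture is bound via setTexture below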
const fragmentShader = `
precision highp float;
uniform sampler2D u_texture;
uniform vec2 u_resolution;
void main() {
vec2 st = gl_FragCoord.xy / u_resolution.xy;
vec3 color = texture2D(u_texture, st).rgb;
float grey = dot(color, vec3(0.299, 0.587, 0.114));
gl_FragColor = vec4(vec3(grey), 1.0);
}`;
gl_canvas.load(fragmentShader);
// tell canvas to use frames from video
// TODO: can we set update interval?
gl_canvas.setTexture('u_texture', video, /*{ updateInterval: 33 }*/);
video.addEventListener('loadeddata', function () {
// make the canvas the same size as the native video dimensions so every pixel is seen
canvas.width = video.videoWidth;
canvas.height = video.videoHeight;
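// start playback so frames begin flowing from the camera into u_texture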
this.play();
// capture video from the canvas
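// captureStream(30) captures at up to 30fps; the canvas stream has no audio,
// so the microphone track from the original getUserMedia stream is added back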
const video_stream = canvas.captureStream(30);
video_stream.addTrack(stream.getAudioTracks()[0]);
// record the video
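// webm with H264 video is requested so ffmpeg can copy the video bitstream
// into the HLS segments without re-encoding (see '-c:v copy' below)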
const recorder = new MediaRecorder(video_stream, {
mimeType: "video/webm;codecs=H264",
audioBitsPerSecond: 128 * 1000,
videoBitsPerSecond: 2500 * 1000
});
// push encoded data into the ffmpeg worker
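// event.data is a Blob; its ArrayBuffer is put in the transfer list so it is
// moved into the worker rather than copied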
recorder.ondataavailable = async function (event) {
const data = await event.data.arrayBuffer();
ffmpeg_hls.postMessage({
type: 'video-data',
data: data
}, [data]);
};
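// ffmpeg.js built as an HLS worker; it appears to extend the stock ffmpeg.js
// message protocol (ready/run/stdout/stderr/exit/done/error/abort) with
// 'video-data', 'base-url' and 'start-video' messages, and presumably uploads
// whatever ffmpeg writes under /outbound/ to the base URL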
const ffmpeg_hls = new Worker('ffmpeg.js/ffmpeg-worker-hls.js');
ffmpeg_hls.onmessage = function (e) {
const msg = e.data;
switch (msg.type) {
case 'ready':
ffmpeg_hls.postMessage({
type: 'run',
arguments: [
'-i', '-', // read the recorded webm from stdin
//'-use_wallclock_as_timestamps', '1',
'-f', 'hls', // HLS muxer
'-c:v', 'copy', // pass the H264 video bitstream through without re-encoding
'-c:a', 'aac', // transcode the recorded audio to AAC
'-b:a', '128k',
'-hls_time', '2', // 2 second segments
'-hls_segment_type', 'mpegts',
'-hls_list_size', '2', // keep only the 2 newest segments in the playlist
'/outbound/output.m3u8' // playlist path in the worker's virtual filesystem
]
});
break;
case 'run':
console.log("RUNNING");
break;
case 'stdout':
console.log(msg.data);
break;
case 'stderr':
console.error(msg.data);
break;
case 'exit':
console.log("EXITED", msg.data);
recorder.stop();
break;
case 'done':
// due to async stdin read, we get this message immediately
//console.log("DONE", msg.data);
break;
case 'error':
console.error("ERROR", msg.data);
break;
case 'abort':
console.error("ABORT", msg.data);
break;
case 'start-video':
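// presumably sent by the worker once ffmpeg is ready to consume piped
// video data; only then is the recorder started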
ffmpeg_hls.postMessage({
type: 'base-url',
data: stream_url
});
// produce data every second, we'll be chunking it anyway
recorder.start(1000);
break;
}
};
// display the video locally so we can see what's going on
// note the element sets its height automatically to keep the correct
// aspect ratio
const monitor = document.getElementById('monitor');
monitor.srcObject = video_stream;
monitor.play();
});
video.srcObject = stream;
}
</script>
</head>
<body onload="init()">
<video style="width:50%" id="monitor" muted></video>
</body>
</html>