// Spawn a new ffmpeg child process from Node.js and split an RTMP live
// stream into a separate video and audio output.
var ffmpeg = child_process.spawn("ffmpeg", [
  "-re",
  "-y",
  "-i", "rtmp://server.name:port/application/stream",
  "-f", "tee",
  // Video branch: one PPM image per frame, scaled down, piped frame-by-frame.
  "-map", "0:0", "-an",
  "-f", "image2pipe",
  "-r", "30",
  "-q:v", "1",
  "-vf", "select=gt(scene\\,0.001),scale=320:240",
  "-vcodec", "ppm",
  "video.mjpeg",
  // Audio branch: mono 32-bit PCM WAV at 44.1 kHz.
  "-map", "0:1", "-vn",
  "-acodec", "pcm_s32le",
  "-ac", "1",
  "-ar", "44100",
  "audio.wav"
]);

// Receive the Motion-JPEG data frame by frame and pump it through
// ImageMagick to sharpen it a little.
video.on('data', function (data) {
  // Collect binary chunks and join them with Buffer.concat at the end.
  // (String concatenation onto a Buffer would corrupt the image bytes,
  // and `new Buffer(...)` is deprecated.)
  var chunks = [];
  var im = child_process.spawn("convert.exe", ["ppm:-", "-sharpen", "1x1", "-"]);
  im.stdout.on('data', function (imdata) {
    chunks.push(imdata);
  });
  // Send the processed image to the frontend via Socket.IO (broadcast).
  // The exit handler must live inside this callback so it sees this
  // frame's chunks.
  im.on('exit', function (code) {
    var frame = Buffer.concat(chunks).toString('base64');
    socket.broadcast.to(room).emit('canvas', frame);
  });
  // Feed the current frame into convert's stdin and close it so the
  // process can finish.
  im.stdin.write(data);
  im.stdin.end();
});

// Receive the data in the frontend and copy it onto the canvas.
sock.on('canvas', function (data) {
  try {
    var canvas = document.getElementById('videostreamshow');
    var context = canvas.getContext('2d');
    var imageObj = new Image();
    // Assign onload BEFORE src, otherwise a cached image may fire the
    // event before the handler is installed. Resize the canvas element
    // itself (context has no height/width properties).
    imageObj.onload = function () {
      canvas.height = imageObj.height;
      canvas.width = imageObj.width;
      context.drawImage(imageObj, 0, 0, 320, 150);
    };
    // NOTE(review): MIME type says jpeg although convert is fed PPM and
    // writes to "-" with no explicit output format — confirm the actual
    // output encoding.
    imageObj.src = "data:image/jpeg;base64," + data;
  } catch (e) {
    // Best-effort rendering: log instead of silently swallowing, so a
    // missing canvas element is at least visible during development.
    console.error('canvas draw failed', e);
  }
});
|