Accessing the Camera from the Browser

I originally wanted to learn how to do live streaming with PHP and found an article (via Baidu) that did it like this: JavaScript accesses the camera, takes screenshots (by drawing the video into a canvas), sends them to the server over a WebSocket, and the server then forwards them on to viewers. Only images make it across in this flow, no audio, so what I had read was really a fake live stream. Still, the JavaScript for accessing the camera is worth recording here; a sketch of the screenshot-over-WebSocket part follows the page source below.

<!DOCTYPE html>
<html lang="zh-CN">
<head>
    <meta charset="UTF-8">
    <title>Camera Access</title>
</head>
<body>
    <!-- The autoplay attribute is required; without it the stream will not play. -->
    <video id="video" width="640" height="480" autoplay style="background: #000"></video>
    <a href="javascript:openMedia();">Open</a>
    <a href="javascript:closeMedia();">Close</a>

    <script>
        var video = document.getElementById('video');
        var mediaStream;
        // Open the camera and request a video-only stream.
        function openMedia() {
            navigator.mediaDevices.getUserMedia({video: true, audio: false})
                .then(getMedia)
                .catch(console.log);
        }
        // Attach the media stream to the video element.
        function getMedia(stream) {
            // Older browsers used video.src = URL.createObjectURL(stream);
            // srcObject is the current way to attach a stream.
            video.srcObject = stream;
            mediaStream = stream;
        }
        // Close the camera by stopping every track in the stream.
        function closeMedia() {
            if (mediaStream) {
                mediaStream.getTracks().forEach(function (track) {
                    track.stop();
                });
                video.srcObject = null;
            }
        }
    </script>
</body>
</html>
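
For completeness, here is a minimal sketch of the screenshot-over-WebSocket idea described at the top, meant to sit in the same page after the script above. The server address ws://localhost:8080, the 100 ms interval, and the JPEG quality are assumptions for illustration only; the original article did not give them, and the server would still have to relay each frame to viewers.

<script>
    // Draw the current video frame into an off-screen canvas and push it
    // over a WebSocket as a base64-encoded JPEG data URL.
    var canvas = document.createElement('canvas');
    canvas.width = 640;
    canvas.height = 480;
    var ctx = canvas.getContext('2d');
    var ws = new WebSocket('ws://localhost:8080'); // hypothetical relay server

    function sendFrame() {
        ctx.drawImage(video, 0, 0, canvas.width, canvas.height);
        ws.send(canvas.toDataURL('image/jpeg', 0.5));
    }

    ws.onopen = function () {
        // Roughly 10 frames per second; no audio is carried, as noted above.
        setInterval(sendFrame, 100);
    };
</script>

On the viewing side, each data URL forwarded by the server can simply be assigned to an img element's src, which is why this approach amounts to a slideshow rather than a real live stream.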