Audio from HTML5 audio and video elements is muted when using the Web Audio API

I am trying to get the audio from a video to use with the Web Audio API, but the video's audio is muted. When I test this code locally, the HTML5 audio element works (on jsfiddle it stops working once the Web Audio API is enabled), but the video has no audio either locally or on jsfiddle (it is muted, and the user cannot un-mute it). No errors are shown in the console. I have added the handling required by the autoplay policy changes: https://developers.google.com/web/updates/2017/09/autoplay-policy-changes#webaudio (a minimal sketch of that pattern is shown after the markup below). Here is my code:

<!doctype html>
<html>
<head>
  <meta charset="utf-8">
  <title>MyTitle</title>
  <base href="/">
  <meta name="viewport" content="width=device-width, initial-scale=1">
  <link rel="icon" type="image/x-icon" href="favicon.ico">
  <script src="https://unpkg.com/mathjs@7.6.0/dist/math.js"></script>
</head>
<body>
  <button type="button" id="start_web_audio" onclick="startFunction()">Click to allow Web Audio</button> <p>Required to start webaudio API due to changes in autoplay policy on modern browsers</p>
  <audio src="https://raw.githubusercontent.com/VGFP/AudioSamplesForBabylonJSProject/master/VideoDemo/V3_Voice_Only_ChID-BLITS-EBU-Narration.ogg" controls id="video" loop></audio>
  <!-- <video src="https://download.dolby.com/us/en/test-tones/dolby-atmos-trailer_amaze_1080.mp4" width=600 height=400 controls id="video"></video> -->
</body>
</html>
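
For reference, the autoplay-policy handling mentioned in the question usually amounts to creating (or resuming) the AudioContext inside a user-gesture handler. A minimal sketch, assuming the start_web_audio button from the markup above; the handler body is illustrative rather than the asker's actual startFunction:

document.getElementById('start_web_audio').addEventListener('click', function () {
  // Creating the context inside a click handler lets it start in the "running" state.
  var ctx = new (window.AudioContext || window.webkitAudioContext)();
  // If a context created earlier got suspended, resume it from the gesture.
  if (ctx.state === 'suspended') {
    ctx.resume().then(function () {
      console.log('AudioContext resumed');
    });
  }
});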


慕婉清6462132
1 Answer

侃侃无极

This is partly expected behaviour: when a MediaElementSource (MES) is created, its target MediaElement routes its audio stream to that MES, and if the MES is not connected to the AudioContext's destination, the MediaElement's audio is muted. But that is not exactly what is happening here. Your actual problem is that the file is not served in accordance with the Same-Origin Policy, so your code is not allowed to read that media's content, and playing the sound through a MES counts as reading the media from script (since you could connect that MES to anything afterwards). As a result, the MES produces only silence. To make your code work, you need to serve the file with a correct Access-Control-Allow-Origin header and have your page request it with the crossorigin attribute set. However, download.dolby.com is not configured to allow such cross-origin access, so you will have to host the file somewhere else. In the demo below we use a file served by wikimedia.org, which does allow cross-origin requests. (A stripped-down sketch of just this fix follows after the full demo.)

JavaScript:

// Required by new google policy, more here: https://developers.google.com/web/updates/2017/09/autoplay-policy-changes#webaudio
var context = null;
var myAudio, source, splitter, listener = null;
var FrontLeft, FrontCenter, FrontRight, SurroundLeft, SurroundRight, Sub = null;
var pannerNodesObjects = [FrontLeft, FrontCenter, FrontRight, SurroundLeft, SurroundRight, Sub];
var distanceFromScreen, screenCenterY = null;
var x_FrontLeft, y_FrontLeft, z_FrontLeft = null;
var x_FrontCenter, y_FrontCenter, z_FrontCenter = null;
var x_FrontRight, y_FrontRight, z_FrontRight = null;
var x_SurroundLeft, y_SurroundLeft, z_SurroundLeft = null;
var x_SurroundRight, y_SurroundRight, z_SurroundRight = null;
var x_Sub, y_Sub, z_Sub = null;
var web_Audio_enable = false;
var initailPosition = [
  [x_FrontLeft, y_FrontLeft, z_FrontLeft],
  [x_FrontCenter, y_FrontCenter, z_FrontCenter],
  [x_FrontRight, y_FrontRight, z_FrontRight],
  [x_SurroundLeft, y_SurroundLeft, z_SurroundLeft],
  [x_SurroundRight, y_SurroundRight, z_SurroundRight],
  [x_Sub, y_Sub, z_Sub]
];

// Resume playback once the user has interacted with the page.
function startFunction() {
  context = new AudioContext();
  // get the audio element
  myAudio = document.getElementById('video');
  // myAudio = document.querySelector('video');
  source = context.createMediaElementSource(myAudio);
  // var dest = context.createMediaStreamDestination();
  // Split channels L, R, SL, SR, C, LFE
  splitter = new ChannelSplitterNode(context, { numberOfOutputs: 6 });
  // let channel_merger = new ChannelMergerNode(context, {numberOfInputs: 2});
  listener = context.listener;
  source.connect(splitter);
  web_Audio_enable = true;
  start_function();
  console.log('Playback resumed successfully');
}

// Estimate screen size for sound source placement
// NOTE: this is an estimation; it is not very accurate, but accurate enough for this project
// Used by create_BabylonCamera
function estimate_ScreenParams() {
  var $el = document.createElement('div');
  $el.style.width = '1cm';
  $el.style.height = '1cm';
  $el.style.backgroundColor = '#ff0000';
  $el.style.position = 'fixed';
  $el.style.bottom = 0;
  document.body.appendChild($el);
  var screenHeight = window.screen.height / $el.offsetHeight;
  var screenWidth = window.screen.width / $el.offsetWidth;
  console.log("Screen Width in cm: " + screenWidth);
  console.log("Screen Height in cm: " + screenHeight);
  var screenDiagonalInches = Math.sqrt(Math.pow((window.screen.width / $el.offsetWidth), 2) + Math.pow((window.screen.height / $el.offsetHeight), 2)) / 2.54;
  console.log("Screen Diagonal in in: " + screenDiagonalInches);
  document.body.removeChild($el);
  // Screen center height in meters
  var screenCenterY = (screenHeight / 2) / 100;
  // Calculate distance from screen based on estimated screen diagonal length and resolution
  var screenResWidth = window.screen.width * window.devicePixelRatio;
  var screenResHeight = window.screen.height * window.devicePixelRatio;
  var loc_distanceFromScreen = null;
  // distanceFromScreen will be used for initial positioning of the Surround Left and Right speakers
  // Distance is in meters
  if (screenDiagonalInches < 14) {
    loc_distanceFromScreen = 0.61; // minimum distance
  } else {
    loc_distanceFromScreen = 0.61 + (Math.round(screenDiagonalInches - 14) / 2) * 0.15;
  }
  console.log("Estimated distance from screen: " + loc_distanceFromScreen);
  distanceFromScreen = loc_distanceFromScreen;
  return screenCenterY;
}

function set_pannerNode(node, panningModel /* 'HRTF' */, distanceModel /* "linear", "inverse" (default) or "exponential" */, refDistance, maxDistance, rolloffFactor, coneInnerAngle, coneOuterAngle, coneOuterGain, x, y, z /* position */, n /* index into pannerNodesObjects */) {
  node = context.createPanner();
  // Setting options
  node.panningModel = panningModel;
  node.distanceModel = distanceModel;
  node.refDistance = refDistance;
  node.maxDistance = maxDistance;
  node.rolloffFactor = rolloffFactor;
  node.coneInnerAngle = coneInnerAngle;
  node.coneOuterAngle = coneOuterAngle;
  node.coneOuterGain = coneOuterGain;
  // Setting position
  if (node.positionX) {
    node.positionX.setValueAtTime(x, context.currentTime);
    node.positionY.setValueAtTime(y, context.currentTime);
    node.positionZ.setValueAtTime(z, context.currentTime);
  } else {
    node.setPosition(x, y, z);
  }
  pannerNodesObjects[n] = node;
}

// Function to rotate
// cx, cy, cz - global center of rotation
var temp_position = [
  [-0.12, 0.835, 0], /* Front left */
  [0.12, 0.835, 0],  /* Front right */
  [0.0, 0.85, 0],    /* Front center */
  [0.08, 0.84, 0],   /* Sub */
  [-0.17, -0.83, 0], /* Surround left */
  [0.17, -0.83, 0]   /* Surround right */
];

function set_rotation(x, y, angle, n /* index of the speaker: 0 - FL, 1 - FC, 2 - FR etc. */) {
  /* Currently not used */
  initailPosition[n][0] = temp_position[n][0];
  initailPosition[n][1] = temp_position[n][1];
  initailPosition[n][2] = temp_position[n][2];
  console.log(n + " x: " + initailPosition[n][0] + " y: " + initailPosition[n][1] + " z: " + initailPosition[n][2]);
  return [initailPosition[n][0], initailPosition[n][1], initailPosition[n][2]];
}

function rotate(node_obj, pitch, roll, yaw, i) {
  var cosa = Math.cos(yaw);
  var sina = Math.sin(yaw);
  var cosb = Math.cos(pitch);
  var sinb = Math.sin(pitch);
  var cosc = Math.cos(roll);
  var sinc = Math.sin(roll);
  var Axx = cosa * cosb;
  var Axy = cosa * sinb * sinc - sina * cosc;
  var Axz = cosa * sinb * cosc + sina * sinc;
  var Ayx = sina * cosb;
  var Ayy = sina * sinb * sinc + cosa * cosc;
  var Ayz = sina * sinb * cosc - cosa * sinc;
  var Azx = -sinb;
  var Azy = cosb * sinc;
  var Azz = cosb * cosc;
  px = temp_position[i][0];
  py = temp_position[i][1];
  pz = temp_position[i][2];
  node_x = Axx * px + Axy * py + Axz * pz;
  node_y = Ayx * px + Ayy * py + Ayz * pz;
  node_z = Azx * px + Azy * py + Azz * pz;
  // Setting position
  if (node_obj.positionX) {
    // node_obj.positionX.value = node_x;
    // node_obj.positionY.value = node_y;
    // node_obj.positionZ.value = node_z;
    // node_obj.positionX.setValueAtTime(node_x, context.currentTime);
    // node_obj.positionY.setValueAtTime(node_y, context.currentTime);
    // node_obj.positionZ.setValueAtTime(node_z, context.currentTime);
    node_obj.positionX.linearRampToValueAtTime(node_x, context.currentTime + 0.1);
    node_obj.positionY.linearRampToValueAtTime(node_y, context.currentTime + 0.1);
    node_obj.positionZ.linearRampToValueAtTime(node_z, context.currentTime + 0.1);
  } else {
    node_obj.setPosition(node_x, node_y, node_z);
  }
  pannerNodesObjects[i] = node_obj;
  if (i == 0) {
    console.log("Front Left x: " + pannerNodesObjects[i].positionX.value + " y: " + pannerNodesObjects[i].positionY.value + " z: " + pannerNodesObjects[i].positionZ.value);
  }
  if (i == 1) {
    console.log("Front Right x: " + pannerNodesObjects[i].positionX.value + " y: " + pannerNodesObjects[i].positionY.value + " z: " + pannerNodesObjects[i].positionZ.value);
  }
  if (i == 2) {
    console.log("Front Center x: " + pannerNodesObjects[i].positionX.value + " y: " + pannerNodesObjects[i].positionY.value + " z: " + pannerNodesObjects[i].positionZ.value);
  }
}

// Starting function creates PannerNodes and adds parameters to them
function start_function() {
  screenCenterY = estimate_ScreenParams();
  console.log("Distance from screen in start_function: " + distanceFromScreen);
  console.log("screenCenterY in start_function: " + screenCenterY);
  if (listener.forwardX) {
    listener.forwardX.setValueAtTime(0, context.currentTime);
    listener.forwardY.setValueAtTime(0, context.currentTime);
    listener.forwardZ.setValueAtTime(-1, context.currentTime);
    listener.upX.setValueAtTime(0, context.currentTime);
    listener.upY.setValueAtTime(1, context.currentTime);
    listener.upZ.setValueAtTime(0, context.currentTime);
  } else {
    listener.setOrientation(0, 0, -1, 0, 1, 0);
  }
  var angleList = [90, -90, 0, -110, 110, -20];
  for (i = 0; i < pannerNodesObjects.length; i++) {
    [nx, ny, nz] = set_rotation(distanceFromScreen, screenCenterY, angleList[i], i);
    set_pannerNode(pannerNodesObjects[i], 'HRTF', "exponential", 1, 100, 2, 360, 0, 0, nx, ny, nz, i);
    splitter.connect(pannerNodesObjects[i], i);
    pannerNodesObjects[i].connect(context.destination);
  }
  console.log(context.destination);
  console.log("start_function Finished!");
}

CSS:

video { height: 150px }

HTML:

<!doctype html>
<html>
<head>
  <meta charset="utf-8">
  <title>MyTitle</title>
  <base href="/">
  <meta name="viewport" content="width=device-width, initial-scale=1">
  <link rel="icon" type="image/x-icon" href="favicon.ico">
  <script src="https://unpkg.com/mathjs@7.6.0/dist/math.js"></script>
</head>
<body>
  <button type="button" id="start_web_audio" onclick="startFunction()">Click to allow Web Audio</button> <p>Required to start webaudio API due to changes in autoplay policy on modern browsers</p>
  <!-- tell the server we want to read it -->
  <video crossorigin="anonymous" src="https://upload.wikimedia.org/wikipedia/commons/2/22/Volcano_Lava_Sample.webm" controls id="video" loop></video>
</body>
</html>
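
Stripped of the speaker-placement code, the fix described above can be sketched in a few lines. This is a minimal illustration rather than the full demo; it assumes a media file served with a matching Access-Control-Allow-Origin header (the wikimedia.org clip used above is one such file):

// 1. Request the media with CORS so the script is allowed to read its content.
var video = document.createElement('video');
video.crossOrigin = 'anonymous';
video.src = 'https://upload.wikimedia.org/wikipedia/commons/2/22/Volcano_Lava_Sample.webm';
video.controls = true;
document.body.appendChild(video);

// 2. Build the audio graph from a user gesture (autoplay policy) and make sure the
//    MediaElementSource ends up connected to the destination, otherwise the element
//    stays silent.
document.body.addEventListener('click', function () {
  var ctx = new AudioContext();
  var source = ctx.createMediaElementSource(video);
  source.connect(ctx.destination);
  video.play();
}, { once: true });

If the server does not send an Access-Control-Allow-Origin header that matches the page, the element may still play on its own, but a MediaElementSource created from it will output only silence, which is exactly the symptom described in the question.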
