我有一个基于 2D 画布的网页音频游戏，其中的空间化音频源通过 Web Audio API 被定位在画布上的特定像素坐标处。
我已经成功地使用 Web Audio API 的 PannerNode 在 canvas 元素上精确定位了每个音频源，如下所示：
// Grab the 2D canvas that serves as the game map.
var canvas = document.getElementById("map");
var context = canvas.getContext("2d");
// Loads an audio file via XHR, decodes it into soundObj.soundToPlay, and
// creates an HRTF PannerNode whose position can be set in pixel coordinates
// through soundObj.position(x, y, z).
// NOTE(review): this snippet is truncated — the enclosing function is never
// closed in the visible code and soundObj is never returned; presumably the
// original ends with `return soundObj; }` — confirm against the full source.
// NOTE(review): `audioContext` is assumed to be created elsewhere on the page.
function audioFileLoader(fileDirectory) {
var soundObj = {};
var playSound = undefined;
var panner = undefined;
var gainNode = undefined;
// Fetch the audio file as raw bytes so it can be decoded below.
var getSound = new XMLHttpRequest();
soundObj.fileDirectory = fileDirectory;
getSound.open("GET", soundObj.fileDirectory, true);
getSound.responseType = "arraybuffer";
getSound.onload = function() {
// Decode asynchronously; the buffer is attached to soundObj when ready.
audioContext.decodeAudioData(getSound.response, function(buffer) { soundObj.soundToPlay = buffer;
}); };
getSound.send();
// HRTF panning gives per-ear filtering for convincing 2D/3D positioning.
panner = audioContext.createPanner();
panner.panningModel = 'HRTF';
// Position the source; the caller supplies canvas pixel coordinates.
soundObj.position = function(x,y,z) {
panner.setPosition(x,y,z);
};
我现在正在尝试使用 Resonance Audio Web SDK 升级音频空间化，以便利用它更高级的空间化特性。
如何使用Resonance audio的setPosition定义画布元素上音频源的位置(x,y)?
我不知道如何在我的画布元素上把 Resonance Audio 原生的单位（米）换算成像素坐标。我猜想，如果能解决这个问题，就可以在 2D 游戏中定义不同音频房间（room）的大小和位置，那会非常酷。
谢谢。
发布于 2020-05-31 07:09:09
因此，只要你在画布上以像素为单位获取坐标，并用同样的单位（像素）来定位和更新监听者（listener）的位置，一切就都没问题。只要音源和监听者使用相同的单位，它们彼此之间的相对位置关系就保持不变，Resonance Audio 的空间化就能正常工作：
// --- Map / mouse tracking -------------------------------------------------
// Canvas that represents the game map; the Resonance Audio listener follows
// the mouse position on it, measured in canvas pixels.
var canvas = document.getElementById("map");
var context = canvas.getContext("2d");
var mouseX;
var mouseY;

// Cache the latest mouse coordinates (canvas-relative pixels) from an event.
// FIX: accept the event as an explicit parameter instead of relying on the
// deprecated implicit global `window.event`, which is not available in all
// browsers (e.g. older Firefox) and made this function silently fragile.
function updateCoords(event) {
  mouseX = event.offsetX;
  mouseY = event.offsetY;
}

// Mousemove handler: track the pointer, mirror the coordinates into the
// readout elements, and move the Resonance Audio listener to the same pixel
// coordinates. Units only need to be consistent between sources and the
// listener, so pixels work fine.
function moveAroundMap(event) {
  updateCoords(event);
  // NOTE(review): mapX/mapY are assumed to be implicit globals for elements
  // with ids "mapX"/"mapY" defined elsewhere in the page — confirm.
  mapX.innerText = mouseX;
  mapY.innerText = mouseY;
  // Elevate the listener (negative z) rather than lowering every source.
  resonanceAudioScene.setListenerPosition(mouseX, mouseY, -20);
}

// Listen on the canvas element directly instead of the implicit global
// `map` (created by the browser from the element id), which is less portable.
canvas.addEventListener("mousemove", moveAroundMap, false);
// --- Resonance Audio setup ------------------------------------------------
// One shared AudioContext for the page.
// FIX: use `const` for bindings that are never reassigned (they were `let`).
const audioContext = new AudioContext();

// Build a third-order Ambisonic Resonance Audio scene on top of the context
// and route its binaural output straight to the speakers/headphones.
const resonanceAudioScene = new ResonanceAudio(audioContext);
resonanceAudioScene.setAmbisonicOrder(3);
resonanceAudioScene.output.connect(audioContext.destination);

// Load a looping mono sample through an <audio> element...
const audioElement = document.createElement('audio');
audioElement.src = './samples/mono-seagulls.mp3';
audioElement.loop = true;

// ...and feed it into the scene as a spatialized input source.
const audioElementSource = audioContext.createMediaElementSource(audioElement);
const source = resonanceAudioScene.createSource();
audioElementSource.connect(source.input);
// Position the source relative to the listener (same pixel units).
source.setPosition(140, 150, 0);
https://stackoverflow.com/questions/62100897
复制相似问题