I am hoping to implement audio effects in the Web Audio API which require continuous access to two or more audio streams.
I can define a script processor with 2 input channels and 2 output channels:
var mod = context.createScriptProcessor(4096,2,2);
I can then connect a few sine waves to this processor:
mySine.connect(mod);
mySine2.connect(mod);
Is there a way to connect them to a specific input channel of the audio processor?
However, when I write an onaudioprocess function and inspect each input channel individually, every channel contains all of the sounds connected to the processor, so I have no way to access each sine wave separately within onaudioprocess. Is this correct? Or is there a way to connect a sound to a single input channel of the ScriptProcessor?
You could either create two identical single-channel ScriptProcessors (a sketch of that variant follows the example below), or use a ChannelMergerNode to assign each sine wave to its own channel of the script processor, like this:
var context = new AudioContext();
var sineA = context.createOscillator();
sineA.type = 'sine';
sineA.frequency.value = 300;
var sineB = context.createOscillator();
sineB.type = 'sine';
sineB.frequency.value = 100;
var script = context.createScriptProcessor(4096, 2, 2);
// create a 2-channel merger node
var merger = context.createChannelMerger(2);
// connect sineA to merger input 0
sineA.connect(merger, 0, 0);
// connect sineB to merger input 1
sineB.connect(merger, 0, 1);
// connect the merger to the script processor
merger.connect(script);
// process the audio data of each channel
script.onaudioprocess = function(event) {
  var input = event.inputBuffer;
  var output = event.outputBuffer;
  var inputA = input.getChannelData(0);
  var inputB = input.getChannelData(1);
  var outputA = output.getChannelData(0);
  var outputB = output.getChannelData(1);
  for (var i = 0; i < input.length; i++) {
    outputA[i] = inputA[i];
    outputB[i] = inputB[i];
  }
};
script.connect(context.destination);
sineA.start();
sineB.start();
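
If you would rather go with the first option, here is a minimal sketch of the two-processor variant (the names sineC, sineD, scriptC, scriptD and passThrough are just placeholders): each oscillator gets its own mono ScriptProcessor, so each onaudioprocess callback only ever sees that one source.

var context2 = new AudioContext();

var sineC = context2.createOscillator();
sineC.type = 'sine';
sineC.frequency.value = 300;

var sineD = context2.createOscillator();
sineD.type = 'sine';
sineD.frequency.value = 100;

// one mono script processor per oscillator
var scriptC = context2.createScriptProcessor(4096, 1, 1);
var scriptD = context2.createScriptProcessor(4096, 1, 1);

// simple pass-through; each processor only receives its own source
function passThrough(event) {
  var input = event.inputBuffer.getChannelData(0);
  var output = event.outputBuffer.getChannelData(0);
  for (var i = 0; i < input.length; i++) {
    output[i] = input[i];
  }
}

scriptC.onaudioprocess = passThrough;
scriptD.onaudioprocess = passThrough;

sineC.connect(scriptC);
sineD.connect(scriptD);
scriptC.connect(context2.destination);
scriptD.connect(context2.destination);

sineC.start();
sineD.start();

The merger approach is usually the better fit for your use case, since both streams arrive in the same onaudioprocess call and you can process them against each other sample by sample.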