Merge pull request #717 from vasilymilovidov/reverb

Add support for using samples as impulse response buffers for the reverb
Felix Roos 2023-10-10 00:17:59 +02:00 committed by GitHub
commit a2ee70a964
5 changed files with 97 additions and 44 deletions


@@ -1030,6 +1030,15 @@ const generic_params = [
*
*/
['roomfade', 'rfade'],
/**
* Sets the sample to use as an impulse response for the reverb.
*
* @name iresponse
* @param {string | Pattern} sample to use as an impulse response
* @synonyms ir
* @example
* s("bd sd").room(.8).ir("<shaker_large:0 shaker_large:2>")
*
*/
[['ir', 'i'], 'iresponse'],
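For illustration, a minimal usage sketch of the new control (the bank name `hall` and the URL are placeholders; it assumes the sample has first been registered with `samples()`):

```js
// register a custom impulse response sample (placeholder URL), then use it as the reverb IR
samples({ hall: 'https://example.com/ir/hall.wav' });
s("bd sd").room(.8).ir("hall");
```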
/**
* Sets the room size of the reverb, see {@link room}.
* When this property is changed, the reverb will be recalculated, so only change it sparingly.


@@ -1,28 +1,47 @@
import reverbGen from './reverbGen.mjs';
if (typeof AudioContext !== 'undefined') {
AudioContext.prototype.createReverb = function (duration, fade, lp, dim) {
AudioContext.prototype.adjustLength = function (duration, buffer) {
// trim or zero-pad the buffer so that it spans `duration` seconds
const newLength = buffer.sampleRate * duration;
const newBuffer = this.createBuffer(buffer.numberOfChannels, newLength, buffer.sampleRate);
for (let channel = 0; channel < buffer.numberOfChannels; channel++) {
let oldData = buffer.getChannelData(channel);
let newData = newBuffer.getChannelData(channel);
for (let i = 0; i < newLength; i++) {
newData[i] = oldData[i] || 0;
}
}
return newBuffer;
};
AudioContext.prototype.createReverb = function (duration, fade, lp, dim, ir) {
const convolver = this.createConvolver();
convolver.generate = (d = 2, fade = 0.1, lp = 15000, dim = 1000) => {
reverbGen.generateReverb(
{
audioContext: this,
numChannels: 2,
decayTime: d,
fadeInTime: fade,
lpFreqStart: lp,
lpFreqEnd: dim,
},
(buffer) => {
convolver.buffer = buffer;
},
);
convolver.generate = (d = 2, fade = 0.1, lp = 15000, dim = 1000, ir) => {
convolver.duration = d;
convolver.fade = fade;
convolver.lp = lp;
convolver.dim = dim;
convolver.ir = ir;
if (ir) {
convolver.buffer = this.adjustLength(d, ir);
} else {
reverbGen.generateReverb(
{
audioContext: this,
numChannels: 2,
decayTime: d,
fadeInTime: fade,
lpFreqStart: lp,
lpFreqEnd: dim,
},
(buffer) => {
convolver.buffer = buffer;
},
);
}
};
convolver.generate(duration, fade, lp, dim);
convolver.generate(duration, fade, lp, dim, ir);
return convolver;
};
}
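To make the two code paths above concrete, a hedged usage sketch (assumes an existing `AudioContext` named `ac` and an already-decoded `AudioBuffer` named `irBuffer`):

```js
// without `ir`: reverbGen synthesizes a 2s impulse response using the fade/lowpass settings
const generated = ac.createReverb(2, 0.1, 15000, 1000);
// with `ir`: the sample buffer is trimmed or zero-padded to 2s by adjustLength and used directly
const sampled = ac.createReverb(2, 0.1, 15000, 1000, irBuffer);
sampled.connect(ac.destination);
```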


@@ -16,11 +16,11 @@ var reverbGen = {};
/** Generates a reverb impulse response.
@param {!Object} params TODO: Document the properties.
@param {!function(!AudioBuffer)} callback Function to call when
the impulse response has been generated. The impulse response
is passed to this function as its parameter. May be called
immediately within the current execution context, or later. */
reverbGen.generateReverb = function (params, callback) {
var audioContext = params.audioContext || new AudioContext();
var sampleRate = audioContext.sampleRate;
@@ -48,12 +48,13 @@ reverbGen.generateReverb = function (params, callback) {
/** Creates a canvas element showing a graph of the given data.
@param {!Float32Array} data An array of numbers, or a Float32Array.
@param {number} width Width in pixels of the canvas.
@param {number} height Height in pixels of the canvas.
@param {number} min Minimum value of data for the graph (lower edge).
@param {number} max Maximum value of data in the graph (upper edge).
@return {!CanvasElement} The generated canvas element. */
reverbGen.generateGraph = function (data, width, height, min, max) {
var canvas = document.createElement('canvas');
canvas.width = width;
@@ -72,13 +73,13 @@ reverbGen.generateGraph = function (data, width, height, min, max) {
/** Applies a constantly changing lowpass filter to the given sound.
@private
@param {!AudioBuffer} input
@param {number} lpFreqStart
@param {number} lpFreqEnd
@param {number} lpFreqEndAt
@param {!function(!AudioBuffer)} callback May be called
immediately within the current execution context, or later.*/
var applyGradualLowpass = function (input, lpFreqStart, lpFreqEnd, lpFreqEndAt, callback) {
if (lpFreqStart == 0) {
callback(input);
@@ -110,8 +111,8 @@ var applyGradualLowpass = function (input, lpFreqStart, lpFreqEnd, lpFreqEndAt,
};
/** @private
@param {!AudioBuffer} buffer
@return {!Array.<!Float32Array>} An array containing the Float32Array of each channel's samples. */
var getAllChannelData = function (buffer) {
var channels = [];
for (var i = 0; i < buffer.numberOfChannels; i++) {
@@ -121,7 +122,7 @@ var getAllChannelData = function (buffer) {
};
/** @private
@return {number} A random number from -1 to 1. */
var randomSample = function () {
return Math.random() * 2 - 1;
};
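Since the `params` object is still marked `TODO` above, here is a sketch of the call exactly as `reverb.mjs` makes it (assumes an `AudioContext` `ac` and a `ConvolverNode` `convolver`):

```js
reverbGen.generateReverb(
  {
    audioContext: ac,   // context used to allocate and filter the buffer
    numChannels: 2,     // stereo impulse response
    decayTime: 2,       // length of the reverb tail in seconds
    fadeInTime: 0.1,    // fade-in at the start of the tail, in seconds
    lpFreqStart: 15000, // lowpass sweep start in Hz (0 disables the sweep)
    lpFreqEnd: 1000,    // lowpass sweep end in Hz
  },
  (buffer) => {
    convolver.buffer = buffer; // may run synchronously or later
  },
);
```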


@@ -12,14 +12,18 @@ import workletsUrl from './worklets.mjs?url';
import { createFilter, gainNode, getCompressor } from './helpers.mjs';
import { map } from 'nanostores';
import { logger } from './logger.mjs';
import { loadBuffer } from './sampler.mjs';
export const soundMap = map();
export function registerSound(key, onTrigger, data = {}) {
soundMap.setKey(key, { onTrigger, data });
}
export function getSound(s) {
return soundMap.get()[s];
}
export const resetLoadedSounds = () => soundMap.set({});
let audioContext;
@@ -46,6 +50,7 @@ export const panic = () => {
};
let workletsLoading;
function loadWorklets() {
if (workletsLoading) {
return workletsLoading;
@@ -89,6 +94,7 @@ export async function initAudioOnFirstClick(options) {
let delays = {};
const maxfeedback = 0.98;
function getDelay(orbit, delaytime, delayfeedback, t) {
if (delayfeedback > maxfeedback) {
//logger(`delayfeedback was clamped to ${maxfeedback} to save your ears`);
@@ -110,33 +116,33 @@ let reverbs = {};
let hasChanged = (now, before) => now !== undefined && now !== before;
function getReverb(orbit, duration, fade, lp, dim) {
function getReverb(orbit, duration, fade, lp, dim, ir) {
// If no reverb has been created for a given orbit, create one
if (!reverbs[orbit]) {
const ac = getAudioContext();
const reverb = ac.createReverb(duration, fade, lp, dim);
const reverb = ac.createReverb(duration, fade, lp, dim, ir);
reverb.connect(getDestination());
reverbs[orbit] = reverb;
}
if (
hasChanged(duration, reverbs[orbit].duration) ||
hasChanged(fade, reverbs[orbit].fade) ||
hasChanged(lp, reverbs[orbit].lp) ||
hasChanged(dim, reverbs[orbit].dim)
hasChanged(dim, reverbs[orbit].dim) ||
reverbs[orbit].ir !== ir
) {
// only regenerate when something has changed
// avoids endless regeneration on things like
// stack(s("a"), s("b").rsize(8)).room(.5)
// this only works when args may stay undefined until here
// setting default values breaks this
reverbs[orbit].generate(duration, fade, lp, dim);
reverbs[orbit].generate(duration, fade, lp, dim, ir);
}
return reverbs[orbit];
}
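The comment above hinges on `hasChanged` treating `undefined` as "not set"; a small sketch of the resulting behavior:

```js
// hasChanged(now, before) is only true when a value was explicitly given and differs
hasChanged(undefined, 2); // false: the hap did not set the param, keep the current reverb
hasChanged(2, 2);         // false: same value, nothing to regenerate
hasChanged(8, 2);         // true:  regenerate the impulse response
```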
export let analyser, analyserData /* s = {} */;
export function getAnalyser(/* orbit, */ fftSize = 2048) {
if (!analyser /*s [orbit] */) {
const analyserNode = getAudioContext().createAnalyser();
@@ -235,6 +241,8 @@ export const superdough = async (value, deadline, hapDuration) => {
roomlp,
roomdim,
roomsize,
ir,
i = 0,
velocity = 1,
analyze, // analyser wet
fft = 8, // fftSize 0 - 10
@@ -271,6 +279,7 @@ export const superdough = async (value, deadline, hapDuration) => {
// this can be used for things like speed(0) in the sampler
return;
}
if (ac.currentTime > t) {
logger('[webaudio] skip hap: still loading', ac.currentTime - t);
return;
@@ -383,7 +392,18 @@ export const superdough = async (value, deadline, hapDuration) => {
// reverb
let reverbSend;
if (room > 0) {
const reverbNode = getReverb(orbit, roomsize, roomfade, roomlp, roomdim);
let roomIR;
if (ir !== undefined) {
let url;
let sample = getSound(ir);
// getSound returns { onTrigger, data }; data.samples is either an array of URLs or an object of banks
if (Array.isArray(sample.data.samples)) {
url = sample.data.samples[i % sample.data.samples.length];
} else if (typeof sample.data.samples === 'object') {
const irUrls = Object.values(sample.data.samples).flat();
url = irUrls[i % irUrls.length];
}
roomIR = await loadBuffer(url, ac, ir, 0);
}
const reverbNode = getReverb(orbit, roomsize, roomfade, roomlp, roomdim, roomIR);
reverbSend = effectSend(post, reverbNode, room);
}
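Putting it together, a pattern such as the one below (reusing the sample bank from the JSDoc example) exercises this path: `room` opens the reverb send, and `ir("shaker_large:2")` sets `ir` to the sound name and `i` to the index, so the matching file is fetched with `loadBuffer` and handed to `getReverb` as the impulse response:

```js
s("bd sd").room(.8).rsize(4).ir("shaker_large:2")
```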


@@ -227,4 +227,8 @@ global effects use the same chain for all events of the same orbit:
<JsDoc client:idle name="roomdim" h={0} />
### iresponse
<JsDoc client:idle name="iresponse" h={0} />
Next, we'll look at strudel's support for [Csound](/learn/csound).