Mirror of https://github.com/eliasstepanik/strudel-docker.git (synced 2026-01-22 19:18:31 +00:00)
Prepare to merge with PR #718

This commit is contained in:
parent ac9c629c0b
commit 13cb32903a
@ -980,7 +980,62 @@ const generic_params = [
   * s("bd sd").room(.8).roomsize("<0 1 2 4 8>")
   *
   */
+  /**
+   * Reverb lowpass starting frequency (in hertz).
+   *
+   * @name revlp
+   * @param {number} level between 0 and 20000hz
+   * @example
+   * s("bd sd").room(0.5).revlp(10000)
+   * @example
+   * s("bd sd").room(0.5).revlp(5000)
+   */
+  ['revlp'],
+  /**
+   * Reverb lowpass frequency at -60dB (in hertz).
+   *
+   * @name revdim
+   * @param {number} level between 0 and 20000hz
+   * @example
+   * s("bd sd").room(0.5).revlp(10000).revdim(8000)
+   * @example
+   * s("bd sd").room(0.5).revlp(5000).revdim(400)
+   *
+   */
+  ['revdim'],
+  /**
+   * Reverb fade time (in seconds).
+   *
+   * @name fade
+   * @param {number} seconds for the reverb to fade
+   * @example
+   * s("bd sd").room(0.5).revlp(10000).fade(0.5)
+   * @example
+   * s("bd sd").room(0.5).revlp(5000).fade(4)
+   *
+   */
+  ['fade'],
+  /**
+   * Sets the sample to use as an impulse response for the reverb.
+   *
+   * @name iresponse
+   * @param {string | Pattern} sample sample to pick as an impulse response
+   * @synonyms ir
+   * @example
+   * s("bd sd").room(.8).ir("<shaker_large:0 shaker_large:2>")
+   *
+   */
+  [['ir', 'i'], 'iresponse'],
+  /**
+   * Sets the room size of the reverb, see {@link room}.
+   *
+   * @name roomsize
+   * @param {number | Pattern} size between 0 and 10
+   * @synonyms size, sz
+   * @example
+   * s("bd sd").room(.8).roomsize("<0 1 2 4 8>")
+   *
+   */
   // TODO: find out why :
   // s("bd sd").room(.8).roomsize("<0 .2 .4 .6 .8 [1,0]>").osc()
   // .. does not work. Is it because room is only one effect?
@ -990,17 +1045,6 @@ const generic_params = [
   // ['sclaves'],
   // ['scrash'],

-  /**
-   * Sets the sample to use as an impulse response for the reverb.
-   *
-   * @name iresponse
-   * @param {string | Pattern} Sets the impulse response
-   * @example
-   * s("bd sd").room(.8).ir("<shaker_large:0 shaker_large:2>")
-   *
-   */
-  [['ir', 'i'], 'iresponse'],
-
   /**
    * Wave shaping distortion. CAUTION: it might get loud
    *
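Taken together, the controls documented above layer onto the existing room control. A minimal sketch of how they combine in a pattern (the parameter values are illustrative, not taken from the commit):

// Sketch: combining the new reverb controls added above (illustrative values).
s("bd sd")
  .room(0.6) // reverb send level
  .roomsize(4) // feeds the decay time of the generated impulse response
  .fade(0.2) // fade-in time of the impulse response, in seconds
  .revlp(12000) // lowpass starting frequency, in hertz
  .revdim(2000) // lowpass frequency at -60dB, in hertz

// Alternatively, swap the generated impulse response for a sample:
s("bd sd").room(0.8).ir("shaker_large:0")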
@ -1,11 +1,7 @@
+import reverbGen from './reverbGen.mjs';
+
 if (typeof AudioContext !== 'undefined') {
-  AudioContext.prototype.impulseResponse = function (duration, channels = 1) {
-    const length = this.sampleRate * duration;
-    const impulse = this.createBuffer(channels, length, this.sampleRate);
-    const IR = impulse.getChannelData(0);
-    for (let i = 0; i < length; i++) IR[i] = (2 * Math.random() - 1) * Math.pow(1 - i / length, duration);
-    return impulse;
-  };
-
+  AudioContext.prototype.generateReverb = reverbGen.generateReverb;
   AudioContext.prototype.adjustLength = function (duration, buffer) {
     const newLength = buffer.sampleRate * duration;
@ -21,17 +17,44 @@ if (typeof AudioContext !== 'undefined') {
     return newBuffer;
   };

-  AudioContext.prototype.createReverb = function (duration, buffer) {
+  AudioContext.prototype.createReverb = function (audioContext, duration, fade, revlp, revdim, imp) {
     const convolver = this.createConvolver();
-    convolver.setDuration = (dur, imp) => {
-      convolver.buffer = imp ? this.adjustLength(dur, imp) : this.impulseResponse(dur);
+    convolver.setDuration = (d, fade, revlp, revdim, imp) => {
+      if (imp) {
+        convolver.buffer = this.adjustLength(d, imp);
+        return convolver;
+      } else {
+        this.generateReverb(
+          {
+            audioContext,
+            sampleRate: 44100,
+            numChannels: 2,
+            decayTime: d,
+            fadeInTime: fade,
+            lpFreqStart: revlp,
+            lpFreqEnd: revdim,
+          },
+          (buffer) => {
+            convolver.buffer = buffer;
+          },
+        );
+        convolver.duration = duration;
+        convolver.fade = fade;
+        convolver.revlp = revlp;
+        convolver.revdim = revdim;
+        return convolver;
+      }
+    };
+    convolver.setIR = (d, fade, revlp, revdim, imp) => {
+      if (imp) {
+        convolver.buffer = this.adjustLength(d, imp);
+      } else {
+        convolver.setDuration(d, fade, revlp, revdim, imp);
+      }
       return convolver;
     };
-    convolver.setIR = (dur, imp) => {
-      convolver.buffer = imp ? this.adjustLength(dur, imp) : this.impulseResponse(dur);
-      return convolver;
-    };
-    convolver.setDuration(duration, buffer);
+    convolver.setDuration(duration, fade, revlp, revdim, imp);
     return convolver;
   };
 }
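For reference, the generator that createReverb now delegates to takes a single options object plus a completion callback. A standalone sketch of the same call shape (the values are illustrative; the import path is the one added by this patch):

// Sketch: calling the impulse response generator directly, mirroring the
// options object createReverb passes to this.generateReverb above.
import reverbGen from './reverbGen.mjs';

const ac = new AudioContext();
reverbGen.generateReverb(
  {
    audioContext: ac,
    sampleRate: 44100,
    numChannels: 2,
    decayTime: 2, // seconds until the tail reaches -60dB
    fadeInTime: 0.1, // seconds of fade-in at the start of the IR
    lpFreqStart: 15000, // the lowpass sweep starts here (Hz)...
    lpFreqEnd: 1000, // ...and ramps down to here over decayTime (Hz)
  },
  (buffer) => {
    // the rendered AudioBuffer becomes the convolver's impulse response
    const convolver = ac.createConvolver();
    convolver.buffer = buffer;
  },
);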
packages/superdough/reverbGen.mjs (new file, 207 lines)
@ -0,0 +1,207 @@
// Copyright 2014 Alan deLespinasse
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

'use strict';

var reverbGen = {};

/** Generates a reverb impulse response.

    @param {!Object} params TODO: Document the properties.
    @param {!function(!AudioBuffer)} callback Function to call when
      the impulse response has been generated. The impulse response
      is passed to this function as its parameter. May be called
      immediately within the current execution context, or later. */
reverbGen.generateReverb = function (params, callback) {
  var audioContext = params.audioContext || new AudioContext();
  var sampleRate = params.sampleRate || 44100;
  var numChannels = params.numChannels || 2;
  // params.decayTime is the -60dB fade time. We let it go 50% longer to get to -90dB.
  var totalTime = params.decayTime * 1.5;
  var decaySampleFrames = Math.round(params.decayTime * sampleRate);
  var numSampleFrames = Math.round(totalTime * sampleRate);
  var fadeInSampleFrames = Math.round((params.fadeInTime || 0) * sampleRate);
  // 60dB is a factor of 1 million in power, or 1000 in amplitude.
  var decayBase = Math.pow(1 / 1000, 1 / decaySampleFrames);
  var reverbIR = audioContext.createBuffer(numChannels, numSampleFrames, sampleRate);
  for (var i = 0; i < numChannels; i++) {
    var chan = reverbIR.getChannelData(i);
    for (var j = 0; j < numSampleFrames; j++) {
      chan[j] = randomSample() * Math.pow(decayBase, j);
    }
    for (var j = 0; j < fadeInSampleFrames; j++) {
      chan[j] *= j / fadeInSampleFrames;
    }
  }

  applyGradualLowpass(reverbIR, params.lpFreqStart || 0, params.lpFreqEnd || 0, params.decayTime, callback);
};
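An aside on the decay math in generateReverb above (this sketch is not part of the file): decayBase is chosen so that after exactly decaySampleFrames samples the noise has lost a factor of 1000 in amplitude, which is -60dB.

// Sketch: verifying decayBase = (1/1000)^(1/decaySampleFrames) (illustrative values).
const sampleRate = 44100;
const decayTime = 2; // seconds
const decaySampleFrames = Math.round(decayTime * sampleRate);
const decayBase = Math.pow(1 / 1000, 1 / decaySampleFrames);
console.log(Math.pow(decayBase, decaySampleFrames)); // ~0.001, i.e. -60dB in amplitude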
/** Creates a canvas element showing a graph of the given data.

    @param {!Float32Array} data An array of numbers, or a Float32Array.
    @param {number} width Width in pixels of the canvas.
    @param {number} height Height in pixels of the canvas.
    @param {number} min Minimum value of data for the graph (lower edge).
    @param {number} max Maximum value of data in the graph (upper edge).
    @return {!CanvasElement} The generated canvas element. */
reverbGen.generateGraph = function (data, width, height, min, max) {
  var canvas = document.createElement('canvas');
  canvas.width = width;
  canvas.height = height;
  var gc = canvas.getContext('2d');
  gc.fillStyle = '#000';
  gc.fillRect(0, 0, canvas.width, canvas.height);
  gc.fillStyle = '#fff';
  var xscale = width / data.length;
  var yscale = height / (max - min);
  for (var i = 0; i < data.length; i++) {
    gc.fillRect(i * xscale, height - (data[i] - min) * yscale, 1, 1);
  }
  return canvas;
};
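generateGraph is a small debugging helper; a hedged usage sketch (not part of the file), assuming buffer is a rendered AudioBuffer:

// Sketch: plot the first channel of an impulse response, with values in [-1, 1].
document.body.appendChild(reverbGen.generateGraph(buffer.getChannelData(0), 800, 200, -1, 1));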
/** Saves an AudioBuffer as a 16-bit WAV file on the client's host
    file system. Normalizes it to peak at +-32767, and optionally
    truncates it if there's a lot of "silence" at the end.

    @param {!AudioBuffer} buffer The buffer to save.
    @param {string} name Name of file to create.
    @param {number?} opt_minTail Defines what counts as "silence" for
      auto-truncating the buffer. If there is a point past which every
      value of every channel is less than opt_minTail, then the buffer
      is truncated at that point. This is expressed as an integer,
      applying to the post-normalized and integer-converted
      buffer. The default is 0, meaning don't truncate. */
reverbGen.saveWavFile = function (buffer, name, opt_minTail) {
  var bitsPerSample = 16;
  var bytesPerSample = 2;
  var sampleRate = buffer.sampleRate;
  var numChannels = buffer.numberOfChannels;
  var channels = getAllChannelData(buffer);
  var numSampleFrames = channels[0].length;
  var scale = 32767;
  // Find normalization constant.
  var max = 0;
  for (var i = 0; i < numChannels; i++) {
    for (var j = 0; j < numSampleFrames; j++) {
      max = Math.max(max, Math.abs(channels[i][j]));
    }
  }
  if (max) {
    scale = 32767 / max;
  }
  // Find truncation point.
  if (opt_minTail) {
    var truncateAt = 0;
    for (var i = 0; i < numChannels; i++) {
      for (var j = 0; j < numSampleFrames; j++) {
        var absSample = Math.abs(Math.round(scale * channels[i][j]));
        if (absSample > opt_minTail) {
          truncateAt = j;
        }
      }
    }
    numSampleFrames = truncateAt + 1;
  }
  var sampleDataBytes = bytesPerSample * numChannels * numSampleFrames;
  var fileBytes = sampleDataBytes + 44;
  var arrayBuffer = new ArrayBuffer(fileBytes);
  var dataView = new DataView(arrayBuffer);
  dataView.setUint32(0, 1179011410, true); // "RIFF"
  dataView.setUint32(4, fileBytes - 8, true); // file length
  dataView.setUint32(8, 1163280727, true); // "WAVE"
  dataView.setUint32(12, 544501094, true); // "fmt "
  dataView.setUint32(16, 16, true); // fmt chunk length
  dataView.setUint16(20, 1, true); // PCM format
  dataView.setUint16(22, numChannels, true); // NumChannels
  dataView.setUint32(24, sampleRate, true); // SampleRate
  var bytesPerSampleFrame = numChannels * bytesPerSample;
  dataView.setUint32(28, sampleRate * bytesPerSampleFrame, true); // ByteRate
  dataView.setUint16(32, bytesPerSampleFrame, true); // BlockAlign
  dataView.setUint16(34, bitsPerSample, true); // BitsPerSample
  dataView.setUint32(36, 1635017060, true); // "data"
  dataView.setUint32(40, sampleDataBytes, true);
  for (var j = 0; j < numSampleFrames; j++) {
    for (var i = 0; i < numChannels; i++) {
      dataView.setInt16(44 + j * bytesPerSampleFrame + i * bytesPerSample, Math.round(scale * channels[i][j]), true);
    }
  }
  var blob = new Blob([arrayBuffer], { type: 'audio/wav' });
  var url = window.URL.createObjectURL(blob);
  var linkEl = document.createElement('a');
  linkEl.href = url;
  linkEl.download = name;
  linkEl.style.display = 'none';
  document.body.appendChild(linkEl);
  linkEl.click();
};
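saveWavFile writes the 44-byte RIFF/WAVE header by hand and then interleaves the normalized 16-bit samples. A hedged usage sketch (not part of the file), assuming buffer is an AudioBuffer:

// Sketch: download the buffer as reverb-ir.wav, truncating the tail once every
// channel stays at or below 10 on the post-normalization 16-bit scale.
reverbGen.saveWavFile(buffer, 'reverb-ir.wav', 10);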
/** Applies a constantly changing lowpass filter to the given sound.

    @private
    @param {!AudioBuffer} input
    @param {number} lpFreqStart
    @param {number} lpFreqEnd
    @param {number} lpFreqEndAt
    @param {!function(!AudioBuffer)} callback May be called
      immediately within the current execution context, or later.*/
var applyGradualLowpass = function (input, lpFreqStart, lpFreqEnd, lpFreqEndAt, callback) {
  if (lpFreqStart == 0) {
    callback(input);
    return;
  }
  var channelData = getAllChannelData(input);
  var context = new OfflineAudioContext(input.numberOfChannels, channelData[0].length, input.sampleRate);
  var player = context.createBufferSource();
  player.buffer = input;
  var filter = context.createBiquadFilter();

  lpFreqStart = Math.min(lpFreqStart, input.sampleRate / 2);
  lpFreqEnd = Math.min(lpFreqEnd, input.sampleRate / 2);

  filter.type = 'lowpass';
  filter.Q.value = 0.0001;
  filter.frequency.setValueAtTime(lpFreqStart, 0);
  filter.frequency.linearRampToValueAtTime(lpFreqEnd, lpFreqEndAt);

  player.connect(filter);
  filter.connect(context.destination);
  player.start();
  context.oncomplete = function (event) {
    callback(event.renderedBuffer);
  };
  context.startRendering();

  window.filterNode = filter;
};

/** @private
    @param {!AudioBuffer} buffer
    @return {!Array.<!Float32Array>} An array containing the Float32Array of each channel's samples. */
var getAllChannelData = function (buffer) {
  var channels = [];
  for (var i = 0; i < buffer.numberOfChannels; i++) {
    channels[i] = buffer.getChannelData(i);
  }
  return channels;
};

/** @private
    @return {number} A random number from -1 to 1. */
var randomSample = function () {
  return Math.random() * 2 - 1;
};

export default reverbGen;
@ -114,20 +114,31 @@ function getDelay(orbit, delaytime, delayfeedback, t) {
 let reverbs = {};

-function getReverb(orbit, duration = 2, ir) {
+function getReverb(orbit, duration = 2, fade, revlp, revdim, imp) {
   if (!reverbs[orbit]) {
     const ac = getAudioContext();
-    const reverb = ac.createReverb(duration, ir);
+    const reverb = ac.createReverb(getAudioContext(), duration, fade, revlp, revdim, imp);
     reverb.connect(getDestination());
     reverbs[orbit] = reverb;
   }
-  if (reverbs[orbit].duration !== duration) {
-    reverbs[orbit] = reverbs[orbit].setDuration(duration, ir);
+  const reverbOrbit = reverbs[orbit];
+
+  if (
+    reverbs[orbit].duration !== duration ||
+    reverbs[orbit].fade !== fade ||
+    reverbs[orbit].revlp !== revlp ||
+    reverbs[orbit].revdim !== revdim
+  ) {
+    reverbs[orbit].setDuration(duration, fade, revlp, revdim);
     reverbs[orbit].duration = duration;
+    reverbs[orbit].fade = fade;
+    reverbs[orbit].revlp = revlp;
+    reverbs[orbit].revdim = revdim;
   }
-  if (reverbs[orbit].ir !== ir) {
-    reverbs[orbit] = reverbs[orbit].setIR(duration, ir);
-    reverbs[orbit].ir = ir;
+  if (reverbs[orbit].ir !== imp) {
+    reverbs[orbit] = reverbs[orbit].setIR(duration, fade, revlp, revdim, imp);
+    reverbs[orbit].ir = imp;
   }
   return reverbs[orbit];
 }
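The caching contract in getReverb: one convolver per orbit, rebuilt only when a reverb parameter actually changes. A hedged sketch of the calls superdough ends up making for two events on the same orbit (values are illustrative):

// Sketch: the second call reuses the cached convolver because
// duration, fade, revlp and revdim are unchanged.
const a = getReverb(1, 4, 0.1, 15000, 1000); // builds and caches the convolver for orbit 1
const b = getReverb(1, 4, 0.1, 15000, 1000); // returns the same cached node
console.log(a === b); // true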
@ -227,12 +238,15 @@ export const superdough = async (value, deadline, hapDuration) => {
     delaytime = 0.25,
     orbit = 1,
     room,
+    fade = 0.1,
+    revlp = 15000,
+    revdim = 1000,
     size = 2,
+    ir,
+    i = 0,
     velocity = 1,
     analyze, // analyser wet
     fft = 8, // fftSize 0 - 10
-    ir,
-    i = 0,
   } = value;
   gain *= velocity; // legacy fix for velocity
   let toDisconnect = []; // audio nodes that will be disconnected when the source has ended
@ -380,7 +394,7 @@ export const superdough = async (value, deadline, hapDuration) => {
   }
   let reverbSend;
   if (room > 0 && size > 0) {
-    const reverbNode = getReverb(orbit, size, buffer);
+    const reverbNode = getReverb(orbit, size, fade, revlp, revdim, buffer);
     reverbSend = effectSend(post, reverbNode, room);
   }
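On the superdough side, the new keys travel through the destructured value object into getReverb. A hedged sketch of a value object that exercises them (the key names are from this diff, the values are illustrative):

// Sketch: reverb-related fields superdough destructures from `value`.
const value = {
  s: 'bd',
  room: 0.5, // reverb send level; the reverb is only built when room > 0 && size > 0
  size: 4, // becomes the duration/decayTime of the generated impulse response
  fade: 0.1, // fade-in time (matches the default)
  revlp: 15000, // lowpass start in Hz (matches the default)
  revdim: 1000, // lowpass end at -60dB in Hz (matches the default)
  // ir: 'shaker_large', i: 0, // optionally select a sample as the impulse response
};
await superdough(value, 0, 1); // deadline and duration are illustrative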
@ -203,4 +203,20 @@ global effects use the same chain for all events of the same orbit:
 <JsDoc client:idle name="roomsize" h={0} />
+
+## fade
+
+<JsDoc client:idle name="fade" h={0} />
+
+## revlp
+
+<JsDoc client:idle name="revlp" h={0} />
+
+## revdim
+
+<JsDoc client:idle name="revdim" h={0} />
+
+## iresponse
+
+<JsDoc client:idle name="iresponse" h={0} />

 Next, we'll look at strudel's support for [Csound](/learn/csound).