Merge pull request #1101 from tidalcycles/sampler-refactoring

refactor sampler
This commit is contained in:
Felix Roos 2024-09-14 10:49:21 +02:00 committed by GitHub
commit 9c99f5ada2
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
3 changed files with 65 additions and 51 deletions

View File

@ -22,16 +22,13 @@ function humanFileSize(bytes, si) {
return bytes.toFixed(1) + ' ' + units[u]; return bytes.toFixed(1) + ' ' + units[u];
} }
export const getSampleBufferSource = async (s, n, note, speed, freq, bank, resolveUrl) => { // deduces relevant info for sample loading from hap.value and sample definition
let transpose = 0; // it encapsulates the core sampler logic into a pure and synchronous function
if (freq !== undefined && note !== undefined) { // hapValue: Hap.value, bank: sample bank definition for sound "s" (values in strudel.json format)
logger('[sampler] hap has note and freq. ignoring note', 'warning'); export function getSampleInfo(hapValue, bank) {
} const { s, n = 0, speed = 1.0 } = hapValue;
let midi = valueToMidi({ freq, note }, 36); let midi = valueToMidi(hapValue, 36);
transpose = midi - 36; // C3 is middle C let transpose = midi - 36; // C3 is middle C;
const ac = getAudioContext();
let sampleUrl; let sampleUrl;
let index = 0; let index = 0;
if (Array.isArray(bank)) { if (Array.isArray(bank)) {
@ -50,19 +47,54 @@ export const getSampleBufferSource = async (s, n, note, speed, freq, bank, resol
index = getSoundIndex(n, bank[closest].length); index = getSoundIndex(n, bank[closest].length);
sampleUrl = bank[closest][index]; sampleUrl = bank[closest][index];
} }
const label = `${s}:${index}`;
let playbackRate = Math.abs(speed) * Math.pow(2, transpose / 12);
return { transpose, sampleUrl, index, midi, label, playbackRate };
}
// takes hapValue and returns buffer + playbackRate.
export const getSampleBuffer = async (hapValue, bank, resolveUrl) => {
let { sampleUrl, label, playbackRate } = getSampleInfo(hapValue, bank);
if (resolveUrl) { if (resolveUrl) {
sampleUrl = await resolveUrl(sampleUrl); sampleUrl = await resolveUrl(sampleUrl);
} }
let buffer = await loadBuffer(sampleUrl, ac, s, index); const ac = getAudioContext();
if (speed < 0) { const buffer = await loadBuffer(sampleUrl, ac, label);
if (hapValue.unit === 'c') {
playbackRate = playbackRate * buffer.duration;
}
return { buffer, playbackRate };
};
// creates playback ready AudioBufferSourceNode from hapValue
export const getSampleBufferSource = async (hapValue, bank, resolveUrl) => {
let { buffer, playbackRate } = await getSampleBuffer(hapValue, bank, resolveUrl);
if (hapValue.speed < 0) {
// should this be cached? // should this be cached?
buffer = reverseBuffer(buffer); buffer = reverseBuffer(buffer);
} }
const ac = getAudioContext();
const bufferSource = ac.createBufferSource(); const bufferSource = ac.createBufferSource();
bufferSource.buffer = buffer; bufferSource.buffer = buffer;
const playbackRate = 1.0 * Math.pow(2, transpose / 12);
bufferSource.playbackRate.value = playbackRate; bufferSource.playbackRate.value = playbackRate;
return bufferSource;
const { s, loopBegin = 0, loopEnd = 1, begin = 0, end = 1 } = hapValue;
// "The computation of the offset into the sound is performed using the sound buffer's natural sample rate,
// rather than the current playback rate, so even if the sound is playing at twice its normal speed,
// the midway point through a 10-second audio buffer is still 5."
const offset = begin * bufferSource.buffer.duration;
const loop = s.startsWith('wt_') ? 1 : hapValue.loop;
if (loop) {
bufferSource.loop = true;
bufferSource.loopStart = loopBegin * bufferSource.buffer.duration - offset;
bufferSource.loopEnd = loopEnd * bufferSource.buffer.duration - offset;
}
const bufferDuration = bufferSource.buffer.duration / bufferSource.playbackRate.value;
const sliceDuration = (end - begin) * bufferDuration;
return { bufferSource, offset, bufferDuration, sliceDuration };
}; };
export const loadBuffer = (url, ac, s, n = 0) => { export const loadBuffer = (url, ac, s, n = 0) => {
@ -232,10 +264,10 @@ export const samples = async (sampleMap, baseUrl = sampleMap._base || '', option
const { prebake, tag } = options; const { prebake, tag } = options;
processSampleMap( processSampleMap(
sampleMap, sampleMap,
(key, value) => (key, bank) =>
registerSound(key, (t, hapValue, onended) => onTriggerSample(t, hapValue, onended, value), { registerSound(key, (t, hapValue, onended) => onTriggerSample(t, hapValue, onended, bank), {
type: 'sample', type: 'sample',
samples: value, samples: bank,
baseUrl, baseUrl,
prebake, prebake,
tag, tag,
@ -249,38 +281,26 @@ const cutGroups = [];
export async function onTriggerSample(t, value, onended, bank, resolveUrl) { export async function onTriggerSample(t, value, onended, bank, resolveUrl) {
let { let {
s, s,
freq,
unit,
nudge = 0, // TODO: is this in seconds? nudge = 0, // TODO: is this in seconds?
cut, cut,
loop, loop,
clip = undefined, // if set, samples will be cut off when the hap ends clip = undefined, // if set, samples will be cut off when the hap ends
n = 0, n = 0,
note,
speed = 1, // sample playback speed speed = 1, // sample playback speed
loopBegin = 0,
begin = 0,
loopEnd = 1,
end = 1,
duration, duration,
} = value; } = value;
// load sample // load sample
if (speed === 0) { if (speed === 0) {
// no playback // no playback
return; return;
} }
loop = s.startsWith('wt_') ? 1 : value.loop;
const ac = getAudioContext(); const ac = getAudioContext();
// destructure adsr here, because the default should be different for synths and samples // destructure adsr here, because the default should be different for synths and samples
let [attack, decay, sustain, release] = getADSRValues([value.attack, value.decay, value.sustain, value.release]); let [attack, decay, sustain, release] = getADSRValues([value.attack, value.decay, value.sustain, value.release]);
//const soundfont = getSoundfontKey(s);
const time = t + nudge;
const bufferSource = await getSampleBufferSource(s, n, note, speed, freq, bank, resolveUrl); const { bufferSource, sliceDuration, offset } = await getSampleBufferSource(value, bank, resolveUrl);
// vibrato
let vibratoOscillator = getVibratoOscillator(bufferSource.detune, value, t);
// async stuff above took too long? // async stuff above took too long?
if (ac.currentTime > t) { if (ac.currentTime > t) {
@ -292,26 +312,19 @@ export async function onTriggerSample(t, value, onended, bank, resolveUrl) {
logger(`[sampler] could not load "${s}:${n}"`, 'error'); logger(`[sampler] could not load "${s}:${n}"`, 'error');
return; return;
} }
bufferSource.playbackRate.value = Math.abs(speed) * bufferSource.playbackRate.value;
if (unit === 'c') { // vibrato
// are there other units? let vibratoOscillator = getVibratoOscillator(bufferSource.detune, value, t);
bufferSource.playbackRate.value = bufferSource.playbackRate.value * bufferSource.buffer.duration * 1; //cps;
} const time = t + nudge;
// "The computation of the offset into the sound is performed using the sound buffer's natural sample rate,
// rather than the current playback rate, so even if the sound is playing at twice its normal speed,
// the midway point through a 10-second audio buffer is still 5."
const offset = begin * bufferSource.buffer.duration;
if (loop) {
bufferSource.loop = true;
bufferSource.loopStart = loopBegin * bufferSource.buffer.duration - offset;
bufferSource.loopEnd = loopEnd * bufferSource.buffer.duration - offset;
}
bufferSource.start(time, offset); bufferSource.start(time, offset);
const envGain = ac.createGain(); const envGain = ac.createGain();
const node = bufferSource.connect(envGain); const node = bufferSource.connect(envGain);
// if none of these controls is set, the duration of the sound will be set to the duration of the sample slice
if (clip == null && loop == null && value.release == null) { if (clip == null && loop == null && value.release == null) {
const bufferDuration = bufferSource.buffer.duration / bufferSource.playbackRate.value; duration = sliceDuration;
duration = (end - begin) * bufferDuration;
} }
let holdEnd = t + duration; let holdEnd = t + duration;

View File

@ -421,6 +421,7 @@ export const superdough = async (value, t, hapDuration) => {
}; };
if (bank && s) { if (bank && s) {
s = `${bank}_${s}`; s = `${bank}_${s}`;
value.s = s;
} }
// get source AudioNode // get source AudioNode

View File

@ -23,10 +23,10 @@ async function hasStrudelJson(subpath) {
async function loadStrudelJson(subpath) { async function loadStrudelJson(subpath) {
const contents = await readTextFile(subpath + '/strudel.json', { dir }); const contents = await readTextFile(subpath + '/strudel.json', { dir });
const sampleMap = JSON.parse(contents); const sampleMap = JSON.parse(contents);
processSampleMap(sampleMap, (key, value) => { processSampleMap(sampleMap, (key, bank) => {
registerSound(key, (t, hapValue, onended) => onTriggerSample(t, hapValue, onended, value, fileResolver(subpath)), { registerSound(key, (t, hapValue, onended) => onTriggerSample(t, hapValue, onended, bank, fileResolver(subpath)), {
type: 'sample', type: 'sample',
samples: value, samples: bank,
fileSystem: true, fileSystem: true,
tag: 'local', tag: 'local',
}); });