mirror of https://github.com/eliasstepanik/strudel-docker.git
synced 2026-01-11 13:48:34 +00:00

scheduler error handling + style

This commit is contained in:
parent 45c7b29a96
commit 23e059a065
@@ -39,7 +39,7 @@ export class Cyclist {
          }
        });
      } catch (e) {
        console.warn('scheduler error: ', e.message);
        onLog(`error: ${e.message}`);
        onError?.(e);
      }
    }, // called slightly before each cycle
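A minimal sketch of the pattern this hunk applies: the per-cycle callback is guarded so a failing tick is reported through onLog/onError instead of killing the clock. This is not the actual Cyclist implementation; every name below is made up for illustration.

// hedged sketch, hypothetical names
function startClock(tick, { onError, onLog, interval = 0.05 } = {}) {
  return setInterval(() => {
    try {
      tick(); // query + schedule the next cycle's events
    } catch (e) {
      // report instead of letting the exception kill the timer loop
      console.warn('scheduler error: ', e.message);
      onLog?.(`error: ${e.message}`);
      onError?.(e);
    }
  }, interval * 1000);
}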
@@ -15,18 +15,22 @@ export function repl({
}) {
  const scheduler = new Cyclist({
    interval,
    onTrigger: (hap, deadline, duration) => {
      if (!hap.context.onTrigger) {
        return defaultOutput(hap, deadline, duration);
    onTrigger: async (hap, deadline, duration) => {
      try {
        if (!hap.context.onTrigger) {
          return await defaultOutput(hap, deadline, duration);
        }
        const cps = 1; // TODO: fix
        // call signature of output / onTrigger is different...
        return await hap.context.onTrigger(getTime() + deadline, hap, getTime(), cps);
      } catch (err) {
        onLog?.(`[cyclist] error: ${err.message}`, 'error');
      }
      const cps = 1; // TODO: fix
      // call signature of output / onTrigger is different...
      return hap.context.onTrigger(getTime() + deadline, hap, getTime(), cps);
    },
    onError: onSchedulerError,
    getTime,
    onToggle,
    onLog: (message) => onLog?.(`[clock] ${message}`),
    onLog: (message, type) => onLog?.(`[cyclist] ${message}`, type),
  });
  const evaluate = async (code, autostart = true) => {
    if (!code) {
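Because old and new lines are interleaved above, here is a consolidated sketch of the new trigger shape: it bridges the two call conventions (defaultOutput takes hap/deadline/duration, a custom hap.context.onTrigger takes an absolute time, the hap, the current time, and cps) and swallows per-event errors into the log. Names and the hard-coded cps follow the diff; the wrapper itself is an assumption for illustration.

// hedged sketch, not the exact repl() code
const makeOnTrigger = ({ getTime, defaultOutput, onLog }) =>
  async (hap, deadline, duration) => {
    try {
      if (!hap.context.onTrigger) {
        return await defaultOutput(hap, deadline, duration);
      }
      const cps = 1; // hard-coded, matching the TODO in the diff
      // custom triggers expect (absoluteTime, hap, currentTime, cps)
      return await hap.context.onTrigger(getTime() + deadline, hap, getTime(), cps);
    } catch (err) {
      onLog?.(`[cyclist] error: ${err.message}`, 'error');
    }
  };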
packages/react/dist/index.cjs.js (vendored, 16 lines changed): file diff suppressed because one or more lines are too long
packages/react/dist/index.es.js (vendored, 412 lines changed): file diff suppressed because one or more lines are too long
@@ -8,7 +8,8 @@ export default createTheme({
  caret: '#ffcc00',
  selection: 'rgba(128, 203, 196, 0.5)',
  selectionMatch: '#036dd626',
  lineHighlight: '#8a91991a',
  // lineHighlight: '#8a91991a', // original
  lineHighlight: '#00000050',
  gutterBackground: 'transparent',
  // gutterForeground: '#8a919966',
  gutterForeground: '#8a919966',
@@ -240,202 +240,198 @@ function effectSend(input, effect, wet) {

// export const webaudioOutput = async (t, hap, ct, cps) => {
export const webaudioOutput = async (hap, deadline, hapDuration) => {
  try {
    const ac = getAudioContext();
    /* if (isNote(hap.value)) {
      // supports primitive hap values that look like notes
      hap.value = { note: hap.value };
    } */
    if (typeof hap.value !== 'object') {
      throw new Error(
        `hap.value ${hap.value} is not supported by webaudio output. Hint: append .note() or .s() to the end`,
      );
    }
    // calculate correct time (tone.js workaround)
    let t = ac.currentTime + deadline;
    // destructure value
    let {
      freq,
      s,
      bank,
      sf,
      clip = 0, // if 1, samples will be cut off when the hap ends
      n = 0,
      note,
      gain = 0.8,
      cutoff,
      resonance = 1,
      hcutoff,
      hresonance = 1,
      bandf,
      bandq = 1,
      coarse,
      crush,
      shape,
      pan,
      speed = 1, // sample playback speed
      begin = 0,
      end = 1,
      vowel,
      delay = 0,
      delayfeedback = 0.5,
      delaytime = 0.25,
      unit,
      nudge = 0, // TODO: is this in seconds?
      cut,
      loop,
      orbit = 1,
      room,
      size = 2,
      roomsize = size,
    } = hap.value;
    const { velocity = 1 } = hap.context;
    gain *= velocity; // legacy fix for velocity
    // the chain will hold all audio nodes that connect to each other
    const chain = [];
    if (bank && s) {
      s = `${bank}_${s}`;
    }
    if (typeof s === 'string') {
      [s, n] = splitSN(s, n);
    }
    if (typeof note === 'string') {
      [note, n] = splitSN(note, n);
    }
    if (!s || ['sine', 'square', 'triangle', 'sawtooth'].includes(s)) {
      // destructure adsr here, because the default should be different for synths and samples
      const { attack = 0.001, decay = 0.05, sustain = 0.6, release = 0.01 } = hap.value;
      // with synths, n and note are the same thing
      n = note || n || 36;
      if (typeof n === 'string') {
        n = toMidi(n); // e.g. c3 => 48
      }
      // get frequency
      if (!freq && typeof n === 'number') {
        freq = fromMidi(n); // + 48);
      }
      // make oscillator
      const o = getOscillator({ t, s, freq, duration: hapDuration, release });
      chain.push(o);
      // level down oscillators as they are really loud compared to samples i've tested
      chain.push(gainNode(0.3));
      // TODO: make adsr work with samples without pops
      // envelope
      const adsr = getADSR(attack, decay, sustain, release, 1, t, t + hapDuration);
      chain.push(adsr);
    } else {
      // destructure adsr here, because the default should be different for synths and samples
      const { attack = 0.001, decay = 0.001, sustain = 1, release = 0.001 } = hap.value;
      // load sample
      if (speed === 0) {
        // no playback
        return;
      }
      if (!s) {
        console.warn('no sample specified');
        return;
      }
      const soundfont = getSoundfontKey(s);
      let bufferSource;

      try {
        if (soundfont) {
          // is soundfont
          bufferSource = await globalThis.getFontBufferSource(soundfont, note || n, ac);
        } else {
          // is sample from loaded samples(..)
          bufferSource = await getSampleBufferSource(s, n, note, speed);
        }
      } catch (err) {
        console.warn(err);
        return;
      }
      // async stuff above took too long?
      if (ac.currentTime > t) {
        console.warn('sample still loading:', s, n);
        return;
      }
      if (!bufferSource) {
        console.warn('no buffer source');
        return;
      }
      bufferSource.playbackRate.value = Math.abs(speed) * bufferSource.playbackRate.value;
      if (unit === 'c') {
        // are there other units?
        bufferSource.playbackRate.value = bufferSource.playbackRate.value * bufferSource.buffer.duration;
      }
      let duration = soundfont || clip ? hapDuration : bufferSource.buffer.duration / bufferSource.playbackRate.value;
      // "The computation of the offset into the sound is performed using the sound buffer's natural sample rate,
      // rather than the current playback rate, so even if the sound is playing at twice its normal speed,
      // the midway point through a 10-second audio buffer is still 5."
      const offset = begin * duration * bufferSource.playbackRate.value;
      duration = (end - begin) * duration;
      if (loop) {
        bufferSource.loop = true;
        bufferSource.loopStart = offset;
        bufferSource.loopEnd = offset + duration;
        duration = loop * duration;
      }
      t += nudge;

      bufferSource.start(t, offset);
      if (cut !== undefined) {
        cutGroups[cut]?.stop(t); // fade out?
        cutGroups[cut] = bufferSource;
      }
      chain.push(bufferSource);
      bufferSource.stop(t + duration + release);
      const adsr = getADSR(attack, decay, sustain, release, 1, t, t + duration);
      chain.push(adsr);
    }

    // gain stage
    chain.push(gainNode(gain));

    // filters
    cutoff !== undefined && chain.push(getFilter('lowpass', cutoff, resonance));
    hcutoff !== undefined && chain.push(getFilter('highpass', hcutoff, hresonance));
    bandf !== undefined && chain.push(getFilter('bandpass', bandf, bandq));
    vowel !== undefined && chain.push(ac.createVowelFilter(vowel));

    // effects
    coarse !== undefined && chain.push(getWorklet(ac, 'coarse-processor', { coarse }));
    crush !== undefined && chain.push(getWorklet(ac, 'crush-processor', { crush }));
    shape !== undefined && chain.push(getWorklet(ac, 'shape-processor', { shape }));

    // panning
    if (pan !== undefined) {
      const panner = ac.createStereoPanner();
      panner.pan.value = 2 * pan - 1;
      chain.push(panner);
    }

    // last gain
    const post = gainNode(1);
    chain.push(post);
    post.connect(getDestination());

    // delay
    let delaySend;
    if (delay > 0 && delaytime > 0 && delayfeedback > 0) {
      const delyNode = getDelay(orbit, delaytime, delayfeedback, t);
      delaySend = effectSend(post, delyNode, delay);
    }
    // reverb
    let reverbSend;
    if (room > 0 && roomsize > 0) {
      const reverbNode = getReverb(orbit, roomsize);
      reverbSend = effectSend(post, reverbNode, room);
    }

    // connect chain elements together
    chain.slice(1).reduce((last, current) => last.connect(current), chain[0]);

    // disconnect all nodes when source node has ended:
    chain[0].onended = () => chain.concat([delaySend, reverbSend]).forEach((n) => n?.disconnect());
  } catch (e) {
    console.warn('.out error:', e);
  }
};
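The begin/end slicing above converts a normalized slice of the sample into a start() offset and an audible duration, compensating for playbackRate (offsets are measured in buffer time, as the quoted note explains). A small standalone sketch of that arithmetic, with made-up numbers:

// hedged sketch of the slicing math, no real AudioBuffer needed
function sliceTiming({ bufferDuration, playbackRate, begin = 0, end = 1 }) {
  let duration = bufferDuration / playbackRate; // audible length of the whole sample
  // start() offsets are in buffer time, not playback time, hence the multiplication
  const offset = begin * duration * playbackRate;
  duration = (end - begin) * duration; // audible length of the slice
  return { offset, duration };
}
// e.g. sliceTiming({ bufferDuration: 10, playbackRate: 2, begin: 0.5, end: 1 })
// -> { offset: 5, duration: 2.5 }: start 5 s into the buffer, audible for 2.5 s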

export const webaudioOutputTrigger = (t, hap, ct, cps) => webaudioOutput(hap, t - ct, hap.duration / cps);
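webaudioOutput builds a flat chain array and wires it with reduce, then tears the graph down in onended. A minimal sketch of that wiring pattern using plain Web Audio nodes instead of Strudel's helpers (gainNode, getFilter, and friends), assuming a browser AudioContext:

// hedged sketch of the chain wiring and cleanup pattern
const ctx = new AudioContext();
const osc = ctx.createOscillator();
const amp = ctx.createGain();
amp.gain.value = 0.3;
const chain = [osc, amp, ctx.destination];
// connect each node to the next: osc -> amp -> destination
// (AudioNode.connect returns its destination, so reduce threads the chain)
chain.slice(1).reduce((last, current) => last.connect(current), chain[0]);
osc.start();
osc.stop(ctx.currentTime + 1);
// free the graph once the source is done, like chain[0].onended above
osc.onended = () => chain.forEach((n) => n.disconnect());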
@@ -30,14 +30,14 @@ npm run static # <- test static build
currently broken / buggy:

- [x] MiniREPL
- [x] repl log section => now using browser console
- [x] repl log section
- [ ] hideHeader flag
- [ ] pending flag
- [x] web midi, TODO: test
- [ ] draw / pianoroll
- [x] repl url hash does not work
- [x] pause does stop
- [ ] pause then play logs "TOO LATE" and drops some events
- [-] pause then play logs "TOO LATE" and drops some events => now doing full stop
- [x] random button triggers start
- [x] unexpected ast format without body expression (kalimba)
- [x] highlighting seems too late (off by latency ?)
@@ -138,9 +138,9 @@ function App() {
    initCode().then((decoded) => {
      pushLog(
        `🌀 Welcome to Strudel! ${
          decoded ? `Code was decoded from the URL` : `A random code snippet named "${name}" has been loaded!`
        } Press play or hit ctrl+enter to listen!`,
        'info',
          decoded ? `I have loaded the code from the URL.` : `A random code snippet named "${name}" has been loaded!`
        } Press play or hit ctrl+enter to run it!`,
        'highlight',
      );
      setCode(decoded || randomTune);
    });
@@ -174,12 +174,13 @@ function App() {
  });

  return (
    // bg-gradient-to-t from-blue-900 to-slate-900
    <div className="h-screen flex flex-col">
      {!hideHeader && (
        <header
          id="header"
          className={cx(
            'flex-none w-full md:flex text-black shadow-lg justify-between z-[100] text-lg bg-linegray select-none sticky top-0',
            'flex-none w-full md:flex text-black justify-between z-[100] text-lg bg-header select-none sticky top-0',
            isEmbedded ? 'h-12 md:h-8' : 'h-25 md:h-14',
          )}
        >
@@ -322,10 +323,17 @@ function App() {
          </div>
        </header>
      )}
      <section className="grow flex text-gray-100 relative overflow-auto cursor-text" id="code">
        <CodeMirror value={code} onChange={setCode} onViewChanged={setView} />
      <section className="grow flex text-gray-100 relative overflow-auto cursor-text pb-4" id="code">
        <CodeMirror
          value={code}
          onChange={(c) => {
            setCode(c);
            started && pushLog('[edit] code changed. hit ctrl+enter to update');
          }}
          onViewChanged={setView}
        />
      </section>
      <footer className="bg-linegray">
      <footer className="bg-footer">
        {/* {error && (
          <div
            className={cx(
@@ -338,14 +346,14 @@ function App() {
        )} */}
        <div
          ref={logBox}
          className="text-white font-mono text-sm h-32 flex-none overflow-auto max-w-full break-all p-2"
          className="text-white font-mono text-sm h-32 flex-none overflow-auto max-w-full break-all p-4"
        >
          {log.map((l, i) => (
            <div
              key={l.index}
              className={cx(l.type === 'error' && 'text-red-500', l.type === 'info' && 'text-secondary')}
              className={cx(l.type === 'error' && 'text-red-500', l.type === 'highlight' && 'text-highlight')}
            >
              {l.index}: {l.message}
              > {l.message}
              {l.count ? ` (${l.count})` : ''}
            </div>
          ))}
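The `{l.count ? ...}` rendering above suggests repeated log messages are collapsed into a single entry with a counter. The actual pushLog is not part of this diff, so the following is only a hypothetical sketch of how such a helper could work with a React state setter:

// hedged sketch, hypothetical pushLog with message deduplication
const makePushLog = (setLog) => (message, type) =>
  setLog((log) => {
    const last = log[log.length - 1];
    if (last && last.message === message && last.type === type) {
      // same message again: bump its counter instead of appending a new row
      return [...log.slice(0, -1), { ...last, count: (last.count || 1) + 1 }];
    }
    return [...log, { message, type, index: log.length }];
  });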
@@ -16,6 +16,12 @@ module.exports = {
        highlight: '#ffcc00',
        linegray: '#8a91991a',
        lineblack: '#00000095',
        bg: '#222222',
        // header: '#8a91991a',
        // footer: '#8a91991a',
        // header: '#00000050',
        header: 'transparent',
        footer: '#00000050',
      },
    },
  },
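For context on how these tokens are consumed: each key under Tailwind's theme colors becomes a set of utility classes (bg-header, bg-footer, text-highlight), which is what the App.jsx hunk above switches to. A minimal sketch of the relevant config shape, assuming the keys sit under theme.extend.colors as the nesting in the hunk suggests:

// hedged sketch of the tailwind.config.js shape implied by the hunk
module.exports = {
  theme: {
    extend: {
      colors: {
        highlight: '#ffcc00',
        header: 'transparent',
        footer: '#00000050',
      },
    },
  },
};
// usage in JSX: <footer className="bg-footer"> ... <span className="text-highlight">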