Mirror of https://github.com/eliasstepanik/strudel-docker.git, synced 2026-01-22 19:18:31 +00:00

Merge pull request #48 from tidalcycles/squeezejoin
First run at squeezeBind, ref #32

Commit ac5c844f0b
@@ -16,6 +16,11 @@ Fraction.prototype.wholeCycle = function () {
   return new TimeSpan(this.sam(), this.nextSam());
 };
 
+// The position of a time value relative to the start of its cycle.
+Fraction.prototype.cyclePos = function () {
+  return this.sub(this.sam());
+};
+
 Fraction.prototype.lt = function (other) {
   return this.compare(other) < 0;
 };
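A quick usage sketch of the new cyclePos helper (not part of the diff). It assumes Fraction is a named export of strudel.mjs, and that sam() gives the integer start of the cycle containing the time, as wholeCycle above relies on:

  import { Fraction } from '../strudel.mjs'; // assumed export; Fraction is the time type used in strudel.mjs

  const t = Fraction(7).div(4); // time 7/4, i.e. three quarters of the way through cycle 1
  t.sam();                      // => 1 as a Fraction (start of the cycle containing t)
  t.cyclePos();                 // => 3/4 as a Fraction (t relative to the start of its cycle)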
@@ -59,6 +59,15 @@ class TimeSpan {
     return spans;
   }
 
+  cycleArc() {
+    // Shifts a timespan to one of equal duration that starts within cycle zero.
+    // (Note that the output timespan probably does not start *at* Time 0 --
+    // that only happens when the input Arc starts at an integral Time.)
+    const b = this.begin.cyclePos();
+    const e = b + (this.end - this.begin);
+    return new TimeSpan(b, e);
+  }
+
   withTime(func_time) {
     // Applies given function to both the begin and end time value of the timespan"""
     return new TimeSpan(func_time(this.begin), func_time(this.end));
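A hedged sketch of cycleArc in use (not part of the diff; assumes TimeSpan and Fraction are named exports of strudel.mjs):

  import { TimeSpan, Fraction } from '../strudel.mjs'; // assumed exports

  // A quarter-cycle span that starts halfway through cycle 2...
  const span = new TimeSpan(Fraction(5).div(2), Fraction(11).div(4));
  // ...is shifted back so it starts within cycle zero, keeping its duration:
  span.cycleArc(); // => a TimeSpan from 1/2 to 3/4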
@@ -140,6 +149,10 @@ class Hap {
     return this.whole.end.sub(this.whole.begin).valueOf();
   }
 
+  wholeOrPart() {
+    return this.whole ? this.whole : this.part;
+  }
+
   withSpan(func) {
     // Returns a new event with the function f applies to the event timespan.
     const whole = this.whole ? func(this.whole) : undefined;
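wholeOrPart falls back to the part when an event has no whole (as with event fragments and continuous signals). A minimal sketch, assuming Hap, TimeSpan and Fraction are exported from strudel.mjs:

  import { Hap, TimeSpan, Fraction } from '../strudel.mjs'; // assumed exports

  const whole = new TimeSpan(Fraction(0), Fraction(1));
  const part = new TimeSpan(Fraction(0), Fraction(1).div(2));

  new Hap(whole, part, 'a', {}).wholeOrPart();     // => the whole, 0 -> 1
  new Hap(undefined, part, 'a', {}).wholeOrPart(); // => the part, 0 -> 1/2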
@@ -186,6 +199,11 @@ class Hap {
     );
   }
 
+  combineContext(b) {
+    const a = this;
+    return { ...a.context, ...b.context, locations: (a.context.locations || []).concat(b.context.locations || []) };
+  }
+
   setContext(context) {
     return new Hap(this.whole, this.part, this.value, context);
   }
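combineContext merges two haps' contexts: the argument's keys win on conflict (plain spread order), while the locations arrays are concatenated. A sketch with hypothetical context keys (color, velocity) and placeholder location entries:

  import { Hap, TimeSpan, Fraction } from '../strudel.mjs'; // assumed exports

  const span = new TimeSpan(Fraction(0), Fraction(1));
  const a = new Hap(span, span, 'a', { color: 'red', locations: [1] });  // hypothetical context
  const b = new Hap(span, span, 'b', { velocity: 0.5, locations: [2] }); // hypothetical context

  a.combineContext(b);
  // => { color: 'red', velocity: 0.5, locations: [1, 2] }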
@@ -351,12 +369,11 @@ class Pattern {
       if (s == undefined) {
         return undefined;
       }
-      // TODO: is it right to add event_val.context here?
       return new Hap(
         whole_func(event_func.whole, event_val.whole),
         s,
         event_func.value(event_val.value),
-        event_val.context,
+        event_val.combineContext(event_func),
       );
     };
     return flatten(
@@ -388,11 +405,8 @@ class Pattern {
             const new_whole = hap_func.whole;
             const new_part = hap_func.part.intersection_e(hap_val.part);
             const new_value = hap_func.value(hap_val.value);
-            const hap = new Hap(new_whole, new_part, new_value, {
-              ...hap_val.context,
-              ...hap_func.context,
-              locations: (hap_val.context.locations || []).concat(hap_func.context.locations || []),
-            });
+            const new_context = hap_val.combineContext(hap_func);
+            const hap = new Hap(new_whole, new_part, new_value, new_context);
             haps.push(hap);
           }
         }
@@ -412,11 +426,8 @@ class Pattern {
             const new_whole = hap_val.whole;
             const new_part = hap_func.part.intersection_e(hap_val.part);
             const new_value = hap_func.value(hap_val.value);
-            const hap = new Hap(new_whole, new_part, new_value, {
-              ...hap_func.context,
-              ...hap_val.context,
-              locations: (hap_val.context.locations || []).concat(hap_func.context.locations || []),
-            });
+            const new_context = hap_val.combineContext(hap_func);
+            const hap = new Hap(new_whole, new_part, new_value, new_context);
             haps.push(hap);
           }
         }
@@ -572,6 +583,39 @@ class Pattern {
     return this.outerBind(id);
   }
 
+  squeezeJoin() {
+    const pat_of_pats = this;
+    function query(state) {
+      const haps = pat_of_pats.query(state);
+      function flatHap(outerHap) {
+        const pat = outerHap.value._compressSpan(outerHap.wholeOrPart().cycleArc());
+        const innerHaps = pat.query(state.setSpan(outerHap.part));
+        function munge(outer, inner) {
+          let whole = undefined;
+          if (inner.whole && outer.whole) {
+            whole = inner.whole.intersection(outer.whole);
+            if (!whole) {
+              // The wholes are present, but don't intersect
+              return undefined;
+            }
+          }
+          const part = inner.part.intersection(outer.part);
+          if (!part) {
+            // The parts don't intersect
+            return undefined;
+          }
+          const context = inner.combineContext(outer);
+          return new Hap(whole, part, inner.value, context);
+        }
+        return innerHaps.map(innerHap => munge(outerHap, innerHap))
+      }
+      const result = flatten(haps.map(flatHap));
+      // remove undefineds
+      return result.filter(x => x);
+    }
+    return new Pattern(query);
+  }
+
   _apply(func) {
     return func(this);
   }
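squeezeJoin takes a pattern whose values are themselves patterns and squeezes each inner pattern into the timespan of the outer event carrying it. The sketch below simply restates the "Can squeeze" test added later in this diff:

  import { sequence, fastcat } from '../strudel.mjs';

  // Replace every outer value with a two-step pattern, then squeeze it in place:
  const squeezed = sequence('a', ['a', 'a']).fmap(() => fastcat('b', 'c')).squeezeJoin();

  // The result is the same as writing the squeezed structure directly:
  const direct = sequence(['b', 'c'], [['b', 'c'], ['b', 'c']]);
  // squeezed.firstCycle() deep-equals direct.firstCycle()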
@@ -633,6 +677,10 @@ class Pattern {
     return this._fast(Fraction(1).div(factor));
   }
 
+  _ply(factor) {
+    return this.fmap(x => pure(x)._fast(factor)).squeezeJoin()
+  }
+
   // cpm = cycles per minute
   _cpm(cpm) {
     return this._fast(cpm / 60);
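_ply is defined directly in terms of squeezeJoin: each value is wrapped in a pure pattern, sped up by the factor, and squeezed back into the event's own span. A usage sketch mirroring the "Can ply(3)" test added later in this diff:

  import { sequence, pure } from '../strudel.mjs';

  // Each event is repeated three times within its own timespan:
  const plied = sequence('a', ['b', 'c']).ply(3);

  // ...which is the same as speeding each value up in place:
  const expected = sequence(pure('a').fast(3), [pure('b').fast(3), pure('c').fast(3)]);
  // plied.firstCycle() deep-equals expected.firstCycle()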
@@ -829,6 +877,7 @@ Pattern.prototype.patternified = [
   'apply',
   'fast',
   'slow',
+  'ply',
   'cpm',
   'early',
   'late',
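Registering 'ply' in patternified is what exposes the public .ply() used by the tests, with _ply above as its implementation. A hedged sketch of that relationship, assuming the patternified wrapper simply forwards a plain (non-pattern) argument to the underscore method:

  import { sequence } from '../strudel.mjs';

  const viaPublic = sequence('a', ['b', 'c']).ply(3);       // patternified method registered here
  const viaUnderscore = sequence('a', ['b', 'c'])._ply(3);  // implementation added above
  // Under the assumption above, both produce the same haps:
  // viaPublic.firstCycle() deep-equals viaUnderscore.firstCycle()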
@@ -836,7 +885,7 @@ Pattern.prototype.patternified = [
   'legato',
   'velocity',
   'segment',
-  'color',
+  'color'
 ];
 // methods that create patterns, which are added to patternified Pattern methods
 Pattern.prototype.factories = { pure, stack, slowcat, fastcat, cat, timeCat, sequence, polymeter, pm, polyrhythm, pr };
@@ -995,7 +1044,6 @@ function polymeterSteps(steps, ...args) {
     if (steps == seq[1]) {
       pats.push(seq[0]);
     } else {
-      console.log("aha");
       pats.push(seq[0]._fast(Fraction(steps).div(Fraction(seq[1]))));
     }
   }
@@ -1050,6 +1098,7 @@ const iter = curry((a, pat) => pat.iter(a));
 const iterBack = curry((a, pat) => pat.iter(a));
 const chunk = curry((a, pat) => pat.chunk(a));
 const chunkBack = curry((a, pat) => pat.chunkBack(a));
+const ply = curry((a, pat) => pat.ply(a));
 
 // problem: curried functions with spread arguments must have pat at the beginning
 // with this, we cannot keep the pattern open at the end.. solution for now: use array to keep using pat as last arg
@@ -1207,4 +1256,5 @@ export {
   iterBack,
   chunk,
   chunkBack,
+  ply,
 };
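With the curried ply helper exported, it can be used standalone as well as via the method. A sketch (not part of the diff), assuming curry supports both full and partial application:

  import { ply, sequence } from '../strudel.mjs';

  ply(3, sequence('a', 'b'));   // direct, standalone form
  ply(3)(sequence('a', 'b'));   // partially applied, handy inside function pipelines
  // Both are equivalent to sequence('a', 'b').ply(3).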
The second file in the diff, the Pattern test suite, picks up the new imports and tests:

@@ -33,6 +33,8 @@ import {
   square2,
   tri,
   tri2,
+  id,
+  ply,
 } from '../strudel.mjs';
 //import { Time } from 'tone';
 import pkg from 'tone';
@@ -489,4 +491,20 @@ describe('Pattern', function() {
       )
     })
   })
+  describe("squeezeJoin", () => {
+    it("Can squeeze", () => {
+      assert.deepStrictEqual(
+        sequence("a", ["a","a"]).fmap(a => fastcat("b", "c")).squeezeJoin().firstCycle(),
+        sequence(["b", "c"],[["b", "c"],["b", "c"]]).firstCycle()
+      )
+    })
+  })
+  describe("ply", () => {
+    it("Can ply(3)", () => {
+      assert.deepStrictEqual(
+        sequence("a", ["b","c"]).ply(3).firstCycle(),
+        sequence(pure("a").fast(3), [pure("b").fast(3), pure("c").fast(3)]).firstCycle()
+      )
+    })
+  })
 })
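Since _ply is literally fmap + fast + squeezeJoin, one more assertion in the same style would pin that equivalence down directly. A hedged sketch (not part of the diff), meant to sit in the same test file and using only identifiers already imported there:

  describe('ply vs squeezeJoin', () => {
    it('ply(3) matches its squeezeJoin definition', () => {
      assert.deepStrictEqual(
        sequence('a', ['b', 'c']).ply(3).firstCycle(),
        sequence('a', ['b', 'c']).fmap(x => pure(x).fast(3)).squeezeJoin().firstCycle()
      )
    })
  })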