This commit is contained in:
2023-12-18 17:28:49 +02:00
12 changed files with 472 additions and 135 deletions

View File

@ -378,6 +378,14 @@
</svg>
<span class="text-selection_foreground">Destroy universes</span>
</button>
<!-- Upload audio samples -->
<p class="font-bold lg:text-xl text-sm ml-4 pb-2 pt-2 underline underline-offset-4 text-selection_background">Audio samples</p>
<label class="bg-brightwhite font-bold lg:py-4 lg:px-2 px-1 py-2 rounded-lg inline-flex items-center mx-4 text-selection_background">
<svg class="rotate-180 fill-current w-4 h-6 mr-2" xmlns="http://www.w3.org/2000/svg" viewBox="0 0 20 20"><path d="M13 8V2H7v6H2l8 8 8-8h-5zM0 18h20v2H0v-2z"/></svg>
<input id="upload-samples" type="file" class="hidden" accept="file" webkitdirectory directory multiple>
<span id="sample-indicator" class="text-selection_foreground">Import samples</span>
</label>
</div>
</div>
</div>

View File

@ -2920,7 +2920,7 @@ export class UserAPI {
address: address,
port: port,
args: args,
timetag: Math.round(Date.now() + this.app.clock.deadline),
timetag: Math.round(Date.now() + (this.app.clock.nudge - this.app.clock.deviation)),
} as OSCMessage);
};

View File

@ -1,11 +1,7 @@
// @ts-ignore
import { TransportNode } from "./TransportNode";
import TransportProcessor from "./TransportProcessor?worker&url";
import { Editor } from "./main";
import { tryEvaluate } from "./Evaluator";
// @ts-ignore
import { getAudioContext } from "superdough";
// @ts-ignore
import "zyklus";
const zeroPad = (num: number, places: number) =>
String(num).padStart(places, "0");
export interface TimePosition {
/**
@ -22,29 +18,35 @@ export interface TimePosition {
export class Clock {
/**
* The Clock Class is responsible for keeping track of the current time.
* It is also responsible for starting and stopping the Clock TransportNode.
*
* @param app - main application instance
* @param clock - zyklus clock
* @param ctx - current AudioContext used by app
* @param bpm - current beats per minute value
* @param time_signature - time signature
* @param time_position - current time position
* @param ppqn - pulses per quarter note
* @param tick - current tick since origin
* @param app - The main application instance
* @param ctx - The current AudioContext used by app
* @param transportNode - The TransportNode helper
* @param bpm - The current beats per minute value
* @param time_signature - The time signature
* @param time_position - The current time position
* @param ppqn - The pulses per quarter note
* @param tick - The current tick since origin
* @param running - Is the clock running?
* @param lastPauseTime - The last time the clock was paused
* @param lastPlayPressTime - The last time the clock was started
* @param totalPauseTime - The total time the clock has been paused / stopped
*/
private _bpm: number;
private _ppqn: number;
clock: any;
ctx: AudioContext;
logicalTime: number;
transportNode: TransportNode | null;
private _bpm: number;
time_signature: number[];
time_position: TimePosition;
private _ppqn: number;
tick: number;
running: boolean;
timeviewer: HTMLElement;
deadline: number;
lastPauseTime: number;
lastPlayPressTime: number;
totalPauseTime: number;
constructor(
public app: Editor,
@ -56,59 +58,31 @@ export class Clock {
this.tick = 0;
this._bpm = 120;
this._ppqn = 48;
this.transportNode = null;
this.ctx = ctx;
this.running = true;
this.deadline = 0;
this.timeviewer = document.getElementById("timeviewer")!;
this.clock = getAudioContext().createClock(
this.clockCallback,
this.pulse_duration,
);
this.lastPauseTime = 0;
this.lastPlayPressTime = 0;
this.totalPauseTime = 0;
ctx.audioWorklet
.addModule(TransportProcessor)
.then((e) => {
this.transportNode = new TransportNode(ctx, {}, this.app);
this.transportNode.connect(ctx.destination);
return e;
})
.catch((e) => {
console.log("Error loading TransportProcessor.js:", e);
});
}
// @ts-ignore
clockCallback = (time: number, duration: number, tick: number) => {
/**
* Callback function for the zyklus clock. Updates the clock info and sends a
* MIDI clock message if the setting is enabled. Also evaluates the global buffer.
*
* @param time - precise AudioContext time when the tick should happen
* @param duration - seconds between each tick
* @param tick - count of the current tick
*/
let deadline = time - getAudioContext().currentTime;
this.deadline = deadline;
this.tick = tick;
if (this.app.clock.running) {
if (this.app.settings.send_clock) {
this.app.api.MidiConnection.sendMidiClock();
}
const futureTimeStamp = this.app.clock.convertTicksToTimeposition(
this.app.clock.tick,
);
this.app.clock.time_position = futureTimeStamp;
if (futureTimeStamp.pulse % this.app.clock.ppqn == 0) {
this.timeviewer.innerHTML = `${zeroPad(futureTimeStamp.bar, 2)}:${
futureTimeStamp.beat + 1
} / ${this.app.clock.bpm}`;
}
if (this.app.exampleIsPlaying) {
tryEvaluate(this.app, this.app.example_buffer);
} else {
tryEvaluate(this.app, this.app.global_buffer);
}
}
// Implement TransportNode clock callback and update clock info with it
};
convertTicksToTimeposition(ticks: number): TimePosition {
/**
* Converts ticks to a time position.
*
* @param ticks - ticks to convert
* @returns TimePosition
* Converts ticks to a TimePosition object.
* @param ticks The number of ticks to convert.
* @returns The TimePosition object representing the converted ticks.
*/
const beatsPerBar = this.app.clock.time_signature[0];
const ppqnPosition = ticks % this.app.clock.ppqn;
const beatNumber = Math.floor(ticks / this.app.clock.ppqn);
@ -119,9 +93,10 @@ export class Clock {
get ticks_before_new_bar(): number {
/**
* Calculates the number of ticks before the next bar.
* This function returns the number of ticks separating the current moment
* from the beginning of the next bar.
*
* @returns number - ticks before the next bar
* @returns number of ticks until next bar
*/
const ticskMissingFromBeat = this.ppqn - this.time_position.pulse;
const beatsMissingFromBar = this.beats_per_bar - this.time_position.beat;
@ -130,9 +105,10 @@ export class Clock {
get next_beat_in_ticks(): number {
/**
* Calculates the number of ticks before the next beat.
* This function returns the number of ticks separating the current moment
* from the beginning of the next beat.
*
* @returns number - ticks before the next beat
* @returns number of ticks until next beat
*/
return this.app.clock.pulses_since_origin + this.time_position.pulse;
}
@ -140,8 +116,6 @@ export class Clock {
get beats_per_bar(): number {
/**
* Returns the number of beats per bar.
*
* @returns number - beats per bar
*/
return this.time_signature[0];
}
@ -150,7 +124,7 @@ export class Clock {
/**
* Returns the number of beats since the origin.
*
* @returns number - beats since the origin
* @returns number of beats since origin
*/
return Math.floor(this.tick / this.ppqn);
}
@ -159,7 +133,7 @@ export class Clock {
/**
* Returns the number of pulses since the origin.
*
* @returns number - pulses since the origin
* @returns number of pulses since origin
*/
return this.tick;
}
@ -167,112 +141,119 @@ export class Clock {
get pulse_duration(): number {
/**
* Returns the duration of a pulse in seconds.
* @returns number - duration of a pulse in seconds
*/
return 60 / this.bpm / this.ppqn;
}
public pulse_duration_at_bpm(bpm: number = this.bpm): number {
/**
* Returns the duration of a pulse in seconds at a given bpm.
*
* @param bpm - bpm to calculate the pulse duration for
* @returns number - duration of a pulse in seconds
* Returns the duration of a pulse in seconds at a specific bpm.
*/
return 60 / bpm / this.ppqn;
}
get bpm(): number {
/**
* Returns the current bpm.
* @returns number - current bpm
*/
return this._bpm;
}
get tickDuration(): number {
/**
* Returns the duration of a tick in seconds.
* @returns number - duration of a tick in seconds
*/
return 1 / this.ppqn;
set nudge(nudge: number) {
/**
* Sets the nudge value and forwards it to the TransportNode.
*/
this.transportNode?.setNudge(nudge);
}
set bpm(bpm: number) {
/**
* Sets the bpm.
* @param bpm - bpm to set
*/
if (bpm > 0 && this._bpm !== bpm) {
this.transportNode?.setBPM(bpm);
this._bpm = bpm;
this.clock.setDuration(() => (this.tickDuration * 60) / this.bpm);
this.logicalTime = this.realTime;
}
}
get ppqn(): number {
/**
* Returns the current ppqn.
* @returns number - current ppqn
*/
return this._ppqn;
}
get realTime(): number {
/**
* Returns the elapsed AudioContext time, excluding the total time spent paused.
*/
return this.app.audioContext.currentTime - this.totalPauseTime;
}
get deviation(): number {
/**
* Returns the absolute drift between the logical tick time and the real elapsed time.
*/
return Math.abs(this.logicalTime - this.realTime);
}
set ppqn(ppqn: number) {
/**
* Sets the ppqn.
* @param ppqn - ppqn to set
* @returns number - current ppqn
*/
if (ppqn > 0 && this._ppqn !== ppqn) {
this._ppqn = ppqn;
this.transportNode?.setPPQN(ppqn);
this.logicalTime = this.realTime;
}
}
public incrementTick(bpm: number) {
/**
* Advances the tick counter and the logical time by one pulse at the given bpm.
*/
this.tick++;
this.logicalTime += this.pulse_duration_at_bpm(bpm);
}
public nextTickFrom(time: number, nudge: number): number {
/**
* Compute the time remaining before the next clock tick.
* @param time - audio context currentTime
* @param nudge - nudge in the future (in seconds)
* @returns remainingTime
*/
const pulseDuration = this.pulse_duration;
const nudgedTime = time + nudge;
const nextTickTime = Math.ceil(nudgedTime / pulseDuration) * pulseDuration;
const remainingTime = nextTickTime - nudgedTime;
return remainingTime;
}
public convertPulseToSecond(n: number): number {
/**
* Converts a number of pulses to a duration in seconds.
*/
return n * this.pulse_duration;
}
public start(): void {
/**
* Start the clock
* Starts the TransportNode (starts the clock).
*
* @remark also sends a MIDI message if a port is declared
*/
this.app.audioContext.resume();
this.running = true;
this.app.api.MidiConnection.sendStartMessage();
this.clock.start();
this.lastPlayPressTime = this.app.audioContext.currentTime;
this.totalPauseTime += this.lastPlayPressTime - this.lastPauseTime;
this.transportNode?.start();
}
public pause(): void {
/**
* Pause the clock.
* Pauses the TransportNode (pauses the clock).
*
* @remark also sends a MIDI message if a port is declared
*/
this.running = false;
this.transportNode?.pause();
this.app.api.MidiConnection.sendStopMessage();
this.clock.pause();
this.lastPauseTime = this.app.audioContext.currentTime;
this.logicalTime = this.realTime;
}
public stop(): void {
/**
* Stops the clock.
* Stops the TransportNode (stops the clock).
*
* @remark also sends a MIDI message if a port is declared
*/
this.running = false;
this.tick = 0;
this.lastPauseTime = this.app.audioContext.currentTime;
this.logicalTime = this.realTime;
this.time_position = { bar: 0, beat: 0, pulse: 0 };
this.app.api.MidiConnection.sendStopMessage();
this.clock.stop();
this.transportNode?.stop();
}
}
}
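The core of this change is the move from the zyklus clock's deadline to a drift-compensated offset: the clock advances a logical time by one pulse duration per tick, compares it with the real elapsed audio time (minus accumulated pauses), and schedules outgoing events at nudge - deviation. A minimal standalone sketch of that idea, with hypothetical names, not the project's actual API:

class DriftTracker {
  // Advances by one pulse duration per tick, mirroring Clock.incrementTick().
  logicalTime = 0;
  // Accumulated pause time, mirroring Clock.totalPauseTime.
  totalPauseTime = 0;

  constructor(private ctx: AudioContext, private bpm = 120, private ppqn = 48) {}

  get pulseDuration(): number {
    return 60 / this.bpm / this.ppqn;
  }

  // Elapsed audio time, excluding pauses (Clock.realTime).
  get realTime(): number {
    return this.ctx.currentTime - this.totalPauseTime;
  }

  // Drift between the logical tick grid and real elapsed time (Clock.deviation).
  get deviation(): number {
    return Math.abs(this.logicalTime - this.realTime);
  }

  onTick(): void {
    this.logicalTime += this.pulseDuration;
  }

  // Offset applied when scheduling outgoing events, as in the new
  // nudge - deviation timetag computation above.
  scheduleOffset(nudge: number): number {
    return nudge - this.deviation;
  }
}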

View File

@ -18,6 +18,8 @@ export const singleElements = {
load_universe_button: "load-universe-button",
download_universe_button: "download-universes",
upload_universe_button: "upload-universes",
upload_samples_button: "upload-samples",
sample_indicator: "sample-indicator",
destroy_universes_button: "destroy-universes",
documentation_button: "doc-button-1",
eval_button: "eval-button-1",
@ -81,7 +83,7 @@ export const createDocumentationStyle = (app: Editor) => {
p: "lg:text-2xl text-base text-white lg:mx-6 mx-2 my-4 leading-normal",
warning:
"animate-pulse lg:text-2xl font-bold text-brightred lg:mx-6 mx-2 my-4 leading-normal",
a: "lg:text-2xl text-base text-white",
a: "lg:text-2xl text-base text-brightred",
code: `lg:my-4 sm:my-1 text-base lg:text-xl block whitespace-pre overflow-x-hidden`,
icode:
"lg:my-1 my-1 lg:text-xl sm:text-xs text-brightwhite font-mono bg-brightblack",

View File

@ -81,8 +81,8 @@ export const getCodeMirrorTheme = (theme: {[key: string]: string}): Extension =>
},
"&.cm-focused .cm-selectionBackground, .cm-selectionBackground, .cm-content ::selection":
{
backgroundColor: selection_foreground,
border: `0.5px solid ${selection_background}`,
backgroundColor: brightwhite,
border: `1px solid ${brightwhite}`,
},
".cm-panels": {
backgroundColor: selection_background,
@ -98,18 +98,15 @@ export const getCodeMirrorTheme = (theme: {[key: string]: string}): Extension =>
backgroundColor: red,
},
".cm-activeLine": {
// backgroundColor: highlightBackground
backgroundColor: `${selection_foreground}`,
backgroundColor: `rgba(${(parseInt(selection_background.slice(1,3), 16))}, ${(parseInt(selection_background.slice(3,5), 16))}, ${(parseInt(selection_background.slice(5,7), 16))}, 0.25)`,
},
".cm-selectionMatch": {
backgroundColor: yellow,
outline: `1px solid ${red}`,
backgroundColor: `rgba(${(parseInt(selection_background.slice(1,3), 16))}, ${(parseInt(selection_background.slice(3,5), 16))}, ${(parseInt(selection_background.slice(5,7), 16))}, 0.25)`,
outline: `1px solid ${brightwhite}`,
},
"&.cm-focused .cm-matchingBracket": {
color: yellow,
// outline: `1px solid ${base02}`,
color: `rgba(${(parseInt(selection_background.slice(1,3), 16))}, ${(parseInt(selection_background.slice(3,5), 16))}, ${(parseInt(selection_background.slice(5,7), 16))}, 0.25)`,
},
"&.cm-focused .cm-nonmatchingBracket": {
color: yellow,
},
@ -153,9 +150,9 @@ export const getCodeMirrorTheme = (theme: {[key: string]: string}): Extension =>
{ tag: t.keyword, color: yellow },
{ tag: [t.name, t.deleted, t.character, t.macroName], color: red, },
{ tag: [t.function(t.variableName)], color: blue },
{ tag: [t.labelName], color: red },
{ tag: [t.labelName], color: brightwhite },
{ tag: [t.color, t.constant(t.name), t.standard(t.name)], color: cyan, },
{ tag: [t.definition(t.name), t.separator], color: magenta },
{ tag: [t.definition(t.name), t.separator], color: brightwhite },
{ tag: [t.brace], color: white },
{ tag: [t.annotation], color: blue, },
{ tag: [t.number, t.changed, t.annotation, t.modifier, t.self, t.namespace], color: yellow, },
@ -229,7 +226,7 @@ export const getCodeMirrorTheme = (theme: {[key: string]: string}): Extension =>
// pointerEvents: "none",
// },
// });
//
// const debugHighlightStyle = HighlightStyle.define(
// // @ts-ignore
// Object.entries(t).map(([key, value]) => {
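The rgba(...) expressions introduced above all rebuild a 25%-alpha overlay from the theme's selection_background hex value by hand; a small helper doing the same conversion could factor that out (hypothetical, not part of this commit):

const withAlpha = (hex: string, alpha: number): string => {
  // Expects a "#rrggbb" string, like the theme colors used above.
  const r = parseInt(hex.slice(1, 3), 16);
  const g = parseInt(hex.slice(3, 5), 16);
  const b = parseInt(hex.slice(5, 7), 16);
  return `rgba(${r}, ${g}, ${b}, ${alpha})`;
};

// e.g. backgroundColor: withAlpha(selection_background, 0.25)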

src/IO/SampleLoading.ts (new file, 155 lines)
View File

@ -0,0 +1,155 @@
/**
* This code is taken from https://github.com/tidalcycles/strudel/pull/839. The logic is written by
* daslyfe (Jade Rose Rowland). I have tweaked it a bit to fit the needs of this project (TypeScript),
* etc... Many thanks for this piece of code! This code is initially part of the Strudel project:
* https://github.com/tidalcycles/strudel.
*/
// @ts-ignore
import { registerSound, onTriggerSample } from "superdough";
export const isAudioFile = (filename: string) => ['wav', 'mp3'].includes(filename.split('.').slice(-1)[0]);
interface samplesDBConfig {
dbName: string,
table: string,
columns: string[],
version: number
}
export const samplesDBConfig = {
dbName: 'samples',
table: 'usersamples',
columns: ['data_url', 'title'],
version: 1
}
async function bufferToDataUrl(buf: ArrayBuffer): Promise<string> {
return new Promise<string>((resolve) => {
const blob = new Blob([buf], { type: 'application/octet-binary' });
const reader = new FileReader();
reader.onload = () => {
resolve(reader.result as string);
};
reader.readAsDataURL(blob);
});
}
const processFilesForIDB = async (files: FileList) => {
return await Promise.all(
Array.from(files)
.map(async (s: File) => {
const title = s.name;
if (!isAudioFile(title)) {
return;
}
//create obscured url to file system that can be fetched
const sUrl = URL.createObjectURL(s);
//fetch the sound and turn it into a buffer array
const buf = await fetch(sUrl).then((res) => res.arrayBuffer());
//create a data URL containing all of the buffer data
const base64 = await bufferToDataUrl(buf);
return {
title,
blob: base64,
id: s.webkitRelativePath,
};
})
.filter(Boolean),
).catch((error) => {
console.log('Something went wrong while processing uploaded files', error);
});
};
export const registerSamplesFromDB = (config: samplesDBConfig, onComplete = () => {}) => {
openDB(config, (objectStore: IDBObjectStore) => {
let query = objectStore.getAll();
query.onsuccess = (event: Event) => {
// @ts-ignore
const soundFiles = event.target.result;
if (!soundFiles?.length) {
return;
}
const sounds = new Map();
[...soundFiles]
.sort((a, b) => a.title.localeCompare(b.title, undefined, { numeric: true, sensitivity: 'base' }))
.forEach((soundFile) => {
const title = soundFile.title;
if (!isAudioFile(title)) {
return;
}
const splitRelativePath = soundFile.id?.split('/');
const parentDirectory = splitRelativePath[splitRelativePath.length - 2];
const soundPath = soundFile.blob;
const soundPaths = sounds.get(parentDirectory) ?? new Set();
soundPaths.add(soundPath);
sounds.set(parentDirectory, soundPaths);
});
sounds.forEach((soundPaths, key) => {
const value = Array.from(soundPaths);
// @ts-ignore
registerSound(key, (t, hapValue, onended) => onTriggerSample(t, hapValue, onended, value), {
type: 'sample',
samples: value,
baseUrl: undefined,
prebake: false,
tag: "user",
});
});
onComplete();
};
});
};
export const openDB = (config: samplesDBConfig, onOpened: Function) => {
const { dbName, version, table, columns } = config
if (!('indexedDB' in window)) {
console.log('This browser doesn\'t support IndexedDB')
return
}
const dbOpen = indexedDB.open(dbName, version);
dbOpen.onupgradeneeded = (_event) => {
const db = dbOpen.result;
const objectStore = db.createObjectStore(table, { keyPath: 'id', autoIncrement: false });
columns.forEach((c: any) => {
objectStore.createIndex(c, c, { unique: false });
});
};
dbOpen.onerror = function (err: Event) {
console.log('Error opening DB: ', (err.target as IDBOpenDBRequest).error);
}
dbOpen.onsuccess = function (_event: Event) {
const db = dbOpen.result;
db.onversionchange = function() {
db.close();
alert("Database is outdated, please reload the page.")
};
const writeTransaction = db.transaction([table], 'readwrite'),
objectStore = writeTransaction.objectStore(table);
// Writing in the database here!
onOpened(objectStore)
}
}
export const uploadSamplesToDB = async (config: samplesDBConfig, files: FileList) => {
await processFilesForIDB(files).then((files) => {
const onOpened = (objectStore: IDBObjectStore, _db: IDBDatabase) => {
// @ts-ignore
files.forEach((file: File) => {
if (file == null) {
return;
}
objectStore.put(file);
});
};
openDB(config, onOpened);
});
};
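For reference, judging from processFilesForIDB and the keyPath declared in openDB, the records that uploadSamplesToDB writes to the usersamples store have this shape (illustrative TypeScript only, not part of the commit):

interface StoredSample {
  // webkitRelativePath of the uploaded file; also the IndexedDB keyPath.
  id: string;
  // Original file name, e.g. "kick.wav".
  title: string;
  // base64 data URL produced by bufferToDataUrl, consumed by registerSamplesFromDB.
  blob: string;
}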

View File

@ -25,6 +25,7 @@ import { lineNumbers } from "@codemirror/view";
import { jsCompletions } from "./EditorSetup";
import { createDocumentationStyle } from "./DomElements";
import { saveState } from "./WindowBehavior";
import { registerSamplesFromDB, samplesDBConfig, uploadSamplesToDB } from "./IO/SampleLoading";
export const installInterfaceLogic = (app: Editor) => {
// Initialize style
@ -159,6 +160,21 @@ export const installInterfaceLogic = (app: Editor) => {
);
});
app.interface.upload_samples_button.addEventListener("input", async (event) => {
let fileInput = event.target as HTMLInputElement;
if (!fileInput.files?.length) {
return;
}
app.interface.sample_indicator.innerText = "Loading...";
app.interface.sample_indicator.classList.add("animate-pulse");
await uploadSamplesToDB(samplesDBConfig, fileInput.files).then(() => {
registerSamplesFromDB(samplesDBConfig, () => {
app.interface.sample_indicator.innerText = "Import samples";
app.interface.sample_indicator.classList.remove("animate-pulse");
});
});
});
app.interface.upload_universe_button.addEventListener("click", () => {
const fileInput = document.createElement("input");
fileInput.type = "file";
@ -583,4 +599,4 @@ export const installInterfaceLogic = (app: Editor) => {
console.log("Could not find element " + name);
}
});
};
};

src/TransportNode.js (new file, 65 lines)
View File

@ -0,0 +1,65 @@
import { tryEvaluate } from "./Evaluator";
const zeroPad = (num, places) => String(num).padStart(places, "0");
/**
* TransportNode bridges the main thread and the "transport" AudioWorkletProcessor:
* it forwards transport commands (start, pause, resume, stop, bpm, ppqn, nudge)
* to the processor and reacts to its "bang" messages by updating the clock position,
* refreshing the time viewer and evaluating the active buffer.
*/
export class TransportNode extends AudioWorkletNode {
constructor(context, options, application) {
super(context, "transport", options);
this.app = application;
this.port.addEventListener("message", this.handleMessage);
this.port.start();
this.timeviewer = document.getElementById("timeviewer");
}
/** @type {(this: MessagePort, ev: MessageEvent<any>) => any} */
handleMessage = (message) => {
if(message.data) {
if (message.data.type === "bang") {
if(this.app.clock.running) {
if (this.app.settings.send_clock) {
this.app.api.MidiConnection.sendMidiClock();
}
const futureTimeStamp = this.app.clock.convertTicksToTimeposition(
this.app.clock.tick
);
this.app.clock.time_position = futureTimeStamp;
this.timeviewer.innerHTML = `${zeroPad(futureTimeStamp.bar, 2)}:${futureTimeStamp.beat + 1
}:${zeroPad(futureTimeStamp.pulse, 2)} / ${this.app.clock.bpm}`;
if (this.app.exampleIsPlaying) {
tryEvaluate(this.app, this.app.example_buffer);
} else {
tryEvaluate(this.app, this.app.global_buffer);
}
this.app.clock.incrementTick(message.data.bpm);
}
}
}
};
start() {
this.port.postMessage({ type: "start" });
}
pause() {
this.port.postMessage({ type: "pause" });
}
resume() {
this.port.postMessage({ type: "resume" });
}
setBPM(bpm) {
this.port.postMessage({ type: "bpm", value: bpm });
}
setPPQN(ppqn) {
this.port.postMessage({ type: "ppqn", value: ppqn });
}
setNudge(nudge) {
this.port.postMessage({ type: "nudge", value: nudge });
}
stop() {
this.port.postMessage({type: "stop" });
}
}
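The node above and the processor below talk over the worklet's MessagePort with a small set of messages; sketched here as TypeScript types for readability (illustrative only, both files are plain JavaScript):

// Main thread -> worklet (sent by TransportNode).
type TransportCommand =
  | { type: "start" | "pause" | "resume" | "stop" }
  | { type: "bpm" | "ppqn" | "nudge"; value: number };

// Worklet -> main thread (sent by TransportProcessor on every new pulse).
type TransportReply = { type: "bang"; bpm: number };

// The processor additionally echoes back any { type: "ping" } message it receives.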

src/TransportProcessor.js (new file, 47 lines)
View File

@ -0,0 +1,47 @@
/**
* TransportProcessor is the AudioWorklet side of the transport: it maps the
* worklet's currentTime onto a pulse grid derived from bpm and ppqn, and posts
* a "bang" message to the main thread every time a new pulse is reached.
*/
class TransportProcessor extends AudioWorkletProcessor {
constructor(options) {
super(options);
this.port.addEventListener("message", this.handleMessage);
this.port.start();
this.nudge = 0;
this.started = false;
this.bpm = 120;
this.ppqn = 48;
this.currentPulsePosition = 0;
}
handleMessage = (message) => {
if (message.data && message.data.type === "ping") {
this.port.postMessage(message.data);
} else if (message.data.type === "start") {
this.started = true;
} else if (message.data.type === "pause") {
this.started = false;
} else if (message.data.type === "stop") {
this.started = false;
} else if (message.data.type === "bpm") {
this.bpm = message.data.value;
this.currentPulsePosition = currentTime;
} else if (message.data.type === "ppqn") {
this.ppqn = message.data.value;
this.currentPulsePosition = currentTime;
} else if (message.data.type === "nudge") {
this.nudge = message.data.value;
}
};
process(inputs, outputs, parameters) {
if (this.started) {
const adjustedCurrentTime = currentTime + this.nudge / 100;
const beatNumber = adjustedCurrentTime / (60 / this.bpm);
const currentPulsePosition = Math.ceil(beatNumber * this.ppqn);
if (currentPulsePosition > this.currentPulsePosition) {
this.currentPulsePosition = currentPulsePosition;
this.port.postMessage({ type: "bang", bpm: this.bpm });
}
}
return true;
}
}
registerProcessor("transport", TransportProcessor);
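The scheduling in process() comes down to mapping currentTime onto a pulse grid and emitting a bang whenever the integer pulse index increases. A short sketch of that arithmetic outside the worklet (hypothetical helper, not part of the commit; note that nudge is divided by 100 exactly as in the processor):

function pulseIndex(time: number, bpm: number, ppqn: number, nudge = 0): number {
  const adjusted = time + nudge / 100;      // nudged time, as in process()
  const beatNumber = adjusted / (60 / bpm); // beats elapsed so far
  return Math.ceil(beatNumber * ppqn);      // pulses elapsed so far
}

// At 120 BPM and 48 PPQN, one pulse lasts 60 / 120 / 48 ≈ 0.0104 s,
// so 96 pulses elapse per second:
pulseIndex(1.0, 120, 48);  // 96
pulseIndex(1.01, 120, 48); // 97 -> a new pulse, the processor posts a "bang"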

View File

@ -437,7 +437,11 @@ export class SoundEvent extends AudibleEvent {
if (filteredEvent.freq) {
delete filteredEvent.note;
}
superdough(filteredEvent, this.app.clock.deadline, filteredEvent.dur);
superdough(
filteredEvent,
this.nudge - this.app.clock.deviation,
filteredEvent.dur
);
}
};
@ -461,7 +465,7 @@ export class SoundEvent extends AudibleEvent {
address: oscAddress,
port: oscPort,
args: event,
timetag: Math.round(Date.now() + this.app.clock.deadline),
timetag: Math.round(Date.now() + (this.nudge - this.app.clock.deviation)),
} as OSCMessage);
}
};

View File

@ -148,5 +148,13 @@ This sample pack is only one folder full of French phonemes! It sounds super nice
<div class="lg:pl-6 lg:pr-6 w-fit rounded-lg bg-background mx-6 mt-2 my-6 px-2 py-2 max-h-96 flex flex-row flex-wrap gap-x-2 gap-y-2 overflow-y-scroll">
${samples_to_markdown(application, "Juliette")}
</div>
## Your samples
These samples are the ones you have loaded for the duration of the session using the <ic>Import samples</ic> button in the configuration menu.
<div class="lg:pl-6 lg:pr-6 w-fit rounded-lg bg-background mx-6 mt-2 my-6 px-2 py-2 max-h-96 flex flex-row flex-wrap gap-x-2 gap-y-2 overflow-y-scroll">
${samples_to_markdown(application, "user")}
</div>
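For example, if you imported a folder named <ic>mydrums</ic> (any folder name works, as the folder name becomes the sample bank name), you can play its content like any built-in sample bank: <ic>beat(1) :: sound("mydrums").out()</ic>.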
`;
};

View File

@ -8,7 +8,7 @@ export const generators = (application: Editor): string => {
JavaScript <a href="https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Generator" target="_blank">generators</a> are powerful functions for generating value sequences. They can be used to generate melodies, rhythms or control parameters.
In Topos, generator functions should be called using the <ic>cache(key, function)</ic> function to store the current state of the generator. This function takes two arguments: the name of the cache and the generator instance.
Once the generator is cached, the values will be returned from the named cache even if the generator function is modified. To clear the current cache and re-evaluate the modified generator, use the **Shift+Ctrl+Backspace** shortcut. Alternatively, you can cache the modified generator under a different name.
@ -38,14 +38,16 @@ ${makeExample(
const s = Math.tan(x/10)+Math.sin(x/20);
yield 2 * Math.pow(s, 3) - 6 * Math.pow(s, 2) + 5 * s + 200;
x++;
}
}
}
beat(.125) && sound("triangle").freq(cache("mathyshit",poly())).out()
`,
true,
)};
When you want to dance with a dynamical system in controlled musical chaos, Topos is waiting for you:
${makeExample(
"Truly scale free chaos inspired by Lorentz attractor",
`
@ -54,16 +56,16 @@ ${makeExample(
const dx = 10 * (y - x);
const dy = x * (rho - z) - y;
const dz = x * y - beta * z;
x += dx * 0.01;
y += dy * 0.01;
z += dz * 0.01;
const value = 300 + 30 * (Math.sin(x) + Math.tan(y) + Math.cos(z))
yield value;
}
}
beat(0.25) :: sound("triangle")
.freq(cache("stranger",strange(3,5,2)))
.adsr(.15,.1,.1,.1)
@ -72,9 +74,61 @@ ${makeExample(
true,
)};
${makeExample(
"Henon and his discrete music",
`
function* henonMap(x = 0, y = 0, a = 1.4, b = 0.3) {
while (true) {
const newX = 1 - a * x ** 2 + y;
const newY = b * x;
const fusionPoint = newX + newY
yield fusionPoint * 300;
[x, y] = [newX, newY]
}
}
beat(0.25) :: sound("sawtooth")
.semitones(1,1,2,2,2,1,2,1)
.freq(cache("Hénon Synth", henonMap()))
.adsr(0, 0.1, 0.1, 0.5).out()
z0('1 {-2}').octave(-2).sound('bd').out()
z1('e. 1 s 3!2 e 3!2 s 9 8 1')
.sound('dr').gain(0.3).octave(-5).out()
`,
true,
)};
${makeExample(
"1970s fractal dream",
`
function* rossler(x = 0.1, y = 0.1, z = 0.1, a = 0.2, b = 0.2, c = 5.7) {
while (true) {
const dx = - y - z;
const dy = x + (a * y);
const dz = b + (x * z) - (c * z);
x += dx * 0.01;
y += dy * 0.01;
z += dz * 0.01;
const value = 250 * (Math.cosh(x*z) + Math.sinh(y*z))
yield value % 120 + 100;
}
}
beat(0.25) :: sound("triangle")
.freq(cache("Rössler attractor", rossler(3,4,1)))
.adsr(0,.1,.1,.1)
.log("freq").out()
`,
true,
)};
## OEIS integer sequences
To find some inspiration - or to enter into the void - one can visit <a href="https://oeis.org/" target="_blank">The On-Line Encyclopedia of Integer Sequences (OEIS)</a> and browse its many interesting integer sequences.
Many of the sequences are implemented by the <a href="https://github.com/acerix/jisg/tree/main/src/oeis" target="_blank">JISG</a> (JavaScript Integer Sequence Generators) project. Those sequences can be referenced directly by their identifiers using the cache function.
@ -106,7 +160,7 @@ function* poly(x) {
x++;
}
}
z0(poly(1)).noteLength(0.5).semitones(2,2,3,2,2,2).sound("sine").out()
z1(poly(8)).noteLength(0.25).semitones(2,1,2,1,2,2).sound("sine").out()
z2(poly(-3)).noteLength(1.0).semitones(2,2,2,1,3,2).sound("sine").out()