commit b564e41820
2025-09-27 12:00:17 +02:00
60 changed files with 7925 additions and 0 deletions

src/App.tsx
@@ -0,0 +1,52 @@
import { useStore } from '@nanostores/react'
import { appSettings, generatedImages, isGenerating } from './stores'
import { generateTixyImages } from './generators/tixy'
import { generatePicsumImages } from './generators/picsum'
import { generateArtInstituteImages } from './generators/art-institute'
import { generateGeometricTilesImages } from './generators/geometric-tiles'
import GeneratorSelector from './components/GeneratorSelector'
import ImageGrid from './components/ImageGrid'
import AudioPanel from './components/AudioPanel'
function App() {
const settings = useStore(appSettings)
const generating = useStore(isGenerating)
const handleGenerate = async () => {
isGenerating.set(true)
try {
let newImages
if (settings.selectedGenerator === 'tixy') {
newImages = generateTixyImages(settings.gridSize, 64)
} else if (settings.selectedGenerator === 'picsum') {
newImages = await generatePicsumImages(settings.gridSize, 512)
} else if (settings.selectedGenerator === 'art-institute') {
newImages = await generateArtInstituteImages(settings.gridSize, 512)
} else if (settings.selectedGenerator === 'geometric-tiles') {
newImages = generateGeometricTilesImages(settings.gridSize, 256)
} else {
newImages = []
}
generatedImages.set(newImages)
} catch (error) {
console.error('Error generating images:', error)
}
isGenerating.set(false)
}
return (
<div className="min-h-screen bg-black text-white flex">
<div className="flex-1 flex flex-col">
<GeneratorSelector onGenerate={handleGenerate} isGenerating={generating} />
<ImageGrid />
</div>
<AudioPanel />
</div>
)
}
export default App

@@ -0,0 +1,272 @@
import { useStore } from '@nanostores/react'
import { selectedImage, generatedImages, synthesisParams } from '../stores'
import { synthesizeFromImage, downloadWAV, playAudio, type WindowType } from '../spectral-synthesis'
import { useState } from 'react'
export default function AudioPanel() {
const selected = useStore(selectedImage)
const images = useStore(generatedImages)
const params = useStore(synthesisParams)
const [isProcessing, setIsProcessing] = useState(false)
const handleClearSelection = () => {
selectedImage.set(null)
}
const updateParam = <K extends keyof typeof params>(key: K, value: typeof params[K]) => {
synthesisParams.set({ ...params, [key]: value })
}
const handleDownloadSingle = async () => {
if (!selected) return
setIsProcessing(true)
try {
const audio = synthesizeFromImage(selected.imageData, params)
downloadWAV(audio, params.sampleRate, `audio-${selected.id}.wav`)
} catch (error) {
console.error('Error generating audio:', error)
}
setIsProcessing(false)
}
const handlePlaySingle = async () => {
if (!selected) return
setIsProcessing(true)
try {
const audio = synthesizeFromImage(selected.imageData, params)
await playAudio(audio, params.sampleRate)
} catch (error) {
console.error('Error playing audio:', error)
}
setIsProcessing(false)
}
const handleDownloadAll = async () => {
if (images.length === 0) return
setIsProcessing(true)
try {
for (let i = 0; i < images.length; i++) {
const image = images[i]
const audio = synthesizeFromImage(image.imageData, params)
downloadWAV(audio, params.sampleRate, `audio-${image.id}.wav`)
if (i < images.length - 1) {
await new Promise(resolve => setTimeout(resolve, 500))
}
}
} catch (error) {
console.error('Error generating all audio:', error)
}
setIsProcessing(false)
}
return (
<div className="w-96 bg-black border-l border-gray-800 flex flex-col h-screen">
{/* Selected Image or Placeholder */}
<div className="p-4 border-b border-gray-800 flex-shrink-0">
{selected ? (
<div className="flex items-center space-x-3">
<div className="w-16 h-16 border border-gray-600">
<canvas
ref={(canvas) => {
if (canvas && selected.canvas) {
const ctx = canvas.getContext('2d')!
canvas.width = selected.canvas.width
canvas.height = selected.canvas.height
canvas.style.width = '100%'
canvas.style.height = '100%'
if (selected.generator === 'tixy' || selected.generator === 'geometric-tiles') {
ctx.imageSmoothingEnabled = false
} else {
ctx.imageSmoothingEnabled = true
}
ctx.drawImage(selected.canvas, 0, 0)
}
}}
className="w-full h-full block"
style={{ imageRendering: (selected.generator === 'tixy' || selected.generator === 'geometric-tiles') ? 'pixelated' : 'auto' }}
/>
</div>
<div className="text-gray-300">
<div className="text-sm font-medium">{selected.generator}</div>
<div className="text-xs text-gray-500">ID: {selected.id.slice(-8)}</div>
</div>
</div>
) : (
<div className="text-center text-gray-500 py-8">
<div className="text-sm">No image selected</div>
<div className="text-xs mt-1">Click an image from the grid to export audio</div>
</div>
)}
</div>
{/* Synthesis Parameters - Scrollable */}
<div className="flex-1 overflow-y-auto">
<div className="p-4 space-y-4">
<h4 className="text-white font-medium text-sm">Synthesis Parameters</h4>
<div className="space-y-3">
<div>
<label className="block text-xs text-gray-400 mb-1">
Duration: {params.duration}s
</label>
<input
type="range"
min="1"
max="30"
step="1"
value={params.duration}
onChange={(e) => updateParam('duration', Number(e.target.value))}
className="w-full"
/>
</div>
<div>
<label className="block text-xs text-gray-400 mb-1">
Max Partials: {params.maxPartials}
</label>
<input
type="range"
min="10"
max="500"
step="10"
value={params.maxPartials}
onChange={(e) => updateParam('maxPartials', Number(e.target.value))}
className="w-full"
/>
<p className="text-xs text-gray-500 mt-1">Controls audio complexity vs performance</p>
</div>
<div>
<label className="block text-xs text-gray-400 mb-1">
Frequency Resolution: {params.frequencyResolution}x
</label>
<input
type="range"
min="1"
max="10"
step="1"
value={params.frequencyResolution}
onChange={(e) => updateParam('frequencyResolution', Number(e.target.value))}
className="w-full"
/>
<p className="text-xs text-gray-500 mt-1">Skip frequency bins for performance</p>
</div>
<div>
<label className="block text-xs text-gray-400 mb-1">
Time Resolution: {params.timeResolution}x
</label>
<input
type="range"
min="1"
max="10"
step="1"
value={params.timeResolution}
onChange={(e) => updateParam('timeResolution', Number(e.target.value))}
className="w-full"
/>
<p className="text-xs text-gray-500 mt-1">Skip time slices for performance</p>
</div>
<div>
<label className="block text-xs text-gray-400 mb-1">
Min Frequency: {params.minFreq}Hz
</label>
<input
type="range"
min="20"
max="200"
step="10"
value={params.minFreq}
onChange={(e) => updateParam('minFreq', Number(e.target.value))}
className="w-full"
/>
</div>
<div>
<label className="block text-xs text-gray-400 mb-1">
Max Frequency: {params.maxFreq}Hz
</label>
<input
type="range"
min="1000"
max="20000"
step="500"
value={params.maxFreq}
onChange={(e) => updateParam('maxFreq', Number(e.target.value))}
className="w-full"
/>
</div>
<div>
<label className="block text-xs text-gray-400 mb-1">
Amplitude Threshold: {params.amplitudeThreshold}
</label>
<input
type="range"
min="0.001"
max="0.1"
step="0.001"
value={params.amplitudeThreshold}
onChange={(e) => updateParam('amplitudeThreshold', Number(e.target.value))}
className="w-full"
/>
<p className="text-xs text-gray-500 mt-1">Minimum amplitude to include</p>
</div>
<div>
<label className="block text-xs text-gray-400 mb-1">
Window Type: {params.windowType}
</label>
<select
value={params.windowType}
onChange={(e) => updateParam('windowType', e.target.value as WindowType)}
className="w-full px-2 py-1 bg-gray-800 border border-gray-600 text-white text-xs focus:outline-none focus:border-gray-400"
>
<option value="rectangular">Rectangular (No windowing)</option>
<option value="hann">Hann (Smooth, good general purpose)</option>
<option value="hamming">Hamming (Sharper frequency response)</option>
<option value="blackman">Blackman (Minimal artifacts)</option>
</select>
<p className="text-xs text-gray-500 mt-1">Reduces clicking/popping between time frames</p>
</div>
</div>
</div>
</div>
{/* Action Buttons - Always Visible */}
<div className="p-4 border-t border-gray-800 space-y-3 flex-shrink-0">
<div className="flex space-x-2">
<button
onClick={handlePlaySingle}
disabled={isProcessing || !selected}
className="flex-1 bg-gray-700 text-white py-2 px-3 text-sm hover:bg-gray-600 disabled:bg-gray-800 disabled:cursor-not-allowed"
>
{isProcessing ? 'Processing...' : 'Play'}
</button>
<button
onClick={handleDownloadSingle}
disabled={isProcessing || !selected}
className="flex-1 bg-white text-black py-2 px-3 text-sm hover:bg-gray-200 disabled:bg-gray-600 disabled:text-gray-400 disabled:cursor-not-allowed"
>
Download
</button>
</div>
<button
onClick={handleDownloadAll}
disabled={isProcessing || images.length === 0}
className="w-full bg-gray-700 text-white py-2 px-3 text-sm hover:bg-gray-600 disabled:bg-gray-800 disabled:cursor-not-allowed border border-gray-600"
>
Download All ({images.length})
</button>
</div>
</div>
)
}

@@ -0,0 +1,63 @@
import { useStore } from '@nanostores/react'
import { appSettings, type GeneratorType } from '../stores'
interface GeneratorSelectorProps {
onGenerate: () => void
isGenerating: boolean
}
export default function GeneratorSelector({ onGenerate, isGenerating }: GeneratorSelectorProps) {
console.log('GeneratorSelector rendering with props:', { onGenerate, isGenerating })
const settings = useStore(appSettings)
console.log('GeneratorSelector settings:', settings)
const handleGeneratorChange = (generator: GeneratorType) => {
console.log('Changing generator to:', generator)
appSettings.set({ ...settings, selectedGenerator: generator })
}
const handleGenerateClick = () => {
console.log('Generate button clicked in GeneratorSelector!')
console.log('onGenerate function:', onGenerate)
onGenerate()
}
const generators = [
{ id: 'tixy' as const, name: 'Tixy', description: 'Mathematical expressions' },
{ id: 'picsum' as const, name: 'Picsum', description: 'Random photos' },
{ id: 'art-institute' as const, name: 'Art Institute', description: 'Famous artworks' },
{ id: 'geometric-tiles' as const, name: 'Geo Tiles', description: 'Geometric patterns' }
]
return (
<div className="border-b border-gray-800 p-6">
<div className="flex items-center space-x-6">
<button
onClick={handleGenerateClick}
disabled={isGenerating}
className="bg-white text-black px-6 py-2 font-medium hover:bg-gray-200 disabled:bg-gray-600 disabled:text-gray-400 disabled:cursor-not-allowed"
>
{isGenerating ? 'Generating...' : 'Generate'}
</button>
<div className="flex space-x-4">
{generators.map((generator) => (
<button
key={generator.id}
onClick={() => handleGeneratorChange(generator.id)}
className={`px-4 py-2 border ${
settings.selectedGenerator === generator.id
? 'border-white bg-white text-black'
: 'border-gray-600 text-gray-300 hover:border-gray-400'
}`}
>
<div className="font-medium">{generator.name}</div>
<div className="text-xs opacity-75">{generator.description}</div>
</button>
))}
</div>
</div>
</div>
)
}

@@ -0,0 +1,78 @@
import { useStore } from '@nanostores/react'
import { generatedImages, selectedImage, isGenerating } from '../stores'
import type { GeneratedImage } from '../stores'
export default function ImageGrid() {
const images = useStore(generatedImages)
const selected = useStore(selectedImage)
const generating = useStore(isGenerating)
const handleImageClick = (image: GeneratedImage) => {
selectedImage.set(image)
}
if (generating) {
return (
<div className="flex-1 flex items-center justify-center">
<div className="text-center text-gray-300">
<div className="text-lg mb-2">Generating images...</div>
<div className="text-sm">Please wait while we fetch your images</div>
<div className="mt-4">
<div className="animate-pulse h-8 w-8 bg-white mx-auto" style={{ imageRendering: 'pixelated' }}></div>
</div>
</div>
</div>
)
}
if (images.length === 0) {
return (
<div className="flex-1 flex items-center justify-center">
<div className="text-center text-gray-500">
<div className="text-lg mb-2">No images generated yet</div>
<div className="text-sm">Click "Generate" to start</div>
</div>
</div>
)
}
return (
<div className="flex-1 p-6">
<div className="grid grid-cols-5 gap-3">
{images.map((image) => (
<button
key={image.id}
onClick={() => handleImageClick(image)}
className={`aspect-square border-2 transition-colors ${
selected?.id === image.id
? 'border-white'
: 'border-gray-700 hover:border-gray-500'
}`}
>
<canvas
ref={(canvas) => {
if (canvas && image.canvas) {
const ctx = canvas.getContext('2d')!
canvas.width = image.canvas.width
canvas.height = image.canvas.height
canvas.style.width = '100%'
canvas.style.height = '100%'
if (image.generator === 'tixy' || image.generator === 'geometric-tiles') {
ctx.imageSmoothingEnabled = false
} else {
ctx.imageSmoothingEnabled = true
}
ctx.drawImage(image.canvas, 0, 0)
}
}}
className="w-full h-full block"
style={{ imageRendering: (image.generator === 'tixy' || image.generator === 'geometric-tiles') ? 'pixelated' : 'auto' }}
/>
</button>
))}
</div>
</div>
)
}

@@ -0,0 +1,153 @@
import type { GeneratedImage } from '../stores'
const searchTerms = [
'painting', 'sculpture', 'drawing', 'pottery', 'portrait', 'landscape',
'impressionist', 'modern', 'contemporary', 'abstract', 'still life',
'figure', 'nature', 'urban', 'color', 'light', 'texture', 'pattern',
'architecture', 'street', 'woman', 'man', 'cat', 'dog', 'flower', 'tree'
]
interface ArticArtwork {
id: number
title: string
artist_display: string
date_display: string
image_id: string
is_public_domain: boolean
medium_display: string
department_title: string
}
interface ArticSearchResponse {
data: ArticArtwork[]
pagination: {
total: number
limit: number
offset: number
}
}
export async function generateArtInstituteImages(count: number, size: number): Promise<GeneratedImage[]> {
const images: GeneratedImage[] = []
  const maxRetries = count * 3 // Try up to 3x more artworks to account for failures
for (let attempt = 0; attempt < maxRetries && images.length < count; attempt++) {
try {
const image = await loadArtInstituteImage(size, attempt)
if (image) {
images.push(image)
}
} catch (error) {
      console.error(`Art Institute attempt ${attempt + 1} failed:`, error)
}
}
return images
}
async function loadArtInstituteImage(size: number, index: number): Promise<GeneratedImage | null> {
try {
const searchTerm = searchTerms[Math.floor(Math.random() * searchTerms.length)]
const searchUrl = `https://api.artic.edu/api/v1/artworks/search?q=${encodeURIComponent(searchTerm)}&query[term][is_public_domain]=true&limit=50&fields=id,title,artist_display,date_display,image_id,is_public_domain,medium_display,department_title`
const searchResponse = await fetch(searchUrl)
if (!searchResponse.ok) {
throw new Error('Failed to search Art Institute')
}
const searchData: ArticSearchResponse = await searchResponse.json()
if (!searchData.data || searchData.data.length === 0) {
throw new Error('No artworks found')
}
// Filter artworks that have images
const artworksWithImages = searchData.data.filter(artwork => artwork.image_id && artwork.is_public_domain)
if (artworksWithImages.length === 0) {
throw new Error('No artworks with images found')
}
const artwork = artworksWithImages[Math.floor(Math.random() * artworksWithImages.length)]
// Construct IIIF image URL - 512px width, auto height
const imageUrl = `https://www.artic.edu/iiif/2/${artwork.image_id}/full/512,/0/default.jpg`
return new Promise((resolve) => {
const img = new Image()
img.crossOrigin = 'anonymous'
img.onload = () => {
try {
const canvas = document.createElement('canvas')
canvas.width = size
canvas.height = size
const ctx = canvas.getContext('2d')!
// Calculate aspect ratio and center the image
const imgAspect = img.width / img.height
let drawWidth = size
let drawHeight = size
let drawX = 0
let drawY = 0
if (imgAspect > 1) {
drawHeight = size / imgAspect
drawY = (size - drawHeight) / 2
} else {
drawWidth = size * imgAspect
drawX = (size - drawWidth) / 2
}
// Black background
ctx.fillStyle = '#000000'
ctx.fillRect(0, 0, size, size)
// Draw the image
ctx.drawImage(img, drawX, drawY, drawWidth, drawHeight)
const imageData = ctx.getImageData(0, 0, size, size)
resolve({
id: `artic-${Date.now()}-${index}`,
canvas,
imageData,
generator: 'art-institute',
params: {
artworkId: artwork.id,
title: artwork.title || 'Untitled',
artist: artwork.artist_display || 'Unknown Artist',
date: artwork.date_display || 'Unknown Date',
medium: artwork.medium_display || 'Unknown Medium',
department: artwork.department_title || 'Unknown Department',
searchTerm
}
})
} catch (error) {
console.error('Error processing Art Institute image:', error)
resolve(null)
}
}
img.onerror = () => {
console.error('Failed to load Art Institute image:', imageUrl)
resolve(null)
}
// Set timeout for slow loading images
setTimeout(() => {
if (!img.complete) {
console.error('Art Institute image load timeout:', imageUrl)
resolve(null)
}
}, 8000)
img.src = imageUrl
})
} catch (error) {
    console.error('Error creating Art Institute image:', error)
return null
}
}

@@ -0,0 +1,74 @@
import { GEOMETRIC_PATTERNS, renderTilesToCanvas } from '../geometric-tiles'
import type { GeneratedImage } from '../stores'
const patternNames = Object.keys(GEOMETRIC_PATTERNS)
const colorPalettes = [
{ bg: '#000000', fg: '#ffffff' },
{ bg: '#ffffff', fg: '#000000' },
{ bg: '#000000', fg: '#cccccc' },
{ bg: '#000000', fg: '#888888' },
{ bg: '#1a1a1a', fg: '#ffffff' },
{ bg: '#333333', fg: '#ffffff' },
{ bg: '#000000', fg: '#666666' },
{ bg: '#222222', fg: '#dddddd' }
]
const tileSizes = [2, 3, 4, 5, 6, 8, 10, 12, 16, 20, 24, 32]
export function generateGeometricTilesImages(count: number, size: number): GeneratedImage[] {
const images: GeneratedImage[] = []
for (let i = 0; i < count; i++) {
const patternName = patternNames[Math.floor(Math.random() * patternNames.length)]
const pattern = GEOMETRIC_PATTERNS[patternName]
const palette = colorPalettes[Math.floor(Math.random() * colorPalettes.length)]
// More varied tile size selection
let tileSize: number
const sizeVariation = Math.random()
if (sizeVariation < 0.3) {
// Small tiles (high detail)
tileSize = tileSizes[Math.floor(Math.random() * 4)] // 2-5
} else if (sizeVariation < 0.7) {
// Medium tiles
tileSize = tileSizes[4 + Math.floor(Math.random() * 4)] // 6-12
} else {
// Large tiles (bold patterns)
tileSize = tileSizes[8 + Math.floor(Math.random() * 4)] // 16-32
}
// Add some randomness to prevent identical patterns
const timeOffset = Math.random() * 100
try {
const result = renderTilesToCanvas(pattern, {
width: size,
height: size,
tileSize,
time: timeOffset,
backgroundColor: palette.bg,
foregroundColor: palette.fg
})
      const image: GeneratedImage = {
id: `geotiles-${Date.now()}-${i}-${Math.floor(Math.random() * 1000)}`,
canvas: result.canvas,
imageData: result.imageData,
generator: 'geometric-tiles',
params: {
pattern: patternName,
tileSize,
colors: palette,
timeOffset
}
}
images.push(image)
} catch (error) {
console.error(`Failed to generate geometric tiles image ${i + 1}:`, error)
}
}
return images
}

src/generators/picsum.ts
@@ -0,0 +1,74 @@
import type { GeneratedImage } from '../stores'
export async function generatePicsumImages(count: number, size: number): Promise<GeneratedImage[]> {
const images: GeneratedImage[] = []
const promises: Promise<GeneratedImage | null>[] = []
for (let i = 0; i < count; i++) {
const promise = loadPicsumImage(size, i)
promises.push(promise)
}
const results = await Promise.allSettled(promises)
for (const result of results) {
if (result.status === 'fulfilled' && result.value) {
images.push(result.value)
}
}
return images
}
const highQualityIds = [
1, 2, 3, 5, 6, 8, 9, 10, 11, 13, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30,
35, 36, 37, 39, 40, 42, 43, 44, 47, 48, 49, 50, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63,
64, 65, 67, 68, 69, 70, 72, 73, 74, 75, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90,
91, 92, 96, 97, 98, 99, 100, 101, 102, 103, 104, 106, 107, 108, 109, 110, 111, 112, 113, 116
]
async function loadPicsumImage(size: number, index: number): Promise<GeneratedImage | null> {
try {
const imageId = highQualityIds[Math.floor(Math.random() * highQualityIds.length)]
const url = `https://picsum.photos/id/${imageId}/${size}/${size}?grayscale`
const img = new Image()
img.crossOrigin = 'anonymous'
return new Promise((resolve, reject) => {
img.onload = () => {
try {
const canvas = document.createElement('canvas')
canvas.width = size
canvas.height = size
const ctx = canvas.getContext('2d')!
ctx.drawImage(img, 0, 0, size, size)
const imageData = ctx.getImageData(0, 0, size, size)
resolve({
id: `picsum-${Date.now()}-${index}`,
canvas,
imageData,
generator: 'picsum',
params: { url, imageId }
})
} catch (error) {
console.error('Error processing Picsum image:', error)
resolve(null)
}
}
img.onerror = () => {
console.error('Failed to load Picsum image:', url)
resolve(null)
}
img.src = url
})
} catch (error) {
console.error('Error creating Picsum image:', error)
return null
}
}

src/generators/tixy.ts
@@ -0,0 +1,60 @@
import { compileTixyExpression, renderTixyToCanvas, EXAMPLE_EXPRESSIONS } from '../tixy-generator'
import type { GeneratedImage } from '../stores'
const expressions = Object.keys(EXAMPLE_EXPRESSIONS)
const colorPalettes = [
{ bg: '#000000', fg: '#ffffff' },
{ bg: '#ffffff', fg: '#000000' },
{ bg: '#000000', fg: '#cccccc' },
{ bg: '#000000', fg: '#888888' },
{ bg: '#1a1a1a', fg: '#ffffff' },
{ bg: '#333333', fg: '#ffffff' },
{ bg: '#000000', fg: '#666666' },
{ bg: '#222222', fg: '#dddddd' }
]
export function generateTixyImages(count: number, size: number): GeneratedImage[] {
const images: GeneratedImage[] = []
for (let i = 0; i < count; i++) {
const expression = expressions[Math.floor(Math.random() * expressions.length)]
let time: number
if (Math.random() < 0.3) {
time = Math.random() * 2
} else if (Math.random() < 0.6) {
time = Math.random() * 20
} else {
time = Math.random() * 100
}
const palette = colorPalettes[Math.floor(Math.random() * colorPalettes.length)]
try {
const compiled = compileTixyExpression(expression)
const result = renderTixyToCanvas(compiled, {
width: size,
height: size,
time,
backgroundColor: palette.bg,
foregroundColor: palette.fg
})
      const image: GeneratedImage = {
id: `tixy-${Date.now()}-${i}`,
canvas: result.canvas,
imageData: result.imageData,
generator: 'tixy',
params: { expression, time, colors: palette }
}
images.push(image)
} catch (error) {
console.error(`Failed to generate Tixy image ${i + 1}:`, error)
}
}
return images
}

@@ -0,0 +1,74 @@
# Geometric Tiles Generator
A standalone package for generating geometric tile patterns in black and white. Similar to Tixy but focused on classic geometric and algorithmic patterns.
## Features
- 20 built-in geometric patterns
- Tile-based rendering system
- Configurable tile size and colors
- Standalone, reusable package
## Patterns
- **Checkerboard**: Classic alternating squares
- **Stripes**: Horizontal and vertical lines
- **Diagonal**: Diagonal line patterns
- **Diamond**: Diamond grid patterns
- **Cross**: Plus/cross intersections
- **Maze**: Maze-like corridors
- **Triangles**: Triangular tessellations
- **Hexagon**: Hexagonal patterns
- **Waves**: Sine wave based patterns
- **Spiral**: Spiral arm patterns
- **Concentric**: Concentric circle rings
- **Bricks**: Brick wall patterns
- **Dots**: Regular dot grids
- **Zigzag**: Zigzag patterns
- **Random**: Pseudo-random noise
- **Voronoi**: Cell-like patterns
- **Fractal**: Sierpinski-like fractals
- **Complex Maze**: Procedural maze generation
## Usage
```typescript
import { GEOMETRIC_PATTERNS, renderTilesToCanvas } from './geometric-tiles'
// Get a pattern
const checkerboard = GEOMETRIC_PATTERNS.checker
// Render to canvas
const result = renderTilesToCanvas(checkerboard, {
width: 512,
height: 512,
tileSize: 8,
time: 0,
backgroundColor: '#000000',
foregroundColor: '#ffffff'
})
// Use the canvas
document.body.appendChild(result.canvas)
```
## API
### TileExpression
- `name`: Human-readable pattern name
- `code`: String representation of the pattern logic
- `compiled`: Function that generates the pattern
### TileRenderOptions
- `width`: Canvas width in pixels
- `height`: Canvas height in pixels
- `tileSize`: Size of each tile in pixels
- `time`: Time parameter (for animated patterns)
- `backgroundColor`: Background color (hex)
- `foregroundColor`: Foreground color (hex)
The tile function receives: `(x, y, i, t, size)` where:
- `x, y`: Tile coordinates
- `i`: Linear tile index
- `t`: Time parameter
- `size`: Grid size (max of width/height in tiles)
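For example, a custom pattern can be defined with the same `TileExpression` shape and rendered like the built-ins. The sketch below is illustrative only; `myDiagonalBands` is not part of the package:
```typescript
import { renderTilesToCanvas } from './geometric-tiles'
import type { TileExpression } from './geometric-tiles'

// Hypothetical custom pattern: diagonal bands, four tiles wide.
const myDiagonalBands: TileExpression = {
  name: 'Diagonal Bands',
  code: '(x + y) % 8 < 4',
  compiled: (x, y) => (x + y) % 8 < 4
}

const banded = renderTilesToCanvas(myDiagonalBands, {
  width: 256,
  height: 256,
  tileSize: 8,
  time: 0,
  backgroundColor: '#000000',
  foregroundColor: '#ffffff'
})
document.body.appendChild(banded.canvas)
```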

@@ -0,0 +1,319 @@
import type { TileExpression } from './types'
export const GEOMETRIC_PATTERNS: Record<string, TileExpression> = {
'checker': {
name: 'Checkerboard',
code: '(x + y) % 2',
compiled: (x, y, i, t, size) => (x + y) % 2
},
'stripes_h': {
name: 'Horizontal Stripes',
code: 'y % 2',
compiled: (x, y, i, t, size) => y % 2
},
'stripes_v': {
name: 'Vertical Stripes',
code: 'x % 2',
compiled: (x, y, i, t, size) => x % 2
},
'diagonal': {
name: 'Diagonal Lines',
code: '(x - y) % 2',
compiled: (x, y, i, t, size) => (x - y) % 2
},
'diamond': {
name: 'Diamond Grid',
code: '(x + y) % 4 < 2',
compiled: (x, y, i, t, size) => (x + y) % 4 < 2
},
'cross': {
name: 'Cross Pattern',
code: 'x % 3 === 1 || y % 3 === 1',
compiled: (x, y, i, t, size) => x % 3 === 1 || y % 3 === 1
},
'maze': {
name: 'Maze Pattern',
code: '(x % 4 === 0 && y % 2 === 0) || (y % 4 === 0 && x % 2 === 0)',
compiled: (x, y, i, t, size) => (x % 4 === 0 && y % 2 === 0) || (y % 4 === 0 && x % 2 === 0)
},
'triangles': {
name: 'Triangle Grid',
code: '(x + y) % 3 === 0',
compiled: (x, y, i, t, size) => (x + y) % 3 === 0
},
'hexagon': {
name: 'Hexagonal',
code: '(x % 6 + y % 4) % 3 === 0',
compiled: (x, y, i, t, size) => (x % 6 + y % 4) % 3 === 0
},
'waves': {
name: 'Wave Pattern',
code: 'Math.sin(x * 0.5) + Math.sin(y * 0.5) > 0',
compiled: (x, y, i, t, size) => Math.sin(x * 0.5) + Math.sin(y * 0.5) > 0
},
'spiral': {
name: 'Spiral Arms',
code: 'Math.atan2(y - size/2, x - size/2) + Math.sqrt((x - size/2)**2 + (y - size/2)**2) * 0.1',
compiled: (x, y, i, t, size) => {
const centerX = size / 2
const centerY = size / 2
const angle = Math.atan2(y - centerY, x - centerX)
const distance = Math.sqrt((x - centerX)**2 + (y - centerY)**2)
return Math.sin(angle * 3 + distance * 0.2) > 0
}
},
'concentric': {
name: 'Concentric Circles',
code: 'Math.sqrt((x - size/2)**2 + (y - size/2)**2) % 8 < 4',
compiled: (x, y, i, t, size) => {
const centerX = size / 2
const centerY = size / 2
const distance = Math.sqrt((x - centerX)**2 + (y - centerY)**2)
return distance % 8 < 4
}
},
'plus': {
name: 'Plus Pattern',
code: '(x % 6 === 3) || (y % 6 === 3)',
compiled: (x, y, i, t, size) => (x % 6 === 3) || (y % 6 === 3)
},
'bricks': {
name: 'Brick Pattern',
code: 'y % 4 < 2 ? x % 4 < 2 : (x + 2) % 4 < 2',
compiled: (x, y, i, t, size) => y % 4 < 2 ? x % 4 < 2 : (x + 2) % 4 < 2
},
'dots': {
name: 'Dot Grid',
code: 'x % 4 === 2 && y % 4 === 2',
compiled: (x, y, i, t, size) => x % 4 === 2 && y % 4 === 2
},
'zigzag': {
name: 'Zigzag',
code: '(x + Math.floor(y / 2)) % 4 < 2',
compiled: (x, y, i, t, size) => (x + Math.floor(y / 2)) % 4 < 2
},
'random': {
name: 'Random Noise',
code: 'Math.sin(x * 12.9898 + y * 78.233) * 43758.5453 % 1 > 0.5',
compiled: (x, y, i, t, size) => Math.sin(x * 12.9898 + y * 78.233) * 43758.5453 % 1 > 0.5
},
'voronoi': {
name: 'Voronoi Cells',
code: 'Voronoi-like pattern',
compiled: (x, y, i, t, size) => {
// Simple Voronoi approximation using distance to grid points
const gridX = Math.floor(x / 8) * 8 + 4
const gridY = Math.floor(y / 8) * 8 + 4
const dx = x - gridX
const dy = y - gridY
return Math.sqrt(dx*dx + dy*dy) < 3
}
},
'fractal': {
name: 'Fractal Tree',
code: 'Sierpinski-like pattern',
compiled: (x, y, i, t, size) => {
// Simple Sierpinski triangle approximation
return (x & y) === 0
}
},
'maze_complex': {
name: 'Complex Maze',
code: 'Complex maze pattern',
compiled: (x, y, i, t, size) => {
const hash = (x * 374761393 + y * 668265263) % 1000000
return (hash % 3 === 0) && ((x % 4 === 0) || (y % 4 === 0))
}
},
'arrows': {
name: 'Arrow Pattern',
code: 'Directional arrows',
compiled: (x, y, i, t, size) => {
const px = x % 8
const py = y % 8
return (px === 3 || px === 4) && (py >= px - 3 && py <= 11 - px)
}
},
'stars': {
name: 'Star Pattern',
code: 'Eight-pointed stars',
compiled: (x, y, i, t, size) => {
const px = x % 12
const py = y % 12
const cx = 6
const cy = 6
const dx = Math.abs(px - cx)
const dy = Math.abs(py - cy)
return (dx + dy <= 4) && (dx <= 2 || dy <= 2)
}
},
'circuit': {
name: 'Circuit Board',
code: 'Electronic circuit pattern',
compiled: (x, y, i, t, size) => {
return ((x % 8 === 0 || x % 8 === 7) && y % 4 === 0) ||
((y % 8 === 0 || y % 8 === 7) && x % 4 === 0) ||
(x % 16 === 8 && y % 16 === 8)
}
},
'tribal': {
name: 'Tribal Pattern',
code: 'Tribal geometric design',
compiled: (x, y, i, t, size) => {
const px = x % 16
const py = y % 16
return ((px + py) % 8 < 2) || ((px - py + 16) % 8 < 2)
}
},
'islamic': {
name: 'Islamic Tiles',
code: 'Islamic geometric pattern',
compiled: (x, y, i, t, size) => {
const px = x % 24
const py = y % 24
return ((px + py) % 6 === 0) || ((px - py + 24) % 6 === 0) ||
(px % 8 === 4 && py % 8 === 4)
}
},
'weave': {
name: 'Basket Weave',
code: 'Woven pattern',
compiled: (x, y, i, t, size) => {
const blockX = Math.floor(x / 4) % 2
const blockY = Math.floor(y / 4) % 2
return blockX === blockY ? x % 2 === 0 : y % 2 === 0
}
},
'scales': {
name: 'Dragon Scales',
code: 'Overlapping scale pattern',
compiled: (x, y, i, t, size) => {
const offset = (y % 8 < 4) ? 2 : 0
const px = (x + offset) % 8
const py = y % 8
return py < 4 && px >= 2 && px <= 5
}
},
'honeycomb': {
name: 'Honeycomb',
code: 'Hexagonal honeycomb',
compiled: (x, y, i, t, size) => {
const hex = (x % 6) + (y % 4) * 1.5
return Math.floor(hex) % 3 === 0
}
},
'labyrinth': {
name: 'Labyrinth',
code: 'Classical labyrinth',
compiled: (x, y, i, t, size) => {
const cx = size / 2
const cy = size / 2
const dx = x - cx
const dy = y - cy
const angle = Math.atan2(dy, dx)
const radius = Math.sqrt(dx*dx + dy*dy)
return Math.floor(radius + angle * 4) % 6 < 3
}
},
'tetris': {
name: 'Tetris Blocks',
code: 'Tetris piece pattern',
compiled: (x, y, i, t, size) => {
const shapes = [
[[1,1,1,1]], // I
[[1,1],[1,1]], // O
[[0,1,0],[1,1,1]], // T
[[1,1,0],[0,1,1]], // S
]
const shapeId = (Math.floor(x/4) + Math.floor(y/4)) % shapes.length
const shape = shapes[shapeId]
const sx = x % 4
const sy = y % shape.length
return sx < shape[sy]?.length && shape[sy][sx] === 1
}
},
'aztec': {
name: 'Aztec Pattern',
code: 'Pre-Columbian design',
compiled: (x, y, i, t, size) => {
const px = x % 16
const py = y % 16
return ((px === 0 || px === 15) && (py >= 4 && py <= 11)) ||
((py === 0 || py === 15) && (px >= 4 && px <= 11)) ||
((px >= 6 && px <= 9) && (py >= 6 && py <= 9))
}
},
'optical': {
name: 'Optical Illusion',
code: 'Op-art pattern',
compiled: (x, y, i, t, size) => {
const wave1 = Math.sin(x * 0.3) * 4
const wave2 = Math.sin(y * 0.3) * 4
return (x + wave1 + y + wave2) % 8 < 4
}
},
'neurons': {
name: 'Neural Network',
code: 'Connected nodes',
compiled: (x, y, i, t, size) => {
const isNode = (x % 12 === 6) && (y % 12 === 6)
const isConnection = ((x % 12 === 6) && (y % 4 === 0)) ||
((y % 12 === 6) && (x % 4 === 0))
return isNode || isConnection
}
},
'dna': {
name: 'DNA Helix',
code: 'Double helix structure',
compiled: (x, y, i, t, size) => {
const helix1 = Math.sin(y * 0.5) * 4 + 8
const helix2 = Math.sin(y * 0.5 + Math.PI) * 4 + 8
return Math.abs(x - helix1) < 1.5 || Math.abs(x - helix2) < 1.5 ||
(y % 8 === 0 && x >= Math.min(helix1, helix2) && x <= Math.max(helix1, helix2))
}
},
'mandala': {
name: 'Mandala Pattern',
code: 'Radial mandala design',
compiled: (x, y, i, t, size) => {
const cx = size / 2
const cy = size / 2
const dx = x - cx
const dy = y - cy
const angle = Math.atan2(dy, dx)
const radius = Math.sqrt(dx*dx + dy*dy)
return Math.sin(angle * 8) * Math.sin(radius * 0.5) > 0
}
},
'molecular': {
name: 'Molecular Structure',
code: 'Chemical bonds',
compiled: (x, y, i, t, size) => {
const atomX = x % 20 === 10
const atomY = y % 20 === 10
const bondH = (y % 20 === 10) && ((x % 10) < 2 || (x % 10) > 7)
const bondV = (x % 20 === 10) && ((y % 10) < 2 || (y % 10) > 7)
return (atomX && atomY) || bondH || bondV
}
},
'cyberpunk': {
name: 'Cyberpunk Grid',
code: 'Futuristic data pattern',
compiled: (x, y, i, t, size) => {
const hash1 = (x * 73 + y * 37) % 256
const hash2 = (x * 137 + y * 73) % 256
return ((x % 8 === 0 || y % 8 === 0) && hash1 > 128) ||
((x + y) % 16 === 0 && hash2 > 200)
}
},
'glitch': {
name: 'Glitch Effect',
code: 'Digital corruption',
compiled: (x, y, i, t, size) => {
const noise = Math.sin(x * 123.456 + y * 789.012) * 1000
const glitch = Math.floor(noise) % 16
return ((x % 4 === 0) && (y % 2 === glitch % 2)) ||
((y % 8 === 0) && (x % 3 === glitch % 3))
}
}
}

@@ -0,0 +1,29 @@
export interface TileParams {
x: number
y: number
i: number
t: number
size: number
}
export type TileFunction = (x: number, y: number, i: number, t: number, size: number) => number | boolean
export interface TileExpression {
name: string
code: string
compiled: TileFunction
}
export interface TileRenderOptions {
width: number
height: number
tileSize: number
time: number
backgroundColor: string
foregroundColor: string
}
export interface TileResult {
canvas: HTMLCanvasElement
imageData: ImageData
}

@@ -0,0 +1,9 @@
export { GEOMETRIC_PATTERNS } from './core/patterns'
export { renderTilesToCanvas, renderTilesToImageData } from './renderer/canvas'
export type {
TileParams,
TileFunction,
TileExpression,
TileRenderOptions,
TileResult
} from './core/types'

@@ -0,0 +1,66 @@
import type { TileExpression, TileRenderOptions, TileResult } from '../core/types'
export function renderTilesToCanvas(
pattern: TileExpression,
options: TileRenderOptions
): TileResult {
const { width, height, tileSize, time, backgroundColor, foregroundColor } = options
const canvas = document.createElement('canvas')
canvas.width = width
canvas.height = height
const ctx = canvas.getContext('2d')!
// Fill background
ctx.fillStyle = backgroundColor
ctx.fillRect(0, 0, width, height)
// Set foreground color
ctx.fillStyle = foregroundColor
// Calculate grid dimensions
const tilesX = Math.ceil(width / tileSize)
const tilesY = Math.ceil(height / tileSize)
// Render each tile
for (let y = 0; y < tilesY; y++) {
for (let x = 0; x < tilesX; x++) {
const tileIndex = y * tilesX + x
try {
const result = pattern.compiled(x, y, tileIndex, time, Math.max(tilesX, tilesY))
// Convert result to boolean
const shouldFill = typeof result === 'boolean' ? result : result > 0.5
if (shouldFill) {
ctx.fillRect(
x * tileSize,
y * tileSize,
tileSize,
tileSize
)
}
} catch (error) {
// Skip tiles that cause errors
console.warn(`Error rendering tile at (${x}, ${y}):`, error)
}
}
}
const imageData = ctx.getImageData(0, 0, width, height)
return {
canvas,
imageData
}
}
export function renderTilesToImageData(
pattern: TileExpression,
options: TileRenderOptions
): ImageData {
const result = renderTilesToCanvas(pattern, options)
return result.imageData
}

src/index.css
@@ -0,0 +1 @@
@import "tailwindcss";

src/main.tsx
@@ -0,0 +1,10 @@
import { StrictMode } from 'react'
import { createRoot } from 'react-dom/client'
import './index.css'
import App from './App.tsx'
createRoot(document.getElementById('root')!).render(
<StrictMode>
<App />
</StrictMode>,
)

@@ -0,0 +1,102 @@
# Spectral Synthesis Library
A library for synthesizing audio from images using additive synthesis and perceptual frequency mapping.
## Features
- **Image-to-Audio Synthesis**: Convert any image to audio by treating it as a spectrogram
- **Perceptual Accuracy**: Uses Mel-scale frequency mapping for better perceptual results
- **Spectral Peak Detection**: Only synthesizes significant frequency components
- **Temporal Smoothing**: Maintains coherent trajectories between time frames
- **Auto-Detection**: Automatically handles different image types (spectrograms vs diagrams)
## Quick Start
```typescript
import { synthesizeFromImage, downloadWAV } from './spectral-synthesis'
// Simple usage
const audioData = synthesizeFromImage(imageData)
// With custom parameters
const audioData = synthesizeFromImage(imageData, {
duration: 10,
minFreq: 100,
maxFreq: 10000,
maxPartials: 200
})
// Export as WAV
downloadWAV(audioData, 44100, 'my-audio.wav')
```
## API Reference
### Main Functions
#### `synthesizeFromImage(imageData, params?)`
- **imageData**: `ImageData` - Canvas image data
- **params**: `Partial<SynthesisParams>` - Optional parameters
- **Returns**: `Float32Array` - Audio samples
### Types
#### `SynthesisParams`
```typescript
interface SynthesisParams {
duration: number // Audio duration in seconds
minFreq: number // Minimum frequency in Hz
maxFreq: number // Maximum frequency in Hz
sampleRate: number // Sample rate in Hz
frequencyResolution: number // Frequency bin downsampling
timeResolution: number // Time slice downsampling
amplitudeThreshold: number // Minimum amplitude threshold
maxPartials: number // Maximum simultaneous partials
}
```
## Project Structure
```
spectral-synthesis/
├── core/
│ ├── types.ts # Type definitions
│ ├── utils.ts # Helper functions
│ └── synthesizer.ts # Main synthesis logic
├── audio/
│ └── export.ts # Audio export utilities
└── index.ts # Main exports
```
## Algorithm
1. **Image Analysis**: Auto-detect if colors should be inverted
2. **Frequency Mapping**: Convert image rows to Mel-scale frequencies
3. **Peak Detection**: Find significant spectral components
4. **Temporal Smoothing**: Apply continuity between time frames
5. **Perceptual Weighting**: Apply psychoacoustic amplitude scaling
6. **Additive Synthesis**: Generate and sum sine waves
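As a minimal sketch of step 2, rows are spaced evenly on the Mel scale between `minFreq` and `maxFreq`, with the top row mapping to the highest frequency. The `rowToFrequency` helper below is illustrative, not part of the library; it only uses the exported `hzToMel`/`melToHz` functions:
```typescript
import { hzToMel, melToHz } from './spectral-synthesis'

// Illustrative row-to-frequency mapping (row 0 = top of the image = maxFreq).
function rowToFrequency(row: number, height: number, minFreq: number, maxFreq: number): number {
  const minMel = hzToMel(minFreq)
  const maxMel = hzToMel(maxFreq)
  const mel = maxMel - (row / (height - 1)) * (maxMel - minMel)
  return melToHz(mel)
}

rowToFrequency(0, 256, 20, 20000)   // ≈ 20000 Hz (top row)
rowToFrequency(255, 256, 20, 20000) // ≈ 20 Hz (bottom row)
```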
## Usage Examples
### Basic Synthesis
```typescript
const canvas = document.createElement('canvas')
const ctx = canvas.getContext('2d')
// ... load image to canvas
const imageData = ctx.getImageData(0, 0, canvas.width, canvas.height)
const audio = synthesizeFromImage(imageData)
```
### Advanced Usage
```typescript
import { ImageToAudioSynthesizer } from './spectral-synthesis'
const synthesizer = new ImageToAudioSynthesizer({
duration: 5,
maxPartials: 150
})
const result = synthesizer.synthesize(imageData)
console.log(`Generated ${result.duration}s of audio`)
```
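### Playing in the Browser
Synthesized audio can also be auditioned directly with the exported `playAudio` helper. A sketch: `imageData` is obtained as in the Basic Synthesis example, and most browsers require a user gesture before audio playback starts.
```typescript
import { synthesizeFromImage, playAudio } from './spectral-synthesis'

const audio = synthesizeFromImage(imageData, { duration: 5 })
await playAudio(audio, 44100) // resolves when playback ends
```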

@@ -0,0 +1,78 @@
/**
* Create WAV buffer from audio data
*/
export function createWAVBuffer(audioData: Float32Array, sampleRate: number): ArrayBuffer {
const length = audioData.length
const buffer = new ArrayBuffer(44 + length * 2)
const view = new DataView(buffer)
// WAV header
writeString(view, 0, 'RIFF')
view.setUint32(4, 36 + length * 2, true) // file length - 8
writeString(view, 8, 'WAVE')
writeString(view, 12, 'fmt ')
view.setUint32(16, 16, true) // format chunk length
view.setUint16(20, 1, true) // PCM format
view.setUint16(22, 1, true) // mono
view.setUint32(24, sampleRate, true)
view.setUint32(28, sampleRate * 2, true) // byte rate
view.setUint16(32, 2, true) // block align
view.setUint16(34, 16, true) // bits per sample
writeString(view, 36, 'data')
view.setUint32(40, length * 2, true) // data chunk length
// Convert float samples to 16-bit PCM
let offset = 44
for (let i = 0; i < length; i++) {
const sample = Math.max(-1, Math.min(1, audioData[i]))
view.setInt16(offset, sample * 0x7FFF, true)
offset += 2
}
return buffer
}
function writeString(view: DataView, offset: number, string: string) {
for (let i = 0; i < string.length; i++) {
view.setUint8(offset + i, string.charCodeAt(i))
}
}
/**
* Download audio as WAV file
*/
export function downloadWAV(audioData: Float32Array, sampleRate: number, filename: string) {
const buffer = createWAVBuffer(audioData, sampleRate)
const blob = new Blob([buffer], { type: 'audio/wav' })
const url = URL.createObjectURL(blob)
const a = document.createElement('a')
a.href = url
a.download = filename
a.click()
URL.revokeObjectURL(url)
}
/**
* Play audio in browser
*/
export async function playAudio(audioData: Float32Array, sampleRate: number): Promise<void> {
const audioContext = new (window.AudioContext || (window as any).webkitAudioContext)()
if (audioContext.sampleRate !== sampleRate) {
console.warn(`Audio context sample rate (${audioContext.sampleRate}) differs from data sample rate (${sampleRate})`)
}
const buffer = audioContext.createBuffer(1, audioData.length, sampleRate)
buffer.copyToChannel(audioData, 0)
const source = audioContext.createBufferSource()
source.buffer = buffer
source.connect(audioContext.destination)
source.start()
return new Promise(resolve => {
source.onended = () => resolve()
})
}

@@ -0,0 +1,175 @@
import type { SynthesisParams, SpectralPeak, SynthesisResult } from './types'
import {
hzToMel,
melToHz,
detectSpectralPeaks,
perceptualAmplitudeWeighting,
shouldInvertImage,
extractSpectrum,
applyWindow
} from './utils'
export class ImageToAudioSynthesizer {
private params: SynthesisParams
constructor(params: Partial<SynthesisParams> = {}) {
this.params = {
duration: 5,
minFreq: 20,
maxFreq: 20000,
sampleRate: 44100,
frequencyResolution: 1,
timeResolution: 1,
amplitudeThreshold: 0.01,
maxPartials: 100,
windowType: 'hann',
...params
}
}
/**
* Synthesize audio from image data
*/
synthesize(imageData: ImageData): SynthesisResult {
    const { width, height } = imageData
const {
duration,
minFreq,
maxFreq,
sampleRate,
frequencyResolution,
timeResolution,
amplitudeThreshold,
maxPartials,
windowType
} = this.params
// Detect image type
const invert = shouldInvertImage(imageData)
// Calculate synthesis parameters
const totalSamples = Math.floor(duration * sampleRate)
const effectiveWidth = Math.floor(width / timeResolution)
const effectiveHeight = Math.floor(height / frequencyResolution)
const samplesPerColumn = totalSamples / effectiveWidth
const audio = new Float32Array(totalSamples)
// Pre-calculate mel-scale frequency mapping
const minMel = hzToMel(minFreq)
const maxMel = hzToMel(maxFreq)
// Storage for temporal smoothing
const previousAmplitudes = new Float32Array(effectiveHeight)
const smoothingFactor = 0.3
// Process each time slice
for (let col = 0; col < effectiveWidth; col++) {
const sourceCol = col * timeResolution
const startSample = Math.floor(col * samplesPerColumn)
const endSample = Math.floor((col + 1) * samplesPerColumn)
// Extract spectrum for this time slice
const spectrum = extractSpectrum(imageData, sourceCol, effectiveHeight, frequencyResolution, invert)
// Detect spectral peaks
const peaks = detectSpectralPeaks(spectrum, Math.min(amplitudeThreshold, 0.01))
// Generate partials from peaks
const partials: SpectralPeak[] = []
for (const peakRow of peaks) {
// Mel-scale frequency mapping (high freq at top)
const melValue = maxMel - (peakRow / (effectiveHeight - 1)) * (maxMel - minMel)
const frequency = melToHz(melValue)
let amplitude = spectrum[peakRow]
// Apply temporal smoothing
if (col > 0) {
amplitude = smoothingFactor * previousAmplitudes[peakRow] + (1 - smoothingFactor) * amplitude
}
previousAmplitudes[peakRow] = amplitude
// Apply perceptual weighting
amplitude = perceptualAmplitudeWeighting(frequency, amplitude)
// Use zero phase for simplicity
const phase = 0
if (amplitude > Math.min(amplitudeThreshold, 0.005)) {
partials.push({ frequency, amplitude, phase })
}
}
// Sort by amplitude and limit partials
partials.sort((a, b) => b.amplitude - a.amplitude)
const limitedPartials = partials.slice(0, maxPartials)
// Generate audio for this time slice
const chunkLength = endSample - startSample
const audioChunk = new Float32Array(chunkLength)
for (const { frequency, amplitude, phase } of limitedPartials) {
for (let i = 0; i < chunkLength; i++) {
const t = (startSample + i) / sampleRate
audioChunk[i] += amplitude * Math.sin(2 * Math.PI * frequency * t + phase)
}
}
// Apply windowing to reduce artifacts
const windowedChunk = applyWindow(audioChunk, windowType)
// Add windowed chunk to final audio
for (let i = 0; i < chunkLength && startSample + i < totalSamples; i++) {
audio[startSample + i] += windowedChunk[i]
}
}
// Normalize to prevent clipping
let maxAmplitude = 0
for (let i = 0; i < audio.length; i++) {
const absValue = Math.abs(audio[i])
if (absValue > maxAmplitude) {
maxAmplitude = absValue
}
}
if (maxAmplitude > 1) {
for (let i = 0; i < audio.length; i++) {
audio[i] /= maxAmplitude
}
}
return {
audio,
sampleRate,
duration
}
}
/**
* Update synthesis parameters
*/
updateParams(newParams: Partial<SynthesisParams>): void {
this.params = { ...this.params, ...newParams }
}
/**
* Get current parameters
*/
getParams(): SynthesisParams {
return { ...this.params }
}
}
/**
* Convenience function for quick synthesis
*/
export function synthesizeFromImage(
imageData: ImageData,
params: Partial<SynthesisParams> = {}
): Float32Array {
const synthesizer = new ImageToAudioSynthesizer(params)
const result = synthesizer.synthesize(imageData)
return result.audio
}

@@ -0,0 +1,25 @@
export type WindowType = 'rectangular' | 'hann' | 'hamming' | 'blackman'
export interface SynthesisParams {
duration: number
minFreq: number
maxFreq: number
sampleRate: number
frequencyResolution: number
timeResolution: number
amplitudeThreshold: number
maxPartials: number
windowType: WindowType
}
export interface SpectralPeak {
frequency: number
amplitude: number
phase: number
}
export interface SynthesisResult {
audio: Float32Array
sampleRate: number
duration: number
}

@@ -0,0 +1,143 @@
/**
* Convert frequency from Hz to Mel scale
*/
export function hzToMel(freq: number): number {
return 2595 * Math.log10(1 + freq / 700)
}
/**
* Convert frequency from Mel scale to Hz
*/
export function melToHz(mel: number): number {
return 700 * (Math.pow(10, mel / 2595) - 1)
}
/**
* Detect spectral peaks in amplitude spectrum
*/
export function detectSpectralPeaks(spectrum: number[], threshold: number = 0.01): number[] {
const peaks: number[] = []
// Find significant components above threshold
for (let i = 0; i < spectrum.length; i++) {
if (spectrum[i] > threshold) {
peaks.push(i)
}
}
// Fallback: use local maxima with lower threshold if no peaks found
if (peaks.length === 0) {
for (let i = 1; i < spectrum.length - 1; i++) {
if (spectrum[i] > spectrum[i - 1] &&
spectrum[i] > spectrum[i + 1] &&
spectrum[i] > 0.001) {
peaks.push(i)
}
}
}
return peaks
}
/**
* Apply perceptual amplitude weighting
*/
export function perceptualAmplitudeWeighting(freq: number, amplitude: number): number {
// Gentle boost around 1kHz for perceptual accuracy
const normalizedFreq = Math.log10(freq / 1000)
const weight = Math.exp(-normalizedFreq * normalizedFreq * 0.5) * 0.5 + 0.5
return amplitude * weight
}
/**
* Auto-detect if image colors should be inverted
*/
export function shouldInvertImage(imageData: ImageData): boolean {
const { width, height, data } = imageData
let totalBrightness = 0
for (let i = 0; i < data.length; i += 4) {
const gray = 0.299 * data[i] + 0.587 * data[i + 1] + 0.114 * data[i + 2]
totalBrightness += gray / 255
}
const meanBrightness = totalBrightness / (width * height)
return meanBrightness > 0.5 // Bright background = diagram
}
/**
* Generate windowing function
*/
export function generateWindow(length: number, windowType: string): Float32Array {
const window = new Float32Array(length)
switch (windowType) {
case 'hann':
for (let i = 0; i < length; i++) {
window[i] = 0.5 * (1 - Math.cos(2 * Math.PI * i / (length - 1)))
}
break
case 'hamming':
for (let i = 0; i < length; i++) {
window[i] = 0.54 - 0.46 * Math.cos(2 * Math.PI * i / (length - 1))
}
break
case 'blackman':
for (let i = 0; i < length; i++) {
const factor = 2 * Math.PI * i / (length - 1)
window[i] = 0.42 - 0.5 * Math.cos(factor) + 0.08 * Math.cos(2 * factor)
}
break
case 'rectangular':
default:
window.fill(1.0)
break
}
return window
}
/**
* Apply windowing function to audio chunk
*/
export function applyWindow(audioChunk: Float32Array, windowType: string): Float32Array {
const window = generateWindow(audioChunk.length, windowType)
const windowed = new Float32Array(audioChunk.length)
for (let i = 0; i < audioChunk.length; i++) {
windowed[i] = audioChunk[i] * window[i]
}
return windowed
}
/**
* Extract grayscale spectrum from image column
*/
export function extractSpectrum(
imageData: ImageData,
col: number,
height: number,
frequencyResolution: number,
invert: boolean
): number[] {
const { width, data } = imageData
const spectrum: number[] = []
for (let row = 0; row < height; row++) {
const sourceRow = row * frequencyResolution
const idx = (sourceRow * width + col) * 4
const r = data[idx]
const g = data[idx + 1]
const b = data[idx + 2]
let amplitude = (0.299 * r + 0.587 * g + 0.114 * b) / 255
if (invert) amplitude = 1 - amplitude
spectrum.push(amplitude)
}
return spectrum
}

@@ -0,0 +1,22 @@
// Core synthesis
export { ImageToAudioSynthesizer, synthesizeFromImage } from './core/synthesizer'
export type { SynthesisParams, SpectralPeak, SynthesisResult, WindowType } from './core/types'
// Utilities
export {
hzToMel,
melToHz,
detectSpectralPeaks,
perceptualAmplitudeWeighting,
shouldInvertImage,
extractSpectrum,
generateWindow,
applyWindow
} from './core/utils'
// Audio export
export {
createWAVBuffer,
downloadWAV,
playAudio
} from './audio/export'

src/stores/index.ts
@@ -0,0 +1,49 @@
import { atom } from 'nanostores'
import { persistentAtom } from '@nanostores/persistent'
import type { SynthesisParams, WindowType } from '../spectral-synthesis'
export type GeneratorType = 'tixy' | 'picsum' | 'art-institute' | 'geometric-tiles'
export interface GeneratedImage {
id: string
canvas: HTMLCanvasElement
imageData: ImageData
generator: GeneratorType
params: any
}
export interface AppSettings {
selectedGenerator: GeneratorType
gridSize: number
imageSize: number
backgroundColor: string
foregroundColor: string
}
export const appSettings = atom<AppSettings>({
selectedGenerator: 'tixy',
gridSize: 20,
imageSize: 64,
backgroundColor: '#000000',
foregroundColor: '#ffffff'
})
export const generatedImages = atom<GeneratedImage[]>([])
export const selectedImage = atom<GeneratedImage | null>(null)
export const isGenerating = atom<boolean>(false)
export const panelOpen = atom<boolean>(false)
export const synthesisParams = atom<SynthesisParams>({
duration: 5,
minFreq: 20,
maxFreq: 20000,
sampleRate: 44100,
frequencyResolution: 1,
timeResolution: 1,
amplitudeThreshold: 0.01,
maxPartials: 100,
windowType: 'rectangular'
})

@@ -0,0 +1,59 @@
# Tixy Generator
A standalone module for generating Tixy-like shader patterns in JavaScript/TypeScript. Create beautiful mathematical art using compact expressions.
## What is Tixy?
Tixy is a minimalist programming language designed by Martin Kleppe for creating visual patterns using 4 variables:
- `t` - time
- `i` - index (pixel index in the grid)
- `x` - x coordinate
- `y` - y coordinate
## Usage
```typescript
import { compileTixyExpression, renderTixyToCanvas } from '@src/tixy-generator'
// Compile a Tixy expression
const expression = compileTixyExpression('sin(t)*x')
// Render to canvas
const result = renderTixyToCanvas(expression, {
width: 64,
height: 64,
time: 0,
backgroundColor: '#000000',
foregroundColor: '#ffffff'
})
// Add to DOM
document.body.appendChild(result.canvas)
```
## Example Expressions
- `sin(t)` - Simple sine wave
- `sin(t)*x` - Sine wave with x scaling
- `sin(t+x/8)*y` - Traveling wave
- `(x+t)*(y+t)` - Expanding diagonal pattern
- `sin(x*y+t)` - XY interference pattern
- `sin((x-8)**2+(y-8)**2+t)/2` - Ripple from center
- `x%2*y%2` - Checkerboard pattern
## API
### Core Functions
- `compileTixyExpression(code: string): TixyExpression` - Compile expression string
- `evaluateTixyExpression(expression, t, i, x, y): number` - Evaluate at coordinates
- `renderTixyToCanvas(expression, options): TixyResult` - Render to canvas
- `renderTixyToImageData(expression, options): ImageData` - Render to ImageData
### Types
- `TixyExpression` - Compiled expression object
- `TixyRenderOptions` - Rendering configuration
- `TixyResult` - Canvas and ImageData result
Math functions such as `sin`, `cos`, and `sqrt` are available without the `Math.` prefix.
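A compiled expression can also be sampled directly with `evaluateTixyExpression`, for example to probe a single cell (a minimal sketch):
```typescript
import { compileTixyExpression, evaluateTixyExpression } from '@src/tixy-generator'

// Evaluate the "ripple from center" expression at the center cell, t = 1.
const ripple = compileTixyExpression('sin((x-8)**2+(y-8)**2+t)/2')
const value = evaluateTixyExpression(ripple, 1, 0, 8, 8) // ≈ 0.42
// Non-numeric or NaN results fall back to 0; the renderer treats |value| (clamped to 1) as intensity.
```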

@@ -0,0 +1,86 @@
import type { TixyFunction, TixyExpression } from './types'
const MATH_METHODS = [
'abs', 'acos', 'asin', 'atan', 'atan2', 'ceil', 'cos', 'exp', 'floor',
'log', 'max', 'min', 'pow', 'random', 'round', 'sin', 'sqrt', 'tan'
]
export function compileTixyExpression(code: string): TixyExpression {
let processedCode = code.trim()
for (const method of MATH_METHODS) {
const regex = new RegExp(`\\b${method}\\(`, 'g')
processedCode = processedCode.replace(regex, `Math.${method}(`)
}
try {
const compiled = new Function('t', 'i', 'x', 'y', `return (${processedCode})`) as TixyFunction
compiled(0, 0, 0, 0)
return {
code: processedCode,
compiled
}
} catch (error) {
throw new Error(`Failed to compile Tixy expression: ${error}`)
}
}
export function evaluateTixyExpression(
expression: TixyExpression,
t: number,
i: number,
x: number,
y: number
): number {
try {
const result = expression.compiled(t, i, x, y)
return typeof result === 'number' && !isNaN(result) ? result : 0
} catch {
return 0
}
}
export const EXAMPLE_EXPRESSIONS = {
'sin(t)': 'Simple sine wave',
'sin(t)*x': 'Sine wave with x scaling',
'sin(t+x/8)*y': 'Traveling wave',
'(x+t)*(y+t)': 'Expanding diagonal pattern',
'sin(x*y+t)': 'XY interference pattern',
'random()-0.5': 'Random noise',
'sin(t)*cos(x*y/99)': 'Complex interference',
'(x-8)*(y-8)/64': 'Centered gradient',
'sin((x-8)**2+(y-8)**2+t)/2': 'Ripple from center',
'x%2*y%2': 'Checkerboard pattern',
  'sin(t*2)*cos(x)*sin(y)': '3D wave interference',
'abs(x-8)+abs(y-8)-t*4': 'Diamond expansion',
'sin(x/4)*sin(y/4)*sin(t)': 'Grid waves',
'(x*y)%16': 'Multiplication table',
'sin(t+x*y/16)': 'Diagonal waves',
'cos(t)*sin(x+y)': 'Corner waves',
'min(x,y)-t*2': 'Corner fill',
'max(x,y)+sin(t)': 'L-shape waves',
'sin(t+x)*cos(t+y)': 'Phase shift grid',
'(x+y+t*4)%8': 'Diagonal stripes',
'sin(x*x+y*y+t)': 'Radial sine',
'x*y/(i+1)': 'Index modulation',
'tan(t)*sin(x*y/32)': 'Tangent interference',
'floor(x/4)*floor(y/4)': 'Block pattern',
'sin(t*3)*exp(-((x-8)**2+(y-8)**2)/32)': 'Gaussian pulse',
'cos(x+t)*cos(y+t)': 'Corner cosines',
'(x^y)%4': 'XOR pattern',
'sin(sqrt(x*x+y*y)+t)': 'Circular waves',
'abs(sin(t*x)*cos(t*y))': 'Absolute waves',
'(x*t+y*t)%16': 'Time multiplication',
'sin(t)*pow(x/16,2)': 'Parabolic wave',
'cos(t+x/2)*sin(t+y/2)': 'Phase diagonal',
'min(max(x-t,0),max(y-t,0))': 'Corner sweep',
'sin(t+i/4)': 'Index time wave',
'random()*sin(t+x+y)': 'Random wave',
'floor(sin(t+x)*4)*floor(cos(t+y)*4)': 'Quantized waves',
'abs(x-y)+sin(t*2)': 'Anti-diagonal waves',
'pow(sin(t),2)*x*y/64': 'Squared sine',
'sin(t*x/8)*cos(t*y/8)': 'Scaled time waves',
'x*sin(t)+y*cos(t)': 'Rotating gradient'
}

@@ -0,0 +1,27 @@
export interface TixyParams {
t: number // time
i: number // index
x: number // x coordinate
y: number // y coordinate
}
export type TixyFunction = (t: number, i: number, x: number, y: number) => number
export interface TixyExpression {
code: string
compiled: TixyFunction
}
export interface TixyRenderOptions {
width: number
height: number
time: number
backgroundColor?: string
foregroundColor?: string
threshold?: number
}
export interface TixyResult {
imageData: ImageData
canvas: HTMLCanvasElement
}

@@ -0,0 +1,3 @@
export { compileTixyExpression, evaluateTixyExpression, EXAMPLE_EXPRESSIONS } from './core/evaluator'
export { renderTixyToCanvas, renderTixyToImageData } from './renderer/canvas'
export type { TixyParams, TixyFunction, TixyExpression, TixyRenderOptions, TixyResult } from './core/types'

@@ -0,0 +1,73 @@
import type { TixyExpression, TixyRenderOptions, TixyResult } from '../core/types'
import { evaluateTixyExpression } from '../core/evaluator'
export function renderTixyToCanvas(
expression: TixyExpression,
options: TixyRenderOptions
): TixyResult {
const {
width,
height,
time,
backgroundColor = '#000000',
foregroundColor = '#ffffff',
threshold = 0
} = options
const canvas = document.createElement('canvas')
canvas.width = width
canvas.height = height
const ctx = canvas.getContext('2d')!
const imageData = ctx.createImageData(width, height)
const data = imageData.data
const bgColor = hexToRgb(backgroundColor)
const fgColor = hexToRgb(foregroundColor)
for (let y = 0; y < height; y++) {
for (let x = 0; x < width; x++) {
const i = y * width + x
const value = evaluateTixyExpression(expression, time, i, x, y)
const intensity = Math.abs(value) > threshold ? Math.min(Math.abs(value), 1) : 0
const pixelIndex = (y * width + x) * 4
if (intensity > 0) {
data[pixelIndex] = Math.round(fgColor.r * intensity + bgColor.r * (1 - intensity))
data[pixelIndex + 1] = Math.round(fgColor.g * intensity + bgColor.g * (1 - intensity))
data[pixelIndex + 2] = Math.round(fgColor.b * intensity + bgColor.b * (1 - intensity))
} else {
data[pixelIndex] = bgColor.r
data[pixelIndex + 1] = bgColor.g
data[pixelIndex + 2] = bgColor.b
}
data[pixelIndex + 3] = 255
}
}
ctx.putImageData(imageData, 0, 0)
return {
imageData,
canvas
}
}
export function renderTixyToImageData(
expression: TixyExpression,
options: TixyRenderOptions
): ImageData {
return renderTixyToCanvas(expression, options).imageData
}
function hexToRgb(hex: string): { r: number; g: number; b: number } {
const result = /^#?([a-f\d]{2})([a-f\d]{2})([a-f\d]{2})$/i.exec(hex)
return result ? {
r: parseInt(result[1], 16),
g: parseInt(result[2], 16),
b: parseInt(result[3], 16)
} : { r: 0, g: 0, b: 0 }
}