chore: init clean tree

This commit is contained in:
2025-12-17 23:19:04 +02:00
commit 01d96d3200
45 changed files with 4152 additions and 0 deletions

5
.editorconfig Normal file
View File

@@ -0,0 +1,5 @@
# EditorConfig (https://editorconfig.org)
# `root = true` must appear in the preamble, before any section header.
# Inside `[*]` it is just an unknown property and EditorConfig keeps
# searching parent directories for more config files.
root = true

[*]
indent_size = 4
indent_style = tab
insert_final_newline = true

19
.gitignore vendored Normal file
View File

@@ -0,0 +1,19 @@
node_modules
out
dist
*.tgz
coverage
*.lcov
logs
*.log
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
.eslintcache
.cache
*.tsbuildinfo
.idea
.DS_Store

1
.prettierignore Normal file
View File

@@ -0,0 +1 @@
*.sh

7
.prettierrc Normal file
View File

@@ -0,0 +1,7 @@
{
"trailingComma": "none",
"tabWidth": 4,
"useTabs": true,
"semi": true,
"singleQuote": false
}

25
LICENSE Normal file
View File

@@ -0,0 +1,25 @@
MIT License
Copyright (c) 2025 OCbwoy3
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Additional Asset Disclaimer
The MIT license in this repository applies only to the original source code and documentation, not the assets.

26
README.md Normal file
View File

@@ -0,0 +1,26 @@
![](asset/preview.png)
# DEVICE_CONTACT
greetd greeter inspired by Deltarune. For ricing purposes or similar.
***LINUX ONLY***
## Setup
***WARNING: THIS IS NOT SECURE.*** This project is intended for Linux ricing purposes.
It will automatically log into your account because it uses plaintext credentials WHICH ARE NOT SECURE!
Put your username in `/etc/deltaboot/private/username`.
Put your password in `/etc/deltaboot/private/password`.
Private credentials are expected to be owned by root:
- `/etc/deltaboot/private` - `chmod 700` - `u+rwx` `(drwx------)`
- `/etc/deltaboot/private/*` - `chmod 600` - `u+rw` `(-rw-------)`
## ASSETS
This repo does not contain any Deltarune assets. You will need to obtain them manually.
See `asset/README.md` for details.

13
asset/.gitignore vendored Normal file
View File

@@ -0,0 +1,13 @@
font/*
AUDIO_APPEARANCE.png
AUDIO_ANOTHERHIM.ogg
AUDIO_DRONE.ogg
goner_bg_loop.mkv
snd_menumove.wav
snd_select.wav
*
!chr/
!.gitignore
!icon.png
!preview.png
!README.md

61
asset/README.md Normal file
View File

@@ -0,0 +1,61 @@
## Asset & Copyright Disclaimer
This repository does **not** include any copyrighted assets from *DELTARUNE*. No game assets are distributed here.
You will need to find and extract the assets yourself, provided you bought [the game](https://store.steampowered.com/app/1671210/DELTARUNE/) or are using the free demo from Steam.
*DELTARUNE* and all related assets are © Toby Fox.
This is a fan-made, non-commercial project and is not affiliated with or endorsed by the game's creators.
### Asset layout
These files are not included in this repository and must be supplied by the user.
```
src/asset
├── AUDIO_ANOTHERHIM.ogg
├── AUDIO_APPEARANCE.wav
├── AUDIO_DRONE.ogg
├── bg_fountain1_0.png
├── chr
│ ├── kris.png
│ ├── noelle.png
│ ├── ralsei.png
│ └── susie.png
├── font
│ ├── fnt_comicsans.png
│ ├── fnt_dotumche.png
│ ├── fnt_ja_comicsans.png
│ ├── fnt_ja_dotumche.png
│ ├── fnt_ja_mainbig.png
│ ├── fnt_ja_main.png
│ ├── fnt_ja_small.png
│ ├── fnt_ja_tinynoelle.png
│ ├── fnt_mainbig.png
│ ├── fnt_main.png
│ ├── fnt_small.png
│ ├── fnt_tinynoelle.png
│ ├── glyphs_fnt_comicsans.csv
│ ├── glyphs_fnt_dotumche.csv
│ ├── glyphs_fnt_ja_comicsans.csv
│ ├── glyphs_fnt_ja_dotumche.csv
│ ├── glyphs_fnt_ja_mainbig.csv
│ ├── glyphs_fnt_ja_main.csv
│ ├── glyphs_fnt_ja_small.csv
│ ├── glyphs_fnt_ja_tinynoelle.csv
│ ├── glyphs_fnt_mainbig.csv
│ ├── glyphs_fnt_main.csv
│ ├── glyphs_fnt_small.csv
│ └── glyphs_fnt_tinynoelle.csv
├── goner_bg_loop.mp4
├── goner_bg.mkv
├── icon.png
├── IMAGE_DEPTH_0.png
├── IMAGE_SOUL_BLUR_0.png
├── preview.png
├── README.md
├── snd_menumove.wav
└── snd_select.wav
```

5
asset/chr/.gitignore vendored Normal file
View File

@@ -0,0 +1,5 @@
kris.png
noelle.png
ralsei.png
susie.png
!.gitignore

BIN
asset/icon.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 48 KiB

BIN
asset/preview.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 7.9 MiB

4
build.sh Executable file
View File

@@ -0,0 +1,4 @@
#!/usr/bin/env bash
# Build the release and development binaries of the greeter with Bun.
set -euo pipefail

# trap "rm -r dist" EXIT
# -p: do not fail when dist/ is left over from a previous build
mkdir -p dist
bun build --compile --production --outfile=dist/deltarune_device_contact ./src/index.ts
bun build --compile --outfile=dist/deltarune_device_contact_devel ./src/index.ts

66
bun.lock Normal file
View File

@@ -0,0 +1,66 @@
{
"lockfileVersion": 1,
"configVersion": 1,
"workspaces": {
"": {
"name": "bootseq",
"dependencies": {
"@kmamal/sdl": "^0.11.13",
"@napi-rs/canvas": "^0.1.84",
},
"devDependencies": {
"@types/bun": "latest",
},
"peerDependencies": {
"typescript": "^5",
},
},
},
"packages": {
"@isaacs/fs-minipass": ["@isaacs/fs-minipass@4.0.1", "", { "dependencies": { "minipass": "^7.0.4" } }, "sha512-wgm9Ehl2jpeqP3zw/7mo3kRHFp5MEDhqAdwy1fTGkHAwnkGOVsgpvQhL8B5n1qlb01jV3n/bI0ZfZp5lWA1k4w=="],
"@kmamal/sdl": ["@kmamal/sdl@0.11.13", "", { "dependencies": { "tar": "^7.4.3" } }, "sha512-9WmxYNtCggi7Ovq1cU7m/s5WXD/+eKQxDMnL3bU8B5vr5GlaLg4xLykDCpcbWKkJJ2i6llTrdL7LiqikwDFz4w=="],
"@napi-rs/canvas": ["@napi-rs/canvas@0.1.84", "", { "optionalDependencies": { "@napi-rs/canvas-android-arm64": "0.1.84", "@napi-rs/canvas-darwin-arm64": "0.1.84", "@napi-rs/canvas-darwin-x64": "0.1.84", "@napi-rs/canvas-linux-arm-gnueabihf": "0.1.84", "@napi-rs/canvas-linux-arm64-gnu": "0.1.84", "@napi-rs/canvas-linux-arm64-musl": "0.1.84", "@napi-rs/canvas-linux-riscv64-gnu": "0.1.84", "@napi-rs/canvas-linux-x64-gnu": "0.1.84", "@napi-rs/canvas-linux-x64-musl": "0.1.84", "@napi-rs/canvas-win32-x64-msvc": "0.1.84" } }, "sha512-88FTNFs4uuiFKP0tUrPsEXhpe9dg7za9ILZJE08pGdUveMIDeana1zwfVkqRHJDPJFAmGY3dXmJ99dzsy57YnA=="],
"@napi-rs/canvas-android-arm64": ["@napi-rs/canvas-android-arm64@0.1.84", "", { "os": "android", "cpu": "arm64" }, "sha512-pdvuqvj3qtwVryqgpAGornJLV6Ezpk39V6wT4JCnRVGy8I3Tk1au8qOalFGrx/r0Ig87hWslysPpHBxVpBMIww=="],
"@napi-rs/canvas-darwin-arm64": ["@napi-rs/canvas-darwin-arm64@0.1.84", "", { "os": "darwin", "cpu": "arm64" }, "sha512-A8IND3Hnv0R6abc6qCcCaOCujTLMmGxtucMTZ5vbQUrEN/scxi378MyTLtyWg+MRr6bwQJ6v/orqMS9datIcww=="],
"@napi-rs/canvas-darwin-x64": ["@napi-rs/canvas-darwin-x64@0.1.84", "", { "os": "darwin", "cpu": "x64" }, "sha512-AUW45lJhYWwnA74LaNeqhvqYKK/2hNnBBBl03KRdqeCD4tKneUSrxUqIv8d22CBweOvrAASyKN3W87WO2zEr/A=="],
"@napi-rs/canvas-linux-arm-gnueabihf": ["@napi-rs/canvas-linux-arm-gnueabihf@0.1.84", "", { "os": "linux", "cpu": "arm" }, "sha512-8zs5ZqOrdgs4FioTxSBrkl/wHZB56bJNBqaIsfPL4ZkEQCinOkrFF7xIcXiHiKp93J3wUtbIzeVrhTIaWwqk+A=="],
"@napi-rs/canvas-linux-arm64-gnu": ["@napi-rs/canvas-linux-arm64-gnu@0.1.84", "", { "os": "linux", "cpu": "arm64" }, "sha512-i204vtowOglJUpbAFWU5mqsJgH0lVpNk/Ml4mQtB4Lndd86oF+Otr6Mr5KQnZHqYGhlSIKiU2SYnUbhO28zGQA=="],
"@napi-rs/canvas-linux-arm64-musl": ["@napi-rs/canvas-linux-arm64-musl@0.1.84", "", { "os": "linux", "cpu": "arm64" }, "sha512-VyZq0EEw+OILnWk7G3ZgLLPaz1ERaPP++jLjeyLMbFOF+Tr4zHzWKiKDsEV/cT7btLPZbVoR3VX+T9/QubnURQ=="],
"@napi-rs/canvas-linux-riscv64-gnu": ["@napi-rs/canvas-linux-riscv64-gnu@0.1.84", "", { "os": "linux", "cpu": "none" }, "sha512-PSMTh8DiThvLRsbtc/a065I/ceZk17EXAATv9uNvHgkgo7wdEfTh2C3aveNkBMGByVO3tvnvD5v/YFtZL07cIg=="],
"@napi-rs/canvas-linux-x64-gnu": ["@napi-rs/canvas-linux-x64-gnu@0.1.84", "", { "os": "linux", "cpu": "x64" }, "sha512-N1GY3noO1oqgEo3rYQIwY44kfM11vA0lDbN0orTOHfCSUZTUyiYCY0nZ197QMahZBm1aR/vYgsWpV74MMMDuNA=="],
"@napi-rs/canvas-linux-x64-musl": ["@napi-rs/canvas-linux-x64-musl@0.1.84", "", { "os": "linux", "cpu": "x64" }, "sha512-vUZmua6ADqTWyHyei81aXIt9wp0yjeNwTH0KdhdeoBb6azHmFR8uKTukZMXfLCC3bnsW0t4lW7K78KNMknmtjg=="],
"@napi-rs/canvas-win32-x64-msvc": ["@napi-rs/canvas-win32-x64-msvc@0.1.84", "", { "os": "win32", "cpu": "x64" }, "sha512-YSs8ncurc1xzegUMNnQUTYrdrAuaXdPMOa+iYYyAxydOtg0ppV386hyYMsy00Yip1NlTgLCseRG4sHSnjQx6og=="],
"@types/bun": ["@types/bun@1.3.4", "", { "dependencies": { "bun-types": "1.3.4" } }, "sha512-EEPTKXHP+zKGPkhRLv+HI0UEX8/o+65hqARxLy8Ov5rIxMBPNTjeZww00CIihrIQGEQBYg+0roO5qOnS/7boGA=="],
"@types/node": ["@types/node@25.0.1", "", { "dependencies": { "undici-types": "~7.16.0" } }, "sha512-czWPzKIAXucn9PtsttxmumiQ9N0ok9FrBwgRWrwmVLlp86BrMExzvXRLFYRJ+Ex3g6yqj+KuaxfX1JTgV2lpfg=="],
"bun-types": ["bun-types@1.3.4", "", { "dependencies": { "@types/node": "*" } }, "sha512-5ua817+BZPZOlNaRgGBpZJOSAQ9RQ17pkwPD0yR7CfJg+r8DgIILByFifDTa+IPDDxzf5VNhtNlcKqFzDgJvlQ=="],
"chownr": ["chownr@3.0.0", "", {}, "sha512-+IxzY9BZOQd/XuYPRmrvEVjF/nqj5kgT4kEq7VofrDoM1MxoRjEWkrCC3EtLi59TVawxTAn+orJwFQcrqEN1+g=="],
"minipass": ["minipass@7.1.2", "", {}, "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw=="],
"minizlib": ["minizlib@3.1.0", "", { "dependencies": { "minipass": "^7.1.2" } }, "sha512-KZxYo1BUkWD2TVFLr0MQoM8vUUigWD3LlD83a/75BqC+4qE0Hb1Vo5v1FgcfaNXvfXzr+5EhQ6ing/CaBijTlw=="],
"tar": ["tar@7.5.2", "", { "dependencies": { "@isaacs/fs-minipass": "^4.0.0", "chownr": "^3.0.0", "minipass": "^7.1.2", "minizlib": "^3.1.0", "yallist": "^5.0.0" } }, "sha512-7NyxrTE4Anh8km8iEy7o0QYPs+0JKBTj5ZaqHg6B39erLg0qYXN3BijtShwbsNSvQ+LN75+KV+C4QR/f6Gwnpg=="],
"typescript": ["typescript@5.9.3", "", { "bin": { "tsc": "bin/tsc", "tsserver": "bin/tsserver" } }, "sha512-jl1vZzPDinLr9eUt3J/t7V6FgNEw9QjvBPdysz9KfQDD41fQrC2Y4vKQdiaUpFT4bXlb1RHhLpp8wtm6M5TgSw=="],
"undici-types": ["undici-types@7.16.0", "", {}, "sha512-Zz+aZWSj8LE6zoxD+xrjh4VfkIG8Ya6LvYkZqtUQGJPZjYl53ypCaUwWqo7eI0x66KBGeRo+mlBEkMSeSZ38Nw=="],
"yallist": ["yallist@5.0.0", "", {}, "sha512-YgvUTfwqyc7UXVMrB+SImsVYSmTS8X/tSrtdNZMImM+n7+QTriRXyXim0mBrTXNeqzVF0KWGgHPeiyViFFrNDw=="],
}
}

53
install-dev.sh Executable file
View File

@@ -0,0 +1,53 @@
#!/usr/bin/env bash
# install-dev.sh — copy this repo to /opt, build the greeter with Bun, and
# point greetd at a DEVELOPMENT build (--debug flags, log at
# /deltaboot-debug.txt). Requires bun, rsync and sudo on PATH.
# Any existing greetd config is backed up with a timestamp suffix first.
set -euo pipefail
REPO_DIR="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd)"
INSTALL_DIR="/opt/deltabootd_DEVICE_CONTACT"
BIN_NAME="deltarune_device_contact"
OUT_BIN="$INSTALL_DIR/dist/$BIN_NAME"
# Both may be overridden from the environment.
GREETD_CONFIG="${GREETD_CONFIG:-/etc/greetd/config.toml}"
GREETD_USER="${GREETD_USER:-greeter}"
BACKUP_SUFFIX="$(date +%Y%m%d%H%M%S)"
log() { printf '[install] %s\n' "$*"; }
command -v bun >/dev/null 2>&1 || { echo "bun is required on PATH"; exit 1; }
log "Copying project to $INSTALL_DIR"
sudo mkdir -p "$INSTALL_DIR"
# --delete keeps the install dir an exact mirror of the checkout.
sudo rsync -a --delete \
--exclude node_modules \
--exclude dist \
--exclude .git \
"$REPO_DIR"/ "$INSTALL_DIR"/
log "Installing dependencies in $INSTALL_DIR"
sudo env -C "$INSTALL_DIR" bun install
log "Installing N-API dependencies in $INSTALL_DIR"
sudo env -C "$INSTALL_DIR" bun pm trust --all
log "Building binary -> $OUT_BIN"
sudo env -C "$INSTALL_DIR" bun build --compile --production --outfile="$OUT_BIN" ./src/index.ts
sudo chmod +x "$OUT_BIN"
if [[ -f "$GREETD_CONFIG" ]]; then
log "Backing up greetd config to ${GREETD_CONFIG}.${BACKUP_SUFFIX}.bak"
sudo cp "$GREETD_CONFIG" "${GREETD_CONFIG}.${BACKUP_SUFFIX}.bak"
else
log "greetd config not found, creating $GREETD_CONFIG"
sudo mkdir -p "$(dirname "$GREETD_CONFIG")"
fi
log "Writing greetd config to launch $OUT_BIN as $GREETD_USER"
# NOTE(review): the generated config runs the session as "root", not as
# "$GREETD_USER" — the log line above is misleading; confirm which is intended.
sudo tee "$GREETD_CONFIG" >/dev/null <<EOF
[terminal]
vt = 1
[default_session]
command = "bash -c 'IS_CAGE=1 SDL_VIDEODRIVER=wayland cage -s -- $OUT_BIN --debug --debug-log-file=/deltaboot-debug.txt'"
user = "root"
EOF
log "Done. Restart greetd to apply changes."

53
install.sh Executable file
View File

@@ -0,0 +1,53 @@
#!/usr/bin/env bash
# install.sh — copy this repo to /opt, build the greeter with Bun, and point
# greetd at the PRODUCTION build (no debug flags; compare install-dev.sh).
# Requires bun, rsync and sudo on PATH. Any existing greetd config is backed
# up with a timestamp suffix first.
set -euo pipefail
REPO_DIR="$(cd -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd)"
INSTALL_DIR="/opt/deltabootd_DEVICE_CONTACT"
BIN_NAME="deltarune_device_contact"
OUT_BIN="$INSTALL_DIR/dist/$BIN_NAME"
# Both may be overridden from the environment.
GREETD_CONFIG="${GREETD_CONFIG:-/etc/greetd/config.toml}"
GREETD_USER="${GREETD_USER:-greeter}"
BACKUP_SUFFIX="$(date +%Y%m%d%H%M%S)"
log() { printf '[install] %s\n' "$*"; }
command -v bun >/dev/null 2>&1 || { echo "bun is required on PATH"; exit 1; }
log "Copying project to $INSTALL_DIR"
sudo mkdir -p "$INSTALL_DIR"
# --delete keeps the install dir an exact mirror of the checkout.
sudo rsync -a --delete \
--exclude node_modules \
--exclude dist \
--exclude .git \
"$REPO_DIR"/ "$INSTALL_DIR"/
log "Installing dependencies in $INSTALL_DIR"
sudo env -C "$INSTALL_DIR" bun install
log "Installing N-API dependencies in $INSTALL_DIR"
sudo env -C "$INSTALL_DIR" bun pm trust --all
log "Building binary -> $OUT_BIN"
sudo env -C "$INSTALL_DIR" bun build --compile --production --outfile="$OUT_BIN" ./src/index.ts
sudo chmod +x "$OUT_BIN"
if [[ -f "$GREETD_CONFIG" ]]; then
log "Backing up greetd config to ${GREETD_CONFIG}.${BACKUP_SUFFIX}.bak"
sudo cp "$GREETD_CONFIG" "${GREETD_CONFIG}.${BACKUP_SUFFIX}.bak"
else
log "greetd config not found, creating $GREETD_CONFIG"
sudo mkdir -p "$(dirname "$GREETD_CONFIG")"
fi
log "Writing greetd config to launch $OUT_BIN as $GREETD_USER"
# NOTE(review): the generated config runs the session as "root", not as
# "$GREETD_USER" — the log line above is misleading; confirm which is intended.
sudo tee "$GREETD_CONFIG" >/dev/null <<EOF
[terminal]
vt = 1
[default_session]
command = "bash -c 'IS_CAGE=1 SDL_VIDEODRIVER=wayland cage -s -- $OUT_BIN'"
user = "root"
EOF
log "Done. Restart greetd to apply changes."

21
package.json Normal file
View File

@@ -0,0 +1,21 @@
{
"name": "device_contact",
"module": "src/index.ts",
"type": "module",
"private": true,
"license": "MIT",
"scripts": {
"start": "bun run src/index.ts",
"dev": "SDL_VIDEODRIVER=wayland NODE_ENV=development bun run --watch src/ui/app.ts"
},
"devDependencies": {
"@types/bun": "latest"
},
"peerDependencies": {
"typescript": "^5"
},
"dependencies": {
"@kmamal/sdl": "^0.11.13",
"@napi-rs/canvas": "^0.1.84"
}
}

82
src/audio/decoder.ts Normal file
View File

@@ -0,0 +1,82 @@
import { spawn } from 'child_process'
// Raw decoded PCM plus the metadata needed to queue it for playback.
export type DecodedAudio = {
pcm: Buffer
sampleRate: number
channels: number
format: 's16'
bytesPerSample: number
bytesPerFrame: number
durationSeconds: number
}
// Optional overrides for the decode target format.
export type DecodeOptions = {
sampleRate?: number
channels?: number
}
// Probed once at module load; null when ffmpeg is not on PATH.
const ffmpegPath = Bun.which("ffmpeg");
console.debug("[debug] [audio/decoder] FFmpeg installed in:", ffmpegPath);
/**
 * Decode an audio file to raw interleaved signed 16-bit little-endian PCM
 * by piping it through ffmpeg.
 *
 * @param filePath path handed to ffmpeg `-i`
 * @param options  target sampleRate (default 48000) and channels (default 2)
 * @returns the PCM buffer plus per-frame/duration metadata
 * @throws Error when ffmpeg is missing from PATH or exits non-zero
 */
export async function decodeOggToPCM(
	filePath: string,
	{ sampleRate = 48_000, channels = 2 }: DecodeOptions = {},
): Promise<DecodedAudio> {
	console.debug("[debug] [audio/decoder] decodeOggToPCM", { filePath, sampleRate, channels })
	// Fail fast before doing any other work. Previously this threw a bare
	// string, which loses the stack trace and breaks `instanceof Error`
	// checks in callers — throw a proper Error instead.
	if (!ffmpegPath) {
		throw new Error("FFmpeg is not installed!")
	}
	const args = [
		'-v',
		'error',
		'-i',
		filePath,
		'-f',
		's16le', // raw signed 16-bit little-endian samples on stdout
		'-ac',
		String(channels),
		'-ar',
		String(sampleRate),
		'pipe:1',
	]
	console.debug("[debug] [audio/decoder] spawn ffmpeg", args)
	const ffmpeg = spawn(ffmpegPath, args, { stdio: ['ignore', 'pipe', 'pipe'] })
	const chunks: Buffer[] = []
	const stderr: Buffer[] = []
	ffmpeg.stdout?.on('data', (chunk: Buffer) => chunks.push(chunk))
	ffmpeg.stderr?.on('data', (chunk: Buffer) => stderr.push(chunk))
	// 'close' resolves with the exit code; 'error' rejects (e.g. spawn failure).
	const exitCode: number = await new Promise((resolve, reject) => {
		ffmpeg.on('error', reject)
		ffmpeg.on('close', resolve)
	})
	console.debug("[debug] [audio/decoder] ffmpeg exited with code", exitCode)
	if (exitCode !== 0) {
		const message = Buffer.concat(stderr).toString() || `ffmpeg exited with code ${exitCode}`
		throw new Error(`Failed to decode audio: ${message}`)
	}
	const pcm = Buffer.concat(chunks)
	const bytesPerSample = 2 // s16 => 2 bytes per sample
	const bytesPerFrame = channels * bytesPerSample
	const durationSeconds = pcm.length / (bytesPerFrame * sampleRate)
	console.debug("[debug] [audio/decoder] successfully decoded pcm")
	return {
		pcm,
		sampleRate,
		channels,
		format: 's16',
		bytesPerSample,
		bytesPerFrame,
		durationSeconds,
	}
}

51
src/audio/pitch.ts Normal file
View File

@@ -0,0 +1,51 @@
import type { DecodedAudio } from './decoder'
// Options for the pitch-up ramp rendered at the start of a loop.
export type PitchRampOptions = {
	durationSeconds?: number
	startRatio?: number
	endRatio?: number
}

/**
 * Render the first `durationSeconds` of `audio` with a playback rate that
 * ramps linearly from `startRatio` to `endRatio`. Samples are resampled via
 * linear interpolation between neighbouring source frames; output frame
 * count is capped at what the source buffer actually contains.
 */
export function createPitchRampBuffer(
	audio: DecodedAudio,
	{
		durationSeconds = 1,
		startRatio = 0,
		endRatio = 1.0,
	}: PitchRampOptions = {},
): Buffer {
	console.debug("[debug] [audio/pitch] new PitchRampBuffer", { durationSeconds, startRatio, endRatio })
	const sourceFrames = Math.floor(audio.pcm.length / audio.bytesPerFrame)
	const frameCount = Math.min(Math.floor(durationSeconds * audio.sampleRate), sourceFrames)
	if (frameCount <= 0) return Buffer.alloc(0)
	const out = Buffer.alloc(frameCount * audio.bytesPerFrame)
	let readHead = 0
	for (let i = 0; i < frameCount; i++) {
		// Ramp progress runs 0 → 1 across the output; a single frame is fully ramped.
		const t = frameCount > 1 ? i / (frameCount - 1) : 1
		const playbackRate = startRatio + (endRatio - startRatio) * t
		const lo = Math.floor(readHead)
		const hi = Math.min(lo + 1, sourceFrames - 1)
		const mix = readHead - lo
		for (let ch = 0; ch < audio.channels; ch++) {
			const a = audio.pcm.readInt16LE((lo * audio.channels + ch) * audio.bytesPerSample)
			const b = audio.pcm.readInt16LE((hi * audio.channels + ch) * audio.bytesPerSample)
			const blended = a + (b - a) * mix
			out.writeInt16LE(Math.round(blended), (i * audio.channels + ch) * audio.bytesPerSample)
		}
		readHead += playbackRate
	}
	return out
}

124
src/audio/player.ts Normal file
View File

@@ -0,0 +1,124 @@
import sdl, { type Sdl } from '@kmamal/sdl'
import { resolveAssetPath } from '../renderer/assets'
import { createPitchRampBuffer, type PitchRampOptions } from './pitch'
import { decodeOggToPCM } from './decoder'
// Options for building a looping player from an asset file.
export type AudioLoopOptions = {
sampleRate?: number
channels?: number
pitchRamp?: PitchRampOptions
}
// Plays a decoded buffer in a gapless loop on every available SDL playback
// device, by keeping at least two loop iterations queued at all times.
// The very first iteration per device may use a pitch-ramped intro buffer.
export class AudioLoopPlayer {
#playbacks: Sdl.Audio.AudioPlaybackInstance[] = []
// Buffer for the first loop iteration (ramped intro + remainder of the track).
#firstBuffer: Buffer
// Buffer for every subsequent iteration (the unmodified track).
#baseBuffer: Buffer
#queueCheck: ReturnType<typeof setInterval> | undefined
#stopped = false
// Tracks which playback instances have already consumed the intro buffer.
#usedFirstFor = new WeakSet<Sdl.Audio.AudioPlaybackInstance>()
// Use the static fromAsset() factory; the constructor only wires state.
private constructor(firstBuffer: Buffer, baseBuffer: Buffer, playbacks: Sdl.Audio.AudioPlaybackInstance[]) {
console.debug("[debug] [audio/player] new AudioLoopPlayer")
playbacks.forEach((pb) => {
console.debug(`[debug] [audio/player] ctor: provided audio device: ${pb.device.name}`)
})
this.#firstBuffer = firstBuffer
this.#baseBuffer = baseBuffer
this.#playbacks = playbacks
}
// Decode an asset, build the optional pitch-ramp intro, and open a playback
// instance on every detected device.
static async fromAsset(relativePath: string, options: AudioLoopOptions = {}): Promise<AudioLoopPlayer> {
console.debug("[debug] [audio/player] fromAsset", relativePath, options)
const { sampleRate = 48_000, channels = 2, pitchRamp } = options
const assetPath = resolveAssetPath(relativePath)
const decoded = await decodeOggToPCM(assetPath, { sampleRate, channels })
const rampBuffer = createPitchRampBuffer(decoded, pitchRamp)
// The ramp replaces the first `rampBuffer.length` bytes of the track;
// `tail` is whatever comes after it.
const replacedBytes = rampBuffer.length > 0 ? Math.min(rampBuffer.length, decoded.pcm.length) : 0
const tail = decoded.pcm.subarray(replacedBytes)
const firstBuffer = rampBuffer.length > 0 ? Buffer.concat([rampBuffer, tail]) : decoded.pcm
const baseBuffer = decoded.pcm
const devices = selectPlaybackDevices()
const playbacks = devices.map((device) =>
sdl.audio.openDevice(device, {
format: decoded.format,
channels: decoded.channels as 1 | 2 | 4 | 6,
frequency: decoded.sampleRate,
})
)
return new AudioLoopPlayer(firstBuffer, baseBuffer, playbacks)
}
// Begin (or restart) looped playback on all devices.
start(): void {
if (this.#playbacks.length === 0) return
if (this.#queueCheck) clearInterval(this.#queueCheck)
console.debug("[debug] [audio/player] start")
this.#playbacks.forEach((pb) => pb.clearQueue())
this.#stopped = false
this.#usedFirstFor = new WeakSet()
// Prime queue for seamless start.
this.#playbacks.forEach((pb) => {
this.#enqueueNextLoop(pb)
this.#enqueueNextLoop(pb)
pb.play(true)
})
// Top up queues every 100ms so playback never drains.
this.#queueCheck = setInterval(() => this.#ensureQueued(), 100)
this.#queueCheck.unref?.()
}
// Stop playback, close all devices, and drop them; the player cannot be
// restarted afterwards (playbacks is emptied).
stop(): void {
console.debug("[debug] [audio/player] stop")
if (this.#playbacks.length === 0) return
this.#stopped = true
if (this.#queueCheck) {
clearInterval(this.#queueCheck)
this.#queueCheck = undefined
}
this.#playbacks.forEach((pb) => {
pb.clearQueue()
pb.close()
})
this.#playbacks = []
}
// True while any device reports it is playing.
get playing(): boolean {
return this.#playbacks.some((pb) => pb.playing)
}
// Queue one more loop iteration: the intro buffer the first time a device
// is seen, the base buffer thereafter.
#enqueueNextLoop(playback: Sdl.Audio.AudioPlaybackInstance): void {
if (!playback) return
const alreadyUsedFirst = this.#usedFirstFor.has(playback)
const buffer = alreadyUsedFirst ? this.#baseBuffer : this.#firstBuffer
playback.enqueue(buffer)
this.#usedFirstFor.add(playback)
console.debug("[debug] [audio/player] enqueued next loop")
}
// Keep at least two full loops' worth of bytes queued per device.
#ensureQueued(): void {
if (this.#playbacks.length === 0 || this.#stopped) return
const minQueueBytes = this.#baseBuffer.length * 2
this.#playbacks.forEach((pb) => {
while (pb.queued < minQueueBytes) {
this.#enqueueNextLoop(pb)
}
})
}
}
// Collect every SDL device reporting type 'playback'. When enumeration
// yields none, fall back to a bare descriptor so openDevice can still be
// attempted with SDL's default device.
function selectPlaybackDevices(): Sdl.Audio.PlaybackDevice[] {
	const found: Sdl.Audio.PlaybackDevice[] = []
	for (const device of sdl.audio.devices) {
		if (device.type === 'playback') {
			found.push(device as Sdl.Audio.PlaybackDevice)
		}
	}
	if (found.length > 0) return found
	// last resort
	return [{ type: 'playback' }]
}

156
src/bootsequence/dia.ts Normal file
View File

@@ -0,0 +1,156 @@
// Dialogue text: either a literal string or a function resolved lazily
// against the collected answers.
type ResolvableText = string | ((answers: Record<string, string>) => string);
// A multiple-choice question step.
type Question = {
t: "q",
text: ResolvableText,
answers: {
text: string,
value: string
}[],
id: string
}
// A plain dialogue line.
type Dia = {
t: "d",
text: ResolvableText
}
// A timed pause; `time` is in milliseconds (see the 4000ms opening wait).
type Wai = {
t: "w",
time: number
}
// An arbitrary callback step.
type Fun = {
t: "f",
f: () => any
}
type chrt = "kris" | "susie" | "ralsei" | "noelle"
type desktopt = "hyprland" | "plasma"
// Module-level state mutated by the UI via the setters below and read by
// the lazy text resolvers inside QUESTIONS.
let chr: chrt = "kris";
let desktop: desktopt = "hyprland";
// Record the vessel (character) the user chose.
export function setChar(newchr: chrt) {
chr = newchr;
}
// Record the desktop environment the user chose.
export function setDesktop(newdesktop: desktopt) {
desktop = newdesktop;
}
// TODO: Work on this a bit more
// The scripted boot sequence, consumed in order by the UI: waits ("w"),
// questions ("q"), dialogue ("d") and callbacks ("f"). Function-valued
// text entries read the module-level `chr`/`desktop` state set above, so
// pronouns and phrasing follow the user's earlier answers.
export const QUESTIONS: (Question | Dia | Wai | Fun)[] = [
// Opening pause before the first prompt.
{
t: "w",
time: 4000
},
{
t: "q",
id: "char",
text: "SELECT THE VESSEL YOU PREFER.",
answers: [
{
text: "RALSEI",
value: "ralsei"
},
{
text: "SUSIE",
value: "susie"
},
{
text: "KRIS",
value: "kris"
},
{
text: "NOELLE",
value: "noelle"
}
]
},
{
t: "d",
text: "YOU HAVE CHOSEN A WONDERFUL FORM."
},
// Pronoun depends on the chosen character: her / his / their.
{
t: "d",
text: () => `NOW LET US SHAPE ${(["noelle", "susie"]).includes(chr) ? "HER" : chr === "ralsei" ? "HIS" : "THEIR"} MIND AS YOUR OWN.`
},
{
t: "q",
id: "desktop",
text: () => `WHAT IS ${(["noelle", "susie"]).includes(chr) ? "HER" : chr === "ralsei" ? "HIS" : "THEIR"} FAVORITE DESKTOP ENVIRONMENT?`,
answers: [
{
text: "HYPRLAND",
value: "hyprland"
},
{
text: "KDE",
value: "plasma"
}
]
},
{
t: "d",
text: () => `${desktop === "hyprland" ? "HYPRLAND" : "KDE"}, INTERESTING CHOICE..`
},
// Catppuccin flavor names are used as palette values.
{
t: "q",
id: "color",
text: "YOUR FAVORITE COLOR PALETTE?",
answers: [
{
text: "LATTE",
value: "latte"
},
{
text: "FRAPPE",
value: "frappe"
},
{
text: "MACCHIATO",
value: "macchiato"
},
{
text: "MOCHA",
value: "mocha"
}
]
},
{
t: "q",
id: "gift",
text: () => `PLEASE GIVE ${(["noelle", "susie"]).includes(chr) ? "HER" : chr === "ralsei" ? "HIM" : "THEM"} A GIFT.`,
answers: [
{
text: "KINDNESS",
value: "kindness"
},
{
text: "MIND",
value: "mind"
},
{
text: "AMBITION",
value: "ambition"
},
{
text: "BRAVERY",
value: "bravery"
}
]
},
{
t: "d",
text: "THANK YOU FOR YOUR TIME."
},
{
t: "d",
text: () => `YOUR WONDERFUL CREATION, ${chr.toUpperCase()}`
},
{
t: "d",
text: "WILL NOW BE"
}
]

216
src/bootsequence/font.ts Normal file
View File

@@ -0,0 +1,216 @@
import fs from "fs";
import { type CanvasRenderingContext2D, type Image } from "@napi-rs/canvas";
import { loadImageAsset, resolveAssetPath } from "../renderer/assets";
// One glyph's source rectangle in the atlas plus its layout metrics:
// `offset` is added to the cursor before drawing, `shift` is the horizontal
// advance applied after drawing (see drawBitmapText below).
type Glyph = {
x: number;
y: number;
w: number;
h: number;
shift: number;
offset: number;
};
// Maps a Unicode code point to its glyph record.
export type GlyphMap = Map<number, Glyph>;
// A loaded bitmap font: atlas image, glyph table, and derived line height.
export type BitmapFont = {
atlas: Image;
glyphs: GlyphMap;
lineHeight: number;
};
/**
 * Parse a semicolon-separated glyph CSV (header row first) into a
 * code-point → Glyph map. Rows with fewer than 7 fields, or a non-numeric
 * code point, are skipped.
 */
function loadGlyphs(csvRelativePath: string): GlyphMap {
	const csvPath = resolveAssetPath(csvRelativePath);
	const raw = fs.readFileSync(csvPath, "utf8");
	const rows = raw.split(/\r?\n/).filter((line) => line.trim().length > 0);
	const glyphs: GlyphMap = new Map();
	// Row 0 is the header, so data starts at index 1.
	for (const row of rows.slice(1)) {
		const fields = row.split(";");
		if (fields.length < 7) continue;
		const codePoint = Number(fields[0]);
		if (!Number.isFinite(codePoint)) continue;
		glyphs.set(codePoint, {
			x: Number(fields[1]),
			y: Number(fields[2]),
			w: Number(fields[3]),
			h: Number(fields[4]),
			shift: Number(fields[5]),
			offset: Number(fields[6])
		});
	}
	return glyphs;
}
// Derive the line height as the tallest glyph plus 4px of leading.
// An empty glyph map yields 4.
function computeLineHeight(glyphs: GlyphMap): number {
	let tallest = 0;
	for (const { h } of glyphs.values()) {
		tallest = Math.max(tallest, h);
	}
	return tallest + 4;
}
/**
 * Load a bitmap font: glyph metrics from a CSV and the atlas image from a
 * PNG, defaulting to the `fnt_main` asset pair.
 */
export async function loadBitmapFont(
	atlasRelativePath = "font/fnt_main.png",
	glyphsRelativePath = "font/glyphs_fnt_main.csv"
): Promise<BitmapFont> {
	// Glyph parsing is synchronous and runs first, matching the load order
	// callers observe; the atlas is awaited last.
	const glyphs = loadGlyphs(glyphsRelativePath);
	const lineHeight = computeLineHeight(glyphs);
	return { atlas: await loadImageAsset(atlasRelativePath), glyphs, lineHeight };
}
// Options accepted by the text drawing/measuring helpers below.
type DrawOptions = {
	align?: "left" | "center";
	color?: string;
	alpha?: number;
	scale?: number; // text scale multiplier
};
// Normalize a user-supplied scale: any missing, non-finite, zero or
// negative value falls back to 1.
function normScale(scale: number | undefined): number {
	if (scale === undefined) return 1;
	return Number.isFinite(scale) && scale > 0 ? scale : 1;
}
/**
 * Measure the scaled advance width of `text` in the given bitmap font.
 * Code points missing from the font contribute zero width (note: the draw
 * routines advance 8*scale for missing glyphs instead).
 */
export function measureTextWidth(
	text: string,
	font: BitmapFont,
	options: Pick<DrawOptions, "scale"> = {}
): number {
	const scale = normScale(options.scale);
	let total = 0;
	for (const ch of text) {
		const code = ch.codePointAt(0) ?? 0;
		total += (font.glyphs.get(code)?.shift ?? 0) * scale;
	}
	return total;
}
/**
 * Draw one line of bitmap text at (x, y). Supports left/center alignment,
 * a global alpha multiplier, a uniform scale, and an optional tint color
 * applied over the whole rendered span via source-atop compositing.
 * Missing glyphs advance the cursor by 8*scale without drawing.
 */
export function drawBitmapText(
ctx: CanvasRenderingContext2D,
font: BitmapFont,
text: string,
x: number,
y: number,
options: DrawOptions = {}
): void {
const { atlas, glyphs, lineHeight } = font;
const align = options.align ?? "left";
const color = options.color;
const alpha = options.alpha ?? 1;
const scale = normScale(options.scale);
let cursor = x;
// Center alignment shifts the start left by half the measured width.
if (align === "center") {
cursor = x - measureTextWidth(text, font, { scale }) / 2;
}
// Multiply into the ctx alpha and restore it at the end.
const previousAlpha = ctx.globalAlpha;
ctx.globalAlpha = previousAlpha * alpha;
const startX = cursor;
for (const ch of text) {
const glyph = glyphs.get(ch.codePointAt(0) ?? 0);
if (!glyph) {
cursor += 8 * scale;
continue;
}
(ctx as any).drawImage(
atlas as any,
glyph.x,
glyph.y,
glyph.w,
glyph.h,
cursor + glyph.offset * scale,
y,
glyph.w * scale,
glyph.h * scale
);
cursor += glyph.shift * scale;
}
// Tint pass: fill the drawn span with `color`, clipped to existing pixels
// by source-atop. Skipped for the default white.
if (color && color.toLowerCase() !== "white") {
const width = cursor - startX;
ctx.save();
ctx.globalAlpha = previousAlpha * alpha;
ctx.globalCompositeOperation = "source-atop";
ctx.fillStyle = color;
ctx.fillRect(startX, y, width, lineHeight * scale);
ctx.restore();
}
ctx.globalAlpha = previousAlpha;
}
/**
 * Like drawBitmapText but left-aligned only, tinting each glyph's rectangle
 * individually (a save/restore + source-atop fill per glyph) rather than
 * one fill over the whole span. Missing glyphs advance 8*scale.
 */
export function drawBitmapTextPerGlyph(
ctx: CanvasRenderingContext2D,
font: BitmapFont,
text: string,
startX: number,
y: number,
options: DrawOptions = {}
): void {
const { atlas, glyphs } = font;
const color = options.color;
const alpha = options.alpha ?? 1;
const scale = normScale(options.scale);
let cursor = startX;
// Multiply into the ctx alpha and restore it at the end.
const previousAlpha = ctx.globalAlpha;
ctx.globalAlpha = previousAlpha * alpha;
for (const ch of text) {
const glyph = glyphs.get(ch.codePointAt(0) ?? 0);
if (!glyph) {
cursor += 8 * scale;
continue;
}
(ctx as any).drawImage(
atlas as any,
glyph.x,
glyph.y,
glyph.w,
glyph.h,
cursor + glyph.offset * scale,
y,
glyph.w * scale,
glyph.h * scale
);
// Per-glyph tint, clipped to the glyph's pixels via source-atop.
if (color && color.toLowerCase() !== "white") {
ctx.save();
ctx.globalAlpha = previousAlpha * alpha;
ctx.globalCompositeOperation = "source-atop";
ctx.fillStyle = color;
ctx.fillRect(
cursor + glyph.offset * scale,
y,
glyph.w * scale,
glyph.h * scale
);
ctx.restore();
}
cursor += glyph.shift * scale;
}
ctx.globalAlpha = previousAlpha;
}

View File

@@ -0,0 +1,466 @@
import sdl from "@kmamal/sdl";
import { type CanvasRenderingContext2D, type Image } from "@napi-rs/canvas";
import { loadImageAsset } from "../renderer/assets";
import { type BitmapFont, drawBitmapTextPerGlyph, loadBitmapFont, measureTextWidth } from "./font";
import { QUESTIONS, setChar, setDesktop } from "./dia";
import { homedir } from "os";
import { join } from "path";
import { writeFileSync } from "fs";
// Text that may be resolved lazily from previously collected answers.
type ResolvableText = string | ((answers: Record<string, string>) => string);
type BootsequenceAnswerKey = keyof BootsequenceAnswers;
// One selectable answer of a question entry.
type QuestionAnswer = {
text: string;
value: string;
};
// Local mirrors of the entry shapes declared in ./dia.
type QuestionEntry = {
t: "q";
id: string;
text: ResolvableText;
answers: QuestionAnswer[];
};
type DialogueEntry = {
t: "d";
text: ResolvableText;
};
type FunctionEntry = {
t: "f",
f: () => any
}
type WaitEntry = {
t: "w";
time: number;
};
type SequenceEntry = QuestionEntry | DialogueEntry | WaitEntry | FunctionEntry;
// Tuning constants for the dialogue presentation.
const TYPEWRITER_SPEED = 16; // chars/s
const DIALOGUE_HOLD_MS = 1200;
const HEART_SCALE = 1.1;
const TYPEWRITER_DISABLED = false;
const ANSWER_FADE_MS = 220;
// 1px offsets in the four cardinal directions, used for a blur/outline pass.
const BLUR_OFFSETS = [
[-1, 0],
[1, 0],
[0, -1],
[0, 1]
] as const;
// Shape of a keyboard event delivered to handleKey.
type KeyInput = {
key: string | null;
scancode: number;
ctrl: number;
shift: number;
alt: number;
super: number;
};
// NOTE(review): import placed mid-file between declarations; consider moving
// it to the top with the other imports.
import type { BootsequenceAnswers } from "../types";
// Public surface of the boot-sequence UI returned by createBootSequenceUI.
export type BootSequenceUI = {
update: (deltaMs: number) => void;
render: (ctx: CanvasRenderingContext2D) => void;
handleKey: (input: KeyInput) => void;
isFinished: () => boolean;
getAnswers: () => BootsequenceAnswers;
};
// Greedy word-wrap: accumulate whitespace-delimited tokens until adding the
// next one would exceed maxWidth (measured in the bitmap font), then start a
// new line. Splitting on /(\s+)/ keeps whitespace runs as tokens so inner
// spacing is preserved; lines are trimmed only at their ends.
// NOTE(review): a single token wider than maxWidth still ends up on its own
// line and may overflow — confirm that is acceptable for this UI.
function wrapLines(text: string, font: BitmapFont, maxWidth: number): string[] {
const tokens = text.split(/(\s+)/);
const lines: string[] = [];
let current = "";
for (const token of tokens) {
const next = current + token;
if (measureTextWidth(next.trimEnd(), font) <= maxWidth) {
current = next;
continue;
}
if (current.trim().length > 0) {
lines.push(current.trimEnd());
}
current = token.trimStart();
}
if (current.trim().length > 0) {
lines.push(current.trimEnd());
}
// Fall back to the raw text so callers always get at least one line.
if (lines.length === 0) return [text];
return lines;
}
/**
 * Builds the boot-sequence questionnaire UI.
 *
 * Steps through the module-level QUESTIONS script, whose entries carry a
 * discriminator `t`: "q" (multiple-choice question), "d" (dialogue), "w"
 * (timed wait) or "f" (callback). Question/dialogue text is revealed with a
 * typewriter effect; chosen answers are collected into a BootsequenceAnswers
 * record and written to ~/.deltaboot.json when the script completes.
 *
 * @param baseWidth  logical width used for text layout and sprite placement
 * @param baseHeight logical height used for text layout and sprite placement
 * @returns the UI driver: update(deltaMs), render(ctx), handleKey(input),
 *          plus isFinished() and getAnswers() accessors
 */
export async function createBootSequenceUI(
	baseWidth: number,
	baseHeight: number
): Promise<BootSequenceUI> {
	// Two separate font instances: one for the prompt text, one for answers.
	const questionFont = await loadBitmapFont();
	const answerFont = await loadBitmapFont();
	const heart = await loadImageAsset("IMAGE_SOUL_BLUR_0.png");
	const CHARACTER_IDS = ["ralsei", "susie", "kris", "noelle"] as const;
	type CharacterId = (typeof CHARACTER_IDS)[number];
	// Preloaded preview sprites, keyed by character id.
	const characterSprites: Record<CharacterId, Image> = {
		ralsei: await loadImageAsset("chr/ralsei.png"),
		susie: await loadImageAsset("chr/susie.png"),
		kris: await loadImageAsset("chr/kris.png"),
		noelle: await loadImageAsset("chr/noelle.png")
	};
	const isCharacterId = (value: string | undefined): value is CharacterId =>
		CHARACTER_IDS.includes(value as CharacterId);
	// ---- mutable sequence state ----
	let currentIndex = 0; // position in QUESTIONS
	let visibleChars = 0; // typewriter progress (fractional grapheme count)
	let selection = 0; // highlighted answer index
	let finished = false;
	const answers: BootsequenceAnswers = {
		char: "",
		desktop: "",
		color: "",
		gift: ""
	};
	let dialogueHold = 0; // ms a fully revealed dialogue has stayed on screen
	let loggedCompletion = false; // one-shot guard for the completion log + save
	const textCache = new WeakMap<SequenceEntry, string>();
	const graphemeCache = new WeakMap<SequenceEntry, string[]>();
	let waitElapsed = 0; // ms elapsed inside a "w" entry
	let answerAlpha = 0; // fade alpha of the answer list (0..1)
	const lineCache = new WeakMap<SequenceEntry, string[]>();
	const currentEntry = (): SequenceEntry | FunctionEntry | undefined => QUESTIONS[currentIndex];
	// Resolve an entry's text (it may be a function of the answers so far).
	// NOTE(review): the cache hit check is truthiness-based, so text that
	// resolves to "" is recomputed on every call.
	const resolveText = (entry: QuestionEntry | DialogueEntry): string => {
		const cached = textCache.get(entry);
		if (cached) return cached;
		const rawText = entry.text;
		const resolved = typeof rawText === "function" ? rawText(answers) : rawText;
		textCache.set(entry, resolved);
		return resolved;
	};
	// Cached grapheme split of an entry's text; waits/callbacks have none.
	const graphemesForEntry = (entry: SequenceEntry | undefined): string[] => {
		if (!entry) return [];
		if (entry.t === "w") return [];
		if (entry.t === "f") return [];
		const cached = graphemeCache.get(entry);
		if (cached) return cached;
		const graphemes = Array.from(resolveText(entry));
		graphemeCache.set(entry, graphemes);
		return graphemes;
	};
	// Cached word-wrapped lines at 90% of the logical width.
	const linesForEntry = (entry: SequenceEntry): string[] => {
		if (entry.t === "w") return [];
		if (entry.t === "f") return [];
		const cached = lineCache.get(entry);
		if (cached) return cached;
		const lines = wrapLines(resolveText(entry), questionFont, baseWidth * 0.9);
		lineCache.set(entry, lines);
		return lines;
	};
	// Reset per-entry animation state when moving to the next entry.
	const resetForEntry = () => {
		visibleChars = 0;
		selection = 0;
		dialogueHold = 0;
		waitElapsed = 0;
		answerAlpha = 0;
	};
	const advance = () => {
		currentIndex += 1;
		if (currentIndex >= QUESTIONS.length) {
			finished = true;
		} else {
			resetForEntry();
		}
	};
	// Complete the current entry instantly (finish wait / run callback /
	// reveal all text).
	const skipTypewriter = () => {
		const entry = currentEntry();
		if (!entry) return;
		if (entry.t === "w") {
			waitElapsed = entry.time;
			return;
		}
		if (entry.t === "f") {
			entry.f();
			return;
		}
		visibleChars = graphemesForEntry(entry).length;
	};
	// Confirm key: first press completes the reveal, a further press advances;
	// for questions it also records the selected answer.
	const handleConfirm = () => {
		const entry = currentEntry();
		if (!entry) return;
		if (entry.t === "f") {
			advance();
			return;
		}
		const fullyRevealed =
			entry.t === "w" ? waitElapsed >= entry.time : visibleChars >= graphemesForEntry(entry).length;
		if (!fullyRevealed) {
			skipTypewriter();
			return;
		}
		if (entry.t === "d") {
			advance();
			return;
		}
		if (entry.t === "w") {
			advance();
			return;
		}
		const picked = entry.answers[selection];
		if (picked) {
			if (isAnswerKey(entry.id)) {
				answers[entry.id] = picked.value;
			}
			// Some answers trigger immediate side effects.
			if (entry.id === "char" && isCharacterId(picked.value)) {
				setChar(picked.value);
			}
			if (entry.id === "desktop") {
				setDesktop(picked.value as any);
			}
			console.debug(`[debug] [bootsequence/questions] answer ${entry.id}: ${picked.value} (${picked.text})`);
		}
		advance();
	};
	// Up/down selection, wrapping; only active once a question is fully revealed.
	const handleMove = (dir: -1 | 1) => {
		const entry = currentEntry();
		if (!entry || entry.t !== "q") return;
		const fullyRevealed = visibleChars >= graphemesForEntry(entry).length;
		if (!fullyRevealed) return;
		const next = (selection + dir + entry.answers.length) % entry.answers.length;
		selection = next;
	};
	// Per-frame state advance: waits, callbacks, typewriter reveal, dialogue
	// auto-advance and answer-list fade. Persists answers once finished.
	const update = (deltaMs: number) => {
		if (finished) {
			// NOTE(review): this logs on every frame after completion.
			console.debug("[debug] [bootsequence/questions] finish", deltaMs, finished, loggedCompletion)
			if (!loggedCompletion) {
				loggedCompletion = true;
				console.info("[debug] [bootsequence/questions] finished questions", answers);
				writeFileSync(join(homedir(), ".deltaboot.json"), JSON.stringify(answers))
			}
			return;
		}
		const entry = currentEntry();
		if (!entry) return;
		if (entry.t === "w") {
			waitElapsed += deltaMs;
			if (waitElapsed >= entry.time) advance();
			return;
		}
		if (entry.t === "f") {
			// NOTE(review): the callback runs every frame until something else
			// advances past this entry — confirm this is intended.
			entry.f();
			return;
		}
		const totalGraphemes = graphemesForEntry(entry).length;
		if (TYPEWRITER_DISABLED) {
			visibleChars = totalGraphemes;
		} else {
			const step = (deltaMs / 1000) * TYPEWRITER_SPEED;
			visibleChars = Math.min(totalGraphemes, visibleChars + step);
		}
		const fullyRevealed = visibleChars >= totalGraphemes;
		if (entry.t === "d" && fullyRevealed) {
			dialogueHold += deltaMs;
			if (dialogueHold >= DIALOGUE_HOLD_MS) {
				advance();
			}
		}
		// Fade the answer list in only while a question is fully revealed.
		const targetAlpha =
			entry.t === "q" && fullyRevealed
				? 1
				: 0;
		const delta = deltaMs / ANSWER_FADE_MS;
		if (targetAlpha > answerAlpha) {
			answerAlpha = Math.min(targetAlpha, answerAlpha + delta);
		} else {
			answerAlpha = Math.max(targetAlpha, answerAlpha - delta);
		}
	};
	// Draw the (partially revealed) prompt text, with a cheap multi-offset
	// "blur" pass under each glyph.
	const renderQuestionText = (ctx: CanvasRenderingContext2D, entry: SequenceEntry) => {
		// NOTE(review): `graphemes` is unused below (it only warms the cache).
		const graphemes = graphemesForEntry(entry);
		const visibleCount = Math.floor(visibleChars);
		const linesFull = linesForEntry(entry);
		let remaining = visibleCount;
		const startX = baseWidth * 0.08;
		const startY = baseHeight * 0.04;
		for (let i = 0; i < linesFull.length; i++) {
			const fullLine = linesFull[i] ?? "";
			const lineGraphemes = Array.from(fullLine);
			// Spend the remaining visible-character budget line by line.
			const take = Math.max(0, Math.min(lineGraphemes.length, remaining));
			remaining = Math.max(0, remaining - take);
			const line = lineGraphemes.slice(0, take).join("");
			const y = startY + i * questionFont.lineHeight;
			let cursor = startX;
			for (const ch of line) {
				const glyph = questionFont.glyphs.get(ch.codePointAt(0) ?? 0);
				const glyphWidth = glyph?.shift ?? 8;
				const drawX = cursor + (glyph?.offset ?? 0);
				ctx.save();
				ctx.globalAlpha = 0.3;
				for (const [ox, oy] of BLUR_OFFSETS) {
					drawBitmapTextPerGlyph(ctx, questionFont, ch, (drawX + ox), (y + oy) - 15, { align: "left" });
				}
				ctx.restore();
				drawBitmapTextPerGlyph(ctx, questionFont, ch, drawX, y - 15, { align: "left" });
				cursor += glyphWidth;
			}
		}
	};
	// Draw the answer list (with fade alpha) plus the heart cursor on the
	// active row.
	const renderAnswers = (
		ctx: CanvasRenderingContext2D,
		answersList: QuestionAnswer[],
		visible: boolean
	) => {
		if (!visible && answerAlpha <= 0) return;
		const startX = baseWidth * 0.28;
		const startY = baseHeight * 0.45;
		const alpha = answerAlpha;
		for (let i = 0; i < answersList.length; i++) {
			const answer = answersList[i]!;
			const y = startY + i * answerFont.lineHeight * 1.1;
			const isActive = i === selection;
			const color = "white"; // NOTE(review): unused — consider removing
			let cursor = startX;
			ctx.save();
			ctx.globalAlpha = alpha;
			for (const ch of answer.text) {
				const glyph = answerFont.glyphs.get(ch.codePointAt(0) ?? 0);
				const glyphWidth = glyph?.shift ?? 8;
				const drawX = cursor + (glyph?.offset ?? 0);
				ctx.save();
				ctx.globalAlpha = alpha * 0.3;
				for (const [ox, oy] of BLUR_OFFSETS) {
					drawBitmapTextPerGlyph(ctx, answerFont, ch, (drawX + ox) - 30, y + oy, { align: "left" });
				}
				ctx.restore();
				drawBitmapTextPerGlyph(ctx, answerFont, ch, drawX - 30, y, { align: "left" });
				cursor += glyphWidth;
			}
			ctx.restore();
			if (isActive) {
				const heartX = startX - heart.width * HEART_SCALE - 12;
				const heartY = y - heart.height * HEART_SCALE * 0.2;
				ctx.save();
				ctx.globalAlpha = alpha;
				drawHeart(ctx, heart, heartX - 25, heartY + 2);
				ctx.restore();
			}
		}
	};
	// Dialogue entries render exactly like question text (no answer list).
	const renderDialogue = (ctx: CanvasRenderingContext2D, entry: DialogueEntry) => {
		renderQuestionText(ctx, entry);
	};
	// Centered, slightly transparent preview sprite of the given character.
	const renderCharacterPreview = (ctx: CanvasRenderingContext2D, character: CharacterId) => {
		const sprite = characterSprites[character];
		if (!sprite) return;
		const maxWidth = baseWidth * 0.35;
		const maxHeight = baseHeight * 0.55;
		// Fit inside the max box, never upscaling beyond 2x.
		const scale = Math.min(2, Math.min(maxWidth / sprite.width, maxHeight / sprite.height));
		const drawWidth = sprite.width * scale;
		const drawHeight = sprite.height * scale;
		const drawX = (baseWidth - drawWidth) / 2;
		const drawY = (baseHeight - drawHeight) / 2;
		ctx.save();
		ctx.globalAlpha = 0.9;
		(ctx as any).drawImage(sprite, drawX + 30, drawY + 30, drawWidth, drawHeight);
		ctx.restore();
	};
	// Frame render: optional character preview behind the text, then the
	// current entry's text and (for questions) its answers.
	const render = (ctx: CanvasRenderingContext2D) => {
		if (finished) return;
		const entry = currentEntry();
		if (!entry) return;
		const selectedChar = answers["char"];
		// While the "char" question is fully revealed, preview the hovered
		// answer; otherwise show the already-chosen character (if any).
		const showCharacter =
			entry.t === "q" && entry.id === "char" && visibleChars >= graphemesForEntry(entry).length
				? entry.answers[selection]?.value
				: selectedChar;
		if (isCharacterId(showCharacter)) {
			renderCharacterPreview(ctx, showCharacter);
		}
		if (entry.t === "w") return;
		if (entry.t === "f") return;
		if (entry.t === "d") {
			renderDialogue(ctx, entry);
		} else {
			renderQuestionText(ctx, entry);
			const fullyRevealed = visibleChars >= graphemesForEntry(entry).length;
			renderAnswers(ctx, entry.answers, fullyRevealed);
		}
	};
	// Keyboard input: Ctrl skips the reveal, arrows move the selection,
	// Enter/Z/Space confirm.
	const handleKey = (input: KeyInput) => {
		if (finished) return;
		const key = (input.key ?? "").toLowerCase();
		const sc = input.scancode;
		const ctrlHeld = input.ctrl > 0 || key === "control" || key === "ctrl";
		if (ctrlHeld) {
			skipTypewriter();
			return;
		}
		if (
			key === "arrowup" ||
			key === "up" ||
			sc === sdl.keyboard.SCANCODE.UP
		) {
			handleMove(-1);
			return;
		}
		if (
			key === "arrowdown" ||
			key === "down" ||
			sc === sdl.keyboard.SCANCODE.DOWN
		) {
			handleMove(1);
			return;
		}
		if (
			key === "enter" ||
			key === "return" ||
			key === "z" ||
			sc === sdl.keyboard.SCANCODE.RETURN ||
			sc === sdl.keyboard.SCANCODE.SPACE
		) {
			handleConfirm();
		}
	};
	return {
		update,
		render,
		handleKey,
		isFinished: () => finished,
		getAnswers: () => ({ ...answers })
	};
}
// Narrows an arbitrary string to one of the four persisted answer keys.
function isAnswerKey(value: string): value is BootsequenceAnswerKey {
	const keys = ["char", "desktop", "color", "gift"];
	return keys.includes(value);
}
// Draws the heart cursor at (x, y), scaled by the module-level HEART_SCALE.
function drawHeart(ctx: CanvasRenderingContext2D, heart: Image, x: number, y: number): void {
	const drawWidth = heart.width * HEART_SCALE;
	const drawHeight = heart.height * HEART_SCALE;
	ctx.save();
	(ctx as any).drawImage(heart, x, y, drawWidth, drawHeight);
	ctx.restore();
}

40
src/config.ts Normal file
View File

@@ -0,0 +1,40 @@
import { readFileSync } from "node:fs";
const PASSWORD_PATH = "/etc/deltaboot/private/password";
const USERNAME_PATH = "/etc/deltaboot/private/username";
/**
 * Private credentials are expected to be owned by root:
 *   /etc/deltaboot/private   - root 700 (drwx------)
 *   /etc/deltaboot/private/* - root 600 (-rw-------)
 */
/**
 * Best-effort read of the root-owned password file.
 * Missing/unreadable files (and any other failure) yield "" — other
 * failures are additionally logged.
 */
export function getDefaultPassword(): string {
	let contents: string;
	try {
		contents = readFileSync(PASSWORD_PATH, "utf8");
	} catch (error) {
		if (!isIgnorableFsError(error)) {
			console.warn(`[config] failed to read default password from ${PASSWORD_PATH}`, error);
		}
		return "";
	}
	return contents.trim();
}
/**
 * Username from the private config file; falls back to "ralsei" when the
 * file is missing, unreadable, empty, or any read error occurs.
 */
export function getDefaultUser(): string {
	let contents: string;
	try {
		contents = readFileSync(USERNAME_PATH, "utf8");
	} catch (error) {
		if (!isIgnorableFsError(error)) {
			console.warn(`[config] failed to read default user from ${USERNAME_PATH}`, error);
		}
		return "ralsei";
	}
	const value = contents.trim();
	return value || "ralsei";
}
// Missing file (ENOENT) or unreadable file (EACCES) count as "no config".
function isIgnorableFsError(error: unknown): error is { code?: string } {
	const code = (error as { code?: string } | undefined)?.code;
	return ["ENOENT", "EACCES"].includes(code ?? "");
}

47
src/desktop.ts Normal file
View File

@@ -0,0 +1,47 @@
import { getDefaultPassword, getDefaultUser } from "./config";
import { GreetdClient } from "./lib/greetd";
export const GREETD_SOCKET = process.env.GREETD_SOCK ?? "/run/dummy-greetd.sock";
const KNOWN_SESSIONS: Record<string, string> = {
hyprland: "Hyprland",
plasma: "startplasma-wayland"
};
const GREETD_TIMEOUT_MS = Number(process.env.GREETD_TIMEOUT_MS ?? 5_000);
/**
 * Performs a full greetd login using locally-configured credentials.
 * Username fallback chain: $GREETD_USERNAME → private config file →
 * $USER → "greeter". The session command comes from the desktop hint.
 */
export async function handoffToGreetd(desktopHint?: string): Promise<void> {
	console.debug("[desktop] starting greetd handoff", {
		socket: GREETD_SOCKET,
		desktopHint
	});
	let username = process.env.GREETD_USERNAME;
	if (username == null) username = getDefaultUser();
	if (username == null) username = process.env.USER;
	if (username == null) username = "greeter";
	const password = getDefaultPassword();
	const sessionCommand = resolveSessionCommand(desktopHint);
	console.debug("[desktop] using credentials", { username, sessionCommand });
	const client = new GreetdClient({
		ipcSocketPath: GREETD_SOCKET,
		timeoutMs: GREETD_TIMEOUT_MS
	});
	await client.login({
		username,
		password,
		cmd: sessionCommand,
		env: []
	});
}
/**
 * Maps a desktop hint (argument, $DESKTOP_SESSION_FRIENDLY_NAME or
 * $XDG_CURRENT_DESKTOP) to the command greetd should exec. Known names
 * are translated via KNOWN_SESSIONS; unknown non-empty hints pass through.
 * @throws when no hint is available at all.
 */
function resolveSessionCommand(desktopHint?: string): string {
	const candidate =
		desktopHint ??
		process.env.DESKTOP_SESSION_FRIENDLY_NAME ??
		process.env.XDG_CURRENT_DESKTOP ??
		"";
	const trimmed = candidate.trim();
	const mapped = KNOWN_SESSIONS[trimmed.toLowerCase()];
	if (mapped) return mapped;
	if (!trimmed) {
		throw new Error("No desktop session hint available for greetd handoff");
	}
	return trimmed;
}

3
src/index.ts Normal file
View File

@@ -0,0 +1,3 @@
// Application entry point: boot straight into the device-contact UI.
// Top-level await is relied upon here (the project runs under Bun/ESM —
// see the argv comment in src/renderer/cli.ts).
import { runDeviceContactUI } from "./ui/app";
await runDeviceContactUI();

174
src/intro/text-layer.ts Normal file
View File

@@ -0,0 +1,174 @@
import fs from "fs";
import {
createCanvas,
type Canvas,
type CanvasRenderingContext2D,
type Image
} from "@napi-rs/canvas";
import { loadImageAsset, resolveAssetPath } from "../renderer/assets";
type Glyph = {
x: number;
y: number;
w: number;
h: number;
shift: number;
offset: number;
};
type GlyphMap = Map<number, Glyph>;
const FONT_ATLAS_PATH = "font/fnt_main.png";
const FONT_GLYPHS_PATH = "font/glyphs_fnt_main.csv";
const BLUR_SCALE = 1;
const BLUR_RADIUS = 2;
/**
 * Parses the semicolon-separated glyph metrics CSV into a map keyed by
 * character code. Blank lines are dropped; the first remaining row is
 * treated as metadata and skipped; malformed rows are ignored.
 */
function loadGlyphs(): GlyphMap {
	const csvPath = resolveAssetPath(FONT_GLYPHS_PATH);
	const raw = fs.readFileSync(csvPath, "utf8");
	const rows = raw.split(/\r?\n/).filter((line) => line.trim().length > 0);
	const glyphs: GlyphMap = new Map();
	// Row 0 is metadata; glyph records start at row 1.
	for (const row of rows.slice(1)) {
		const cells = row.split(";");
		if (cells.length < 7) continue;
		const code = Number(cells[0]);
		if (!Number.isFinite(code)) continue;
		glyphs.set(code, {
			x: Number(cells[1]),
			y: Number(cells[2]),
			w: Number(cells[3]),
			h: Number(cells[4]),
			shift: Number(cells[5]),
			offset: Number(cells[6])
		});
	}
	return glyphs;
}
// Line height = tallest glyph in the font plus 4px of separation.
function computeLineHeight(glyphs: GlyphMap): number {
	const heights = [...glyphs.values()].map((glyph) => glyph.h);
	const maxHeight = heights.reduce((tallest, h) => (h > tallest ? h : tallest), 0);
	return maxHeight + 4;
}
// Sum of per-glyph advances; unknown code points contribute zero width.
function measureTextWidth(text: string, glyphs: GlyphMap): number {
	return Array.from(text).reduce(
		(width, ch) => width + (glyphs.get(ch.codePointAt(0) ?? 0)?.shift ?? 0),
		0
	);
}
/**
 * Blits `text` from the font atlas, one glyph at a time, starting at (x, y).
 * The pen advances by each glyph's `shift`; code points without metrics
 * advance a fixed 8px and draw nothing.
 */
function drawBitmapText(
	ctx: CanvasRenderingContext2D,
	text: string,
	atlas: Image,
	glyphs: GlyphMap,
	x: number,
	y: number
): void {
	let penX = x;
	for (const ch of text) {
		const glyph = glyphs.get(ch.codePointAt(0) ?? 0);
		if (glyph === undefined) {
			penX += 8; // no metrics for this code point
			continue;
		}
		const { x: srcX, y: srcY, w, h, shift, offset } = glyph;
		(ctx as any).drawImage(atlas as any, srcX, srcY, w, h, penX + offset, y, w, h);
		penX += shift;
	}
}
export type IntroTextLayer = {
canvas: Canvas;
redraw: (text: string) => void;
};
/**
 * Builds the intro's offscreen text layer: centered bitmap text with a
 * pixelated glow. `redraw(text)` re-renders into the returned canvas:
 * crisp glyphs are drawn to a text canvas, copied into a blur canvas where
 * a CSS-style blur filter is applied, then the blurred copy (alpha 0.9)
 * and the crisp copy are composited with smoothing disabled.
 *
 * @param width  canvas width in pixels
 * @param height canvas height in pixels
 * @param initialText text rendered immediately (may contain newlines)
 */
export async function createIntroTextLayer(
	width: number,
	height: number,
	initialText: string
): Promise<IntroTextLayer> {
	const glyphs = loadGlyphs();
	const lineHeight = computeLineHeight(glyphs);
	const atlas = await loadImageAsset(FONT_ATLAS_PATH);
	const canvas = createCanvas(width, height); // final composite
	const ctx = canvas.getContext("2d");
	const textCanvas = createCanvas(width, height); // crisp text only
	const textCtx = textCanvas.getContext("2d");
	// Blur buffer; NOTE(review): with BLUR_SCALE = 1 this is currently the
	// same resolution as the text canvas, so no actual downscaling happens.
	const blurCanvas = createCanvas(
		Math.max(1, Math.round(width * BLUR_SCALE)),
		Math.max(1, Math.round(height * BLUR_SCALE))
	);
	const blurCtx = blurCanvas.getContext("2d");
	const redraw = (text: string) => {
		textCtx.clearRect(0, 0, width, height);
		textCtx.imageSmoothingEnabled = false;
		blurCtx.clearRect(0, 0, blurCanvas.width, blurCanvas.height);
		const lines = text.split(/\r?\n/);
		// Center the block vertically; each line is centered horizontally.
		const totalHeight = lines.length * lineHeight;
		const startY = (height - totalHeight) / 2;
		for (let i = 0; i < lines.length; i++) {
			const line = lines[i] ?? "";
			const textWidth = measureTextWidth(line, glyphs);
			const x = (width - textWidth) / 2;
			const y = startY + i * lineHeight;
			drawBitmapText(textCtx, line, atlas, glyphs, x, y);
		}
		// Pixelated blur: downscale the text, blur at the lower resolution, upscale without smoothing.
		blurCtx.imageSmoothingEnabled = false;
		blurCtx.filter = `blur(${BLUR_RADIUS}px)`;
		blurCtx.drawImage(
			textCanvas as any,
			0,
			0,
			blurCanvas.width,
			blurCanvas.height
		);
		blurCtx.filter = "none";
		// Composite: blurred glow underneath, crisp text on top.
		ctx.clearRect(0, 0, width, height);
		ctx.imageSmoothingEnabled = false;
		ctx.globalAlpha = 0.9;
		ctx.drawImage(blurCanvas as any, 0, 0, width, height);
		ctx.globalAlpha = 1;
		ctx.drawImage(textCanvas as any, 0, 0, width, height);
	};
	redraw(initialText);
	return { canvas, redraw };
}

260
src/lib/greetd.ts Normal file
View File

@@ -0,0 +1,260 @@
import net from "node:net";
import os from "node:os";
export interface GreetdLoginOptions {
username: string;
password: string;
cmd: string | string[];
env?: string[];
}
export const GREETD_IPC_SOCKET_PATH_ENV_NAME = "GREETD_SOCK";
export type AuthenticationMsgType = "visible" | "secret" | "info" | "error";
export type ResponseErrorType = "auth_error" | "error";
export type Request =
| { type: "create_session"; username: string }
| { type: "post_auth_message_response"; response: string | null }
| { type: "start_session"; cmd: string[]; env: string[] }
| { type: "cancel_session" };
export type Response =
| { type: "success" }
| { type: "error"; error_type: ResponseErrorType; description: string }
| { type: "auth_message"; auth_message_type: AuthenticationMsgType; auth_message: string };
type Endianness = "LE" | "BE";
// Byte order of the host CPU ("LE" or "BE"); the greetd frame length prefix
// below is encoded/decoded in this native order.
function getNativeEndianness(): Endianness {
	return os.endianness();
}
// Stores `value` as an unsigned 32-bit integer with the requested byte order.
function writeU32(buffer: Buffer, value: number, endian: Endianness, offset = 0): void {
	const u32 = value >>> 0; // coerce to unsigned 32-bit
	if (endian === "BE") {
		buffer.writeUInt32BE(u32, offset);
	} else {
		buffer.writeUInt32LE(u32, offset);
	}
}
// Loads an unsigned 32-bit integer using the requested byte order.
function readU32(buffer: Buffer, endian: Endianness, offset = 0): number {
	if (endian === "BE") return buffer.readUInt32BE(offset);
	return buffer.readUInt32LE(offset);
}
function parseResponse(value: unknown): Response {
if (!value || typeof value !== "object") {
throw new Error("Invalid greetd response: not an object");
}
const record = value as Record<string, unknown>;
const type = record.type;
if (type === "success") {
return { type: "success" };
}
if (type === "error") {
const errorType = record.error_type;
const description = record.description;
if (errorType !== "auth_error" && errorType !== "error") {
throw new Error(`Invalid greetd response: unknown error_type ${String(errorType)}`);
}
if (typeof description !== "string") {
throw new Error("Invalid greetd response: missing description");
}
return { type: "error", error_type: errorType, description };
}
if (type === "auth_message") {
const authMessageType = record.auth_message_type;
const authMessage = record.auth_message;
if (
authMessageType !== "visible" &&
authMessageType !== "secret" &&
authMessageType !== "info" &&
authMessageType !== "error"
) {
throw new Error(
`Invalid greetd response: unknown auth_message_type ${String(authMessageType)}`
);
}
if (typeof authMessage !== "string") {
throw new Error("Invalid greetd response: missing auth_message");
}
return { type: "auth_message", auth_message_type: authMessageType, auth_message: authMessage };
}
throw new Error(`Invalid greetd response type: ${String(type)}`);
}
/**
 * Settles with `promise`, or rejects with `new Error(message)` once
 * `timeoutMs` elapses first. A non-finite or non-positive timeout disables
 * the race and returns the promise untouched. The timer is cleared as soon
 * as the underlying promise settles and is unref'd so it never keeps the
 * process alive on its own.
 */
function withTimeout<T>(promise: Promise<T>, timeoutMs: number, message: string): Promise<T> {
	if (!Number.isFinite(timeoutMs) || timeoutMs <= 0) return promise;
	let timeout: NodeJS.Timeout | undefined;
	return Promise.race([
		promise.finally(() => {
			if (timeout) clearTimeout(timeout);
		}),
		new Promise<T>((_, reject) => {
			timeout = setTimeout(() => reject(new Error(message)), timeoutMs).unref();
		})
	]);
}
/**
 * Low-level greetd IPC transport over a Unix socket.
 *
 * Wire format in both directions: a 4-byte u32 payload length (host byte
 * order by default), followed by that many bytes of UTF-8 JSON. Incoming
 * bytes are reassembled into frames; complete frames are delivered to
 * readers in FIFO order, buffering either frames or waiting readers.
 */
export class GreetdIPC {
	private readonly socket: net.Socket;
	private readonly endian: Endianness;
	// Unparsed received bytes (may end with a partial frame).
	private buffer = Buffer.alloc(0);
	// Complete frames received before anyone called readFrame().
	private frameQueue: Buffer[] = [];
	// Pending readFrame() callers waiting for the next frame.
	private frameWaiters: Array<{ resolve: (frame: Buffer) => void; reject: (err: unknown) => void }> =
		[];
	// Non-null once closed; the error every subsequent operation receives.
	private closedError: unknown | null = null;
	private constructor(socket: net.Socket, endian: Endianness) {
		this.socket = socket;
		this.endian = endian;
		socket.on("data", (chunk: Buffer) => {
			const data = Buffer.from(chunk);
			this.buffer = this.buffer.length === 0 ? data : Buffer.concat([this.buffer, data]);
			this.drainFrames();
		});
		// Any socket error or close tears the transport down.
		socket.on("error", (error) => this.closeWithError(error));
		socket.on("close", () => this.closeWithError(new Error("greetd socket closed")));
	}
	/**
	 * Connects to greetd at `socketPath`, falling back to $GREETD_SOCK.
	 * @throws when neither a path nor the env var is set, or on connect failure.
	 */
	static async new(socketPath: string | null, endian: Endianness = getNativeEndianness()): Promise<GreetdIPC> {
		const path =
			socketPath ??
			(process.env[GREETD_IPC_SOCKET_PATH_ENV_NAME] as string | undefined) ??
			null;
		if (!path) {
			throw new Error(`${GREETD_IPC_SOCKET_PATH_ENV_NAME} is not set and no socketPath was provided`);
		}
		const socket = net.createConnection({ path });
		await new Promise<void>((resolve, reject) => {
			socket.once("connect", resolve);
			socket.once("error", reject);
		});
		return new GreetdIPC(socket, endian);
	}
	/** Closes the transport; pending readers reject with "socket closed". */
	close(): void {
		this.closeWithError(null);
	}
	// Idempotent teardown: reject all waiters, drop buffered data, destroy
	// the socket. `closedError` doubles as the "already closed" flag.
	private closeWithError(error: unknown | null): void {
		if (this.closedError !== null) return;
		this.closedError = error ?? new Error("greetd socket closed");
		for (const waiter of this.frameWaiters.splice(0)) {
			waiter.reject(this.closedError);
		}
		this.frameQueue = [];
		this.buffer = Buffer.alloc(0);
		this.socket.destroy();
	}
	// Peels complete length-prefixed frames off the receive buffer, handing
	// each to a waiting reader or queueing it.
	private drainFrames(): void {
		while (this.buffer.length >= 4) {
			const payloadLen = readU32(this.buffer, this.endian, 0);
			if (!Number.isFinite(payloadLen) || payloadLen < 0) {
				this.closeWithError(new Error("Invalid greetd frame length"));
				return;
			}
			const totalLen = 4 + payloadLen;
			// Partial frame: wait for more data.
			if (this.buffer.length < totalLen) return;
			const frame = this.buffer.subarray(4, totalLen);
			this.buffer = this.buffer.subarray(totalLen);
			if (this.frameWaiters.length > 0) {
				const waiter = this.frameWaiters.shift();
				waiter?.resolve(frame);
			} else {
				this.frameQueue.push(frame);
			}
		}
	}
	// Next raw frame: served from the queue if available, else waits.
	private async readFrame(): Promise<Buffer> {
		if (this.closedError) throw this.closedError;
		if (this.frameQueue.length > 0) return this.frameQueue.shift() as Buffer;
		return await new Promise<Buffer>((resolve, reject) => {
			this.frameWaiters.push({ resolve, reject });
		});
	}
	/** Serializes `request` as JSON and writes it as one length-prefixed frame. */
	async sendMsg(request: Request): Promise<void> {
		if (this.closedError) throw this.closedError;
		const payload = Buffer.from(JSON.stringify(request), "utf8");
		const header = Buffer.alloc(4);
		writeU32(header, payload.length, this.endian, 0);
		const msg = payload.length === 0 ? header : Buffer.concat([header, payload]);
		await new Promise<void>((resolve, reject) => {
			this.socket.write(msg, (err) => (err ? reject(err) : resolve()));
		});
	}
	/**
	 * Reads the next frame and parses it as a greetd Response.
	 * @throws on malformed JSON or a payload that fails protocol validation.
	 */
	async readMsg(): Promise<Response> {
		const frame = await this.readFrame();
		let parsed: unknown;
		try {
			parsed = JSON.parse(frame.toString("utf8"));
		} catch (error) {
			throw new Error(`Failed to parse greetd JSON response: ${String(error)}`);
		}
		return parseResponse(parsed);
	}
}
/**
 * High-level greetd login client built on GreetdIPC.
 */
export class GreetdClient {
	private ipcSocketPath: string; // path to the greetd control socket
	private timeoutMs: number; // per-response timeout for the login flow
	constructor(options: { ipcSocketPath?: string; timeoutMs?: number } = {}) {
		const { ipcSocketPath = "/run/greetd.sock", timeoutMs = 5_000 } = options;
		this.ipcSocketPath = ipcSocketPath;
		this.timeoutMs = timeoutMs;
	}
	/**
	 * Runs the full greetd conversation: create_session, then answers each
	 * auth_message (password for "secret", "" for "visible", null for
	 * info/error messages), then start_session with `cmd`/`env`.
	 * Resolves once start_session succeeds; the socket is always closed.
	 *
	 * @throws on missing username/cmd, any greetd "error" response, or a
	 *         per-response timeout.
	 */
	async login(options: GreetdLoginOptions): Promise<void> {
		if (!options.username) throw new Error("username is a required parameter.");
		const cmd = Array.isArray(options.cmd) ? options.cmd : [options.cmd];
		if (cmd.length === 0 || cmd.every((part) => !part.trim())) {
			throw new Error("cmd is a required parameter.");
		}
		const ipc = await GreetdIPC.new(this.ipcSocketPath);
		try {
			// `stage` only affects the timeout error text.
			let stage: "create_session" | "start_session" = "create_session";
			await ipc.sendMsg({ type: "create_session", username: options.username });
			while (true) {
				const response = await withTimeout(
					ipc.readMsg(),
					this.timeoutMs,
					`greetd timeout waiting for ${stage} response`
				);
				if (response.type === "error") {
					throw new Error(`greetd error: ${response.description}`);
				}
				if (response.type === "auth_message") {
					// PAM-style prompt: secret → password, visible → empty
					// string, info/error → acknowledge with null.
					const reply =
						response.auth_message_type === "secret"
							? (options.password ?? "")
							: response.auth_message_type === "visible"
								? ""
								: null;
					await ipc.sendMsg({ type: "post_auth_message_response", response: reply });
					continue;
				}
				if (response.type === "success") {
					if (stage === "create_session") {
						// Auth complete — request the actual session start.
						stage = "start_session";
						await ipc.sendMsg({ type: "start_session", cmd, env: options.env ?? [] });
						continue;
					}
					return; // start_session succeeded
				}
			}
		} finally {
			ipc.close();
		}
	}
}

15
src/renderer/assets.ts Normal file
View File

@@ -0,0 +1,15 @@
import path from "path";
import { loadImage, type Image } from "@napi-rs/canvas";
const ASSET_ROOT = path.resolve(__dirname, "..", "..", "asset");
/** Absolute path of a file under the repository's `asset/` directory. */
export function resolveAssetPath(relativePath: string): string {
	return path.join(ASSET_ROOT, relativePath);
}
/** Decodes an image asset (path relative to the asset root) into a canvas Image. */
export async function loadImageAsset(relativePath: string): Promise<Image> {
	console.debug("[debug] [renderer/assets] loadImageAsset " + relativePath)
	const fullPath = resolveAssetPath(relativePath);
	return await loadImage(fullPath);
}

104
src/renderer/cli.ts Normal file
View File

@@ -0,0 +1,104 @@
export type CliConfig = {
rendererId?: string;
debugGlobalHud: boolean;
debugRendererHud: boolean;
crashRecoverySession?: string | true;
errorScreenRequested?: boolean;
errorScreenMessage?: string;
errorScreenTitle?: string;
errorScreenHint?: string;
debugLogFile?: string;
helpRequested: boolean;
};
/**
 * Parses process argv into the renderer CLI configuration.
 *
 * Supported flags (value-taking flags accept both `--flag value` and
 * `--flag=value` forms):
 *   --help / -h, --renderer, --debug, --debug-global, --debug-renderer,
 *   --error-screen [message], --error-title, --error-hint,
 *   --debug-log-file, --crash-recovery [session] (defaults to "Hyprland").
 *
 * In the `--flag value` form a value starting with "--" is not consumed.
 * Unrecognized arguments are ignored.
 */
export function parseCli(argv: string[]): CliConfig {
	// Bun passes: [bunPath, scriptPath, ...]
	const args = argv.slice(2);
	const config: CliConfig = {
		debugGlobalHud: false,
		debugRendererHud: false,
		helpRequested: false
	};
	// Value following `--flag`, or undefined if missing / looks like a flag.
	const nextValue = (index: number): string | undefined => {
		const next = args[index + 1];
		return next && !next.startsWith("--") ? next : undefined;
	};
	// Value of a `--flag=value` argument. Fix: previously `--renderer=` used
	// `split("=")[1]`, silently dropping everything after a second "=";
	// all inline-value flags now keep "=" inside the value.
	const inlineValue = (arg: string): string => arg.split("=").slice(1).join("=");
	for (let i = 0; i < args.length; i++) {
		const arg = args[i] ?? "";
		if (arg === "--help" || arg === "-h") {
			config.helpRequested = true;
			continue;
		}
		if (arg === "--renderer") {
			const value = nextValue(i);
			if (value !== undefined) {
				config.rendererId = value;
				i += 1;
			}
			continue;
		}
		if (arg.startsWith("--renderer=")) {
			config.rendererId = inlineValue(arg);
			continue;
		}
		if (arg === "--debug") {
			// Shorthand for both HUDs.
			config.debugGlobalHud = true;
			config.debugRendererHud = true;
			continue;
		}
		if (arg === "--debug-global") {
			config.debugGlobalHud = true;
			continue;
		}
		if (arg === "--debug-renderer") {
			config.debugRendererHud = true;
			continue;
		}
		if (arg === "--error-screen") {
			config.errorScreenRequested = true;
			const message = nextValue(i);
			if (message !== undefined) {
				config.errorScreenMessage = message;
				i += 1;
			}
			continue;
		}
		if (arg.startsWith("--error-screen=")) {
			config.errorScreenRequested = true;
			config.errorScreenMessage = inlineValue(arg);
			continue;
		}
		if (arg === "--error-title") {
			const title = nextValue(i);
			if (title !== undefined) {
				config.errorScreenTitle = title;
				i += 1;
			}
			continue;
		}
		if (arg.startsWith("--error-title=")) {
			config.errorScreenTitle = inlineValue(arg);
			continue;
		}
		if (arg === "--error-hint") {
			const hint = nextValue(i);
			if (hint !== undefined) {
				config.errorScreenHint = hint;
				i += 1;
			}
			continue;
		}
		if (arg.startsWith("--error-hint=")) {
			config.errorScreenHint = inlineValue(arg);
			continue;
		}
		if (arg === "--debug-log-file") {
			const file = nextValue(i);
			if (file !== undefined) {
				config.debugLogFile = file;
				i += 1;
			}
			continue;
		}
		if (arg.startsWith("--debug-log-file=")) {
			config.debugLogFile = inlineValue(arg);
			continue;
		}
		if (arg === "--crash-recovery") {
			const session = nextValue(i);
			if (session !== undefined) {
				config.crashRecoverySession = session;
				i += 1;
			} else {
				// No explicit session: fall back to the default compositor.
				config.crashRecoverySession = "Hyprland";
			}
			continue;
		}
	}
	return config;
}

103
src/renderer/debug-hud.ts Normal file
View File

@@ -0,0 +1,103 @@
import type { CanvasRenderingContext2D } from "@napi-rs/canvas";
import type { Layout } from "./layout";
export type DebugStats = Record<string, string | number | boolean | undefined>;
export type DebugHudOptions = {
showGlobal: boolean;
showRenderer: boolean;
showCustom?: boolean;
};
export type DebugHudData = {
global: DebugStats | string[];
renderer: {
id: string;
label: string;
stats: DebugStats;
fps: number;
};
custom?: DebugStats;
};
export type DebugHud = {
draw: (ctx: CanvasRenderingContext2D, layout: Layout, data: DebugHudData) => void;
};
/**
 * Builds the on-screen debug HUD. `draw` stacks up to three stat blocks
 * (global, renderer, custom) down the top-left corner, each a translucent
 * box of monospace key/value lines. Which blocks appear is fixed by
 * `options` at creation time.
 */
export function createDebugHud(options: DebugHudOptions): DebugHud {
	const padding = 8;
	const lineHeight = 16;
	const bg = "rgba(0, 0, 0, 0.65)";
	const fg = "yellow"; // global
	const rendererFg = "#ff66cc"; // renderer
	const customFg = "#00c6ff"; // custom
	// Renders one block at (x, y) and reports its size so the caller can
	// stack the next block beneath it. `stats` is either pre-formatted lines
	// or a record rendered as "key: value" under `title`.
	const drawBlock = (
		ctx: CanvasRenderingContext2D,
		x: number,
		y: number,
		title: string,
		stats: DebugStats | string[],
		color: string
	): { width: number; height: number } => {
		let lines: string[] = [];
		if (Array.isArray(stats)) {
			lines = stats;
		} else {
			const keys = Object.keys(stats);
			lines = [title, ...keys.map((k) => `${k}: ${String(stats[k])}`)];
		}
		ctx.font = "14px \"JetBrains Mono\", monospace";
		// Fix: Math.max() over an empty array is -Infinity, which previously
		// produced a non-finite fillRect width when `stats` was an empty
		// string[]; an empty block now collapses to padding only.
		const textWidth =
			lines.length > 0
				? Math.max(...lines.map((l) => ctx.measureText(l).width))
				: 0;
		const height = lines.length * lineHeight + padding * 2;
		const width = textWidth + padding * 2;
		ctx.save();
		ctx.fillStyle = bg;
		ctx.fillRect(x, y, width, height);
		ctx.fillStyle = color;
		ctx.textBaseline = "top";
		lines.forEach((line, i) => {
			ctx.fillText(line, x + padding, y + padding + i * lineHeight);
		});
		ctx.restore();
		return { width, height };
	};
	const draw = (ctx: CanvasRenderingContext2D, layout: Layout, data: DebugHudData) => {
		// Nothing enabled: skip all canvas state changes.
		if (!options.showGlobal && !options.showRenderer && !options.showCustom) return;
		ctx.save();
		ctx.imageSmoothingEnabled = false;
		ctx.globalAlpha = 0.9;
		let cursorY = padding;
		const originX = padding;
		if (options.showGlobal) {
			const { height } = drawBlock(ctx, originX, cursorY, "Global", data.global, fg);
			cursorY += height + padding;
		}
		if (options.showRenderer) {
			const { height } = drawBlock(
				ctx,
				originX,
				cursorY,
				`Renderer: ${data.renderer.label}`,
				{ fps: data.renderer.fps.toFixed(2), ...data.renderer.stats },
				rendererFg
			);
			cursorY += height + padding;
		}
		if (options.showCustom && data.custom) {
			drawBlock(ctx, originX, cursorY, "Custom", data.custom, customFg);
		}
		ctx.restore();
	};
	return { draw };
}

27
src/renderer/fps.ts Normal file
View File

@@ -0,0 +1,27 @@
export type FpsCounter = {
tick: (nowMs: number) => void;
value: number;
};
/**
 * Rolling FPS counter. Call `tick(nowMs)` once per frame with timestamps
 * from a single clock; `value` reports the FPS measured over the most
 * recently completed sample window.
 *
 * Fix: the window start was previously anchored to `Date.now()` at
 * construction, while `tick` receives a caller-supplied timestamp. When the
 * caller's clock has a different epoch (e.g. performance.now()-style
 * timestamps), the window never completed and `value` stayed 0. The window
 * is now anchored to the first tick's timestamp.
 *
 * @param sampleWindowMs minimum window length (ms) before FPS is recomputed
 */
export function createFpsCounter(sampleWindowMs = 500): FpsCounter {
	let lastSampleStart: number | null = null; // set on the first tick
	let frameCount = 0;
	let currentFps = 0;
	const tick = (nowMs: number) => {
		if (lastSampleStart === null) {
			lastSampleStart = nowMs;
		}
		frameCount += 1;
		const elapsed = nowMs - lastSampleStart;
		if (elapsed >= sampleWindowMs) {
			currentFps = (frameCount * 1000) / elapsed;
			frameCount = 0;
			lastSampleStart = nowMs;
		}
	};
	return {
		tick,
		get value() {
			return currentFps;
		}
	};
}

155
src/renderer/index.ts Normal file
View File

@@ -0,0 +1,155 @@
import type { Events } from "@kmamal/sdl";
import {
createCanvas,
type Canvas,
type CanvasRenderingContext2D
} from "@napi-rs/canvas";
import { SDLWindow, type WindowProps } from "./window";
type RenderFrame = (
ctx: CanvasRenderingContext2D,
size: { width: number; height: number }
) => void | Promise<void>;
type RendererOptions = WindowProps & { window?: SDLWindow };
/**
 * Canvas-backed software renderer presented through an SDL window.
 *
 * Owns a @napi-rs/canvas Canvas matching the window size; frames are drawn
 * into the canvas, then blitted to the window from the canvas' raw pixel
 * buffer.
 */
export class Renderer {
	readonly window: SDLWindow;
	readonly canvas: Canvas;
	readonly ctx: CanvasRenderingContext2D;
	#animation: ReturnType<typeof setInterval> | undefined; // ~60fps tick timer (set while run() is active)
	#pixelBuffer: Buffer | undefined; // canvas' raw pixel data
	#stride = 0; // bytes per pixel row, derived from buffer size / height
	#size: { width: number; height: number };
	#stop: (() => void) | undefined; // set while run() is active
	constructor(options: RendererOptions = {}) {
		console.debug("[debug] [renderer] new Renderer")
		const { window: providedWindow, ...windowProps } = options;
		// Adopt a caller-provided window or create one from the remaining props.
		this.window = providedWindow ?? new SDLWindow(windowProps);
		const { width, height } = this.window.size;
		this.#size = { width, height };
		this.canvas = createCanvas(width, height);
		this.ctx = this.canvas.getContext("2d");
		this.ctx.imageSmoothingEnabled = false;
		this.#syncPixelBuffer();
	}
	/** Current logical size (kept in sync by resize()). */
	get size(): { width: number; height: number } {
		return this.#size;
	}
	/** Resizes the backing canvas; skips the reset when dimensions are unchanged. */
	resize(width: number, height: number): void {
		this.#size = { width, height };
		if (this.canvas.width === width && this.canvas.height === height) {
			return;
		}
		this.canvas.width = width;
		this.canvas.height = height;
		// Re-apply context state and re-grab the buffer after the resize.
		this.ctx.imageSmoothingEnabled = false;
		this.#syncPixelBuffer();
	}
	/** Blits the current canvas contents to the SDL window. */
	present(): void {
		if (!this.#pixelBuffer) {
			this.#syncPixelBuffer();
		}
		this.window.renderFromBuffer(
			this.canvas.width,
			this.canvas.height,
			this.#stride,
			this.#pixelBuffer!
		);
	}
	// Re-fetches the canvas' pixel buffer and derives the row stride from
	// its total byte length (guarding against a zero height).
	#syncPixelBuffer(): void {
		this.#pixelBuffer = this.canvas.data();
		this.#stride = Math.floor(
			this.#pixelBuffer.byteLength / Math.max(1, this.canvas.height)
		);
	}
	/** Asks an active run() loop to stop; no-op when not running. */
	requestStop(): void {
		this.#stop?.();
	}
	/**
	 * Runs the render loop: one immediate frame, then ~60 ticks/second plus
	 * extra renders on resize/expose events. Resolves when stopped — via
	 * Escape/Q, window close, or requestStop() — after destroying the window.
	 *
	 * @param renderFrame draws one frame into the context (may be async)
	 */
	async run(renderFrame: RenderFrame): Promise<void> {
		console.debug("[debug] [renderer] starting render")
		const listeners: Array<() => void> = [];
		let rendering = false; // prevents overlapping async frames
		const addListener = <E extends Events.Window.Any["type"]>(
			event: E,
			handler: (event: Extract<Events.Window.Any, { type: E }>) => void
		) => {
			// window.on returns an unsubscribe function; collect for cleanup.
			listeners.push(this.window.on(event, handler));
		};
		const renderOnce = async () => {
			await renderFrame(this.ctx, this.size);
			this.present();
		};
		await renderOnce();
		await new Promise<void>((resolve) => {
			let stopped = false;
			const cleanup = () => {
				if (this.#animation) {
					clearInterval(this.#animation);
					this.#animation = undefined;
				}
				this.#stop = undefined;
				listeners.splice(0).forEach((off) => off());
			};
			// Idempotent stop: cleanup, destroy the window, settle run().
			const stop = () => {
				if (stopped) return;
				stopped = true;
				cleanup();
				this.window.destroy();
				resolve();
			};
			this.#stop = stop;
			// Frame tick; skipped while the previous async frame is in flight.
			const tick = () => {
				if (rendering) return;
				rendering = true;
				void renderOnce().finally(() => {
					rendering = false;
				});
			};
			this.#animation = setInterval(tick, 1000 / 60);
			// Don't let the frame timer alone keep the process alive.
			this.#animation.unref?.();
			addListener("resize", async (event) => {
				this.resize(event.pixelWidth, event.pixelHeight);
				tick();
			});
			addListener("expose", () => {
				tick();
			});
			addListener("keyDown", (event) => {
				if (event.key === "Escape" || event.key === "Q") {
					stop();
				}
			});
			addListener("beforeClose", (event) => {
				// Take over the close: prevent the default, stop gracefully.
				event.prevent();
				stop();
			});
			addListener("close", () => stop());
		});
	}
}
/** Convenience factory equivalent to `new Renderer(options)`. */
export function createRenderer(options: RendererOptions = {}): Renderer {
	return new Renderer(options);
}

65
src/renderer/layout.ts Normal file
View File

@@ -0,0 +1,65 @@
/** Precomputed letterbox + content geometry for one window size. */
export type Layout = {
	// Window size (physical pixels) this layout was computed for.
	width: number;
	height: number;
	// Uniform scale fitting the view rectangle into the window.
	viewScale: number;
	// Letterbox box (the scaled view): size and top-left corner.
	boxWidth: number;
	boxHeight: number;
	boxX: number;
	boxY: number;
	// Uniform scale fitting base-sized content into the letterbox box.
	contentScale: number;
	// Scaled content: size and top-left corner.
	drawWidth: number;
	drawHeight: number;
	x: number;
	y: number;
	// Center of the letterbox box.
	centerX: number;
	centerY: number;
};
/**
 * Builds a memoized layout function: it letterboxes a fixed-aspect view
 * (viewWidth x viewHeight) inside the given window size, then centers
 * base-sized content (baseWidth x baseHeight) inside that box.
 * The most recent result is cached and returned by identity while the
 * window size stays unchanged.
 */
export function createLayoutCalculator(options: {
	baseWidth: number;
	baseHeight: number;
	viewWidth: number;
	viewHeight: number;
}): (size: { width: number; height: number }) => Layout {
	let memo: Layout | undefined;
	return (size: { width: number; height: number }): Layout => {
		const { width, height } = size;
		if (memo !== undefined && memo.width === width && memo.height === height) {
			return memo;
		}
		// Uniform scale that fits the view rectangle inside the window.
		const viewScale = Math.min(width / options.viewWidth, height / options.viewHeight);
		const boxWidth = viewScale * options.viewWidth;
		const boxHeight = viewScale * options.viewHeight;
		const boxX = (width - boxWidth) / 2;
		const boxY = (height - boxHeight) / 2;
		// Uniform scale that fits the base content inside the letterbox box.
		const contentScale = Math.min(boxWidth / options.baseWidth, boxHeight / options.baseHeight);
		const drawWidth = contentScale * options.baseWidth;
		const drawHeight = contentScale * options.baseHeight;
		memo = {
			width,
			height,
			viewScale,
			boxWidth,
			boxHeight,
			boxX,
			boxY,
			contentScale,
			drawWidth,
			drawHeight,
			x: boxX + (boxWidth - drawWidth) / 2,
			y: boxY + (boxHeight - drawHeight) / 2,
			centerX: boxX + boxWidth / 2,
			centerY: boxY + boxHeight / 2
		};
		return memo;
	};
}

View File

@@ -0,0 +1,45 @@
/** Handle to a lazily-loaded resource: load on demand, release explicitly. */
export type LazyResource<T> = {
	// Resolves the resource, loading it on first call and caching afterwards.
	load: () => Promise<T>;
	// Disposes and forgets the cached value (next load() re-runs the loader).
	unload: () => void;
	// True once a value has been loaded and not yet unloaded.
	isLoaded: () => boolean;
};
/**
 * Creates a lazily-loaded, cached resource handle.
 *
 * The loader runs at most once at a time (concurrent load() calls share the
 * in-flight promise) and its result is cached until unload(). Two fixes over
 * the naive version:
 *  - a rejected loader no longer leaves a permanently-rejected promise in
 *    `inflight`; the next load() retries,
 *  - falsy resolved values (0, "", false, null) are cached correctly via an
 *    explicit `hasValue` flag instead of a truthiness check.
 *
 * @param loader  Async producer of the resource.
 * @param dispose Optional synchronous cleanup for the cached value.
 */
export function createLazyResource<T>(
	loader: () => Promise<T>,
	dispose?: (value: T) => void
): LazyResource<T> {
	let cached: T | null = null;
	let hasValue = false;
	let inflight: Promise<T> | null = null;
	const load = async (): Promise<T> => {
		if (hasValue) return cached as T;
		if (inflight) return inflight;
		inflight = (async () => {
			try {
				const value = await loader();
				cached = value;
				hasValue = true;
				return value;
			} finally {
				// Clear on success AND failure so a transient error can be retried.
				inflight = null;
			}
		})();
		return inflight;
	};
	const unload = () => {
		if (hasValue && dispose) {
			try {
				dispose(cached as T);
			} catch (error) {
				// Disposal is best-effort; never let cleanup break the caller.
				console.error("[lazy-resource] failed to dispose resource", error);
			}
		}
		cached = null;
		hasValue = false;
		inflight = null;
	};
	return {
		load,
		unload,
		isLoaded: () => hasValue
	};
}

94
src/renderer/video.ts Normal file
View File

@@ -0,0 +1,94 @@
import { ImageData } from "@napi-rs/canvas";
import { resolveAssetPath } from "./assets";
/** Options controlling ffmpeg decode size, rate and in-memory retention. */
type VideoLoaderOptions = {
	// Target frame width in pixels (ffmpeg scales to this).
	width: number;
	// Target frame height in pixels.
	height: number;
	// Output frame rate; defaults to 30 when omitted.
	fps?: number;
	// Cap on retained frames (sliding window); 0/undefined keeps all frames.
	maxFramesInMemory?: number;
	// Keep only every Nth decoded frame; defaults to 1 (keep all).
	frameSampleStep?: number;
};
/** Fully decoded RGBA frame sequence plus playback metadata. */
export type VideoFrameSequence = {
	width: number;
	height: number;
	fps: number;
	// Total decoded duration in milliseconds (see NOTE in loadVideoFrames).
	durationMs: number;
	frames: ImageData[];
};
/**
 * Decodes a video asset to raw RGBA frames via an ffmpeg subprocess.
 *
 * @param relativePath Asset path, resolved through resolveAssetPath().
 * @param options Target size/fps and optional in-memory limits.
 * @returns Retained frames plus timing metadata.
 * @throws If ffmpeg exits non-zero or no frames are produced.
 */
export async function loadVideoFrames(
	relativePath: string,
	options: VideoLoaderOptions
): Promise<VideoFrameSequence> {
	const targetFps = options.fps ?? 30;
	const assetPath = resolveAssetPath(relativePath);
	// 0 means "keep every frame"; otherwise a sliding window of newest frames.
	const maxFrames = options.maxFramesInMemory ?? 0;
	// Retain only every Nth decoded frame.
	const sampleStep = Math.max(1, options.frameSampleStep ?? 1);
	const ffmpeg = Bun.spawn(
		[
			"ffmpeg",
			"-v", "error",
			"-i", assetPath,
			"-an",
			"-vf", `scale=${options.width}:${options.height}`,
			"-r", `${targetFps}`,
			"-f", "rawvideo",
			"-pix_fmt", "rgba",
			"-"
		],
		{ stdout: "pipe", stderr: "pipe" }
	);
	const frameSize = options.width * options.height * 4;
	const frames: ImageData[] = [];
	// Bytes carried over between chunks that don't align to a frame boundary.
	let residual = new Uint8Array(0);
	let decodedFrameCount = 0;
	// Drain stderr concurrently so a failing ffmpeg can be reported (and can't block).
	const stderrPromise = ffmpeg.stderr ? Bun.readableStreamToText(ffmpeg.stderr) : Promise.resolve("");
	for await (const chunk of ffmpeg.stdout) {
		const merged = new Uint8Array(residual.length + chunk.length);
		merged.set(residual, 0);
		merged.set(chunk, residual.length);
		residual = merged;
		while (residual.length >= frameSize) {
			const frameBytes = residual.slice(0, frameSize);
			residual = residual.slice(frameSize);
			decodedFrameCount += 1;
			if (decodedFrameCount % sampleStep !== 0) {
				continue;
			}
			const clamped = new Uint8ClampedArray(frameBytes.buffer, frameBytes.byteOffset, frameBytes.byteLength);
			const image = new ImageData(clamped, options.width, options.height);
			// Sliding window: drop the oldest retained frame once the cap is hit.
			if (maxFrames > 0 && frames.length >= maxFrames) {
				frames.shift();
			}
			frames.push(image);
		}
	}
	const exitCode = await ffmpeg.exited;
	const stderr = await stderrPromise;
	if (exitCode !== 0) {
		throw new Error(`ffmpeg exited with code ${exitCode}${stderr ? `: ${stderr}` : ""}`);
	}
	if (frames.length === 0) {
		throw new Error("No frames decoded from video");
	}
	// NOTE(review): durationMs is derived from the number of DECODED frames at
	// targetFps, but `frames` may hold fewer entries when frameSampleStep > 1
	// or maxFramesInMemory trims the window — a consumer indexing frames by
	// (t * fps) would then run past the array. Confirm this is intended.
	const effectiveFrameCount = decodedFrameCount;
	return {
		width: options.width,
		height: options.height,
		fps: targetFps,
		durationMs: (effectiveFrameCount / targetFps) * 1000,
		frames
	};
}

98
src/renderer/window.ts Normal file
View File

@@ -0,0 +1,98 @@
import assert from "assert";
import sdl, { type Events, type Sdl } from "@kmamal/sdl";
import { createCanvas, Image, type CanvasRenderingContext2D } from "@napi-rs/canvas";
/** Creation options forwarded to sdl.video.createWindow. */
export type WindowProps = {
	// Window title; defaults to "SDL Application" when omitted.
	title?: string;
	width?: number;
	height?: number;
	visible?: boolean;
	fullscreen?: boolean;
	resizable?: boolean;
	borderless?: boolean;
	alwaysOnTop?: boolean;
};
/**
 * Thin wrapper over an @kmamal/sdl window: creation, RGBA-buffer
 * presentation, icon handling, and event (un)subscription that tolerates
 * sdl bindings exposing either `off` or `removeListener`.
 */
export class SDLWindow {
	#window: Sdl.Video.Window | undefined;
	constructor(props: WindowProps = {}) {
		console.debug("[debug] [renderer/window] new SDLWindow", props);
		this.#window = sdl.video.createWindow({
			...props,
			title: props.title ?? "SDL Application"
		});
		if (process.env.NODE_ENV === "development") {
			// Dev convenience: mirror the live pixel size in the title bar.
			this.#window.on("resize", (e) => {
				this.#window?.setTitle(`${props.title ?? "SDL Application"} [${e.pixelWidth}x${e.pixelHeight}]`);
			});
		}
	}
	/** Current drawable size in physical pixels. */
	get size(): { width: number; height: number } {
		const { pixelWidth, pixelHeight } = this.Window;
		return { width: pixelWidth, height: pixelHeight };
	}
	/**
	 * The underlying SDL window.
	 * @throws {Error} if the window has already been destroyed.
	 */
	get Window(): Sdl.Video.Window {
		// Fix: throw a real Error (was a bare string) so callers get a stack
		// trace and `instanceof Error` checks behave.
		if (!this.#window) throw new Error("Window not present");
		return this.#window;
	}
	/**
	 * Subscribes `handler` to a window event.
	 * @returns An unsubscribe function; it prefers `off` and falls back to
	 *          `removeListener` depending on what the binding exposes.
	 */
	on<EventName extends Events.Window.Any["type"]>(
		event: EventName,
		handler: (
			event: Extract<Events.Window.Any, { type: EventName }>
		) => void
	): () => void {
		const target = this.Window as unknown as {
			on: (event: Events.Window.Any["type"], listener: (event: Events.Window.Any) => void) => void;
			off?: (
				event: Events.Window.Any["type"],
				listener: (event: Events.Window.Any) => void
			) => void;
			removeListener?: (
				event: Events.Window.Any["type"],
				listener: (event: Events.Window.Any) => void
			) => void;
		};
		target.on(event, handler as (event: Events.Window.Any) => void);
		return () => {
			if (typeof target.off === "function") {
				target.off(event, handler as (event: Events.Window.Any) => void);
				return;
			}
			if (typeof target.removeListener === "function") {
				target.removeListener(
					event,
					handler as (event: Events.Window.Any) => void
				);
			}
		};
	}
	/** Presents a raw RGBA buffer; `stride` is bytes per row. */
	renderFromBuffer(width: number, height: number, stride: number, buffer: Buffer): void {
		this.Window.render(width, height, stride, "rgba32", buffer);
	}
	/** Copies the context's pixels and presents them (stride = width * 4). */
	renderFromContext(ctx: CanvasRenderingContext2D): void {
		const { width, height } = this.size;
		const buffer = Buffer.from(ctx.getImageData(0, 0, width, height).data);
		this.renderFromBuffer(width, height, width * 4, buffer);
	}
	/** Rasterizes `image` to RGBA via an offscreen canvas and sets it as the icon. */
	setIconFromImage(image: Image): void {
		const canvas = createCanvas(image.width, image.height);
		const ctx = canvas.getContext("2d");
		ctx.drawImage(image as any, 0, 0);
		const data = ctx.getImageData(0, 0, image.width, image.height).data;
		this.Window.setIcon(image.width, image.height, image.width * 4, "rgba32", Buffer.from(data));
	}
	/** Destroys the SDL window; further access through `Window` throws. */
	destroy(): void {
		this.Window.destroy();
		this.#window = undefined;
	}
}

View File

@@ -0,0 +1,240 @@
import {
createCanvas,
type Canvas,
type CanvasRenderingContext2D
} from "@napi-rs/canvas";
import { createLazyResource } from "../../renderer/lazy-resource";
import type { Layout } from "../../renderer/layout";
import { loadVideoFrames, type VideoFrameSequence } from "../../renderer/video";
import { AudioLoopPlayer } from "../../audio/player";
import type { RendererInstance, RendererProps } from "../types";
// Source video for the looping background: asset path, decode size and rate.
const BACKGROUND_VIDEO = {
	path: "goner_bg_loop.mp4",
	width: 160 * 2,
	height: 90 * 2,
	fps: 30
} as const;
// Env overrides for memory tuning: frame cap (0 = keep all) and sampling step.
const MAX_FRAMES_IN_MEMORY = Number(process.env.GONER_VIDEO_MAX_FRAMES ?? "0");
const FRAME_SAMPLE_STEP = Number(process.env.GONER_VIDEO_FRAME_SAMPLE ?? "1");
// Length of the crossfade blended in at the end of each video loop.
const VIDEO_LOOP_CROSSFADE_MS = 600;
// How long the initial black overlay takes to fade down.
const OVERLAY_FADE_DURATION_MS = 2500;
// Everything held by the lazy resource: decoded video, two offscreen
// canvases (current frame + crossfade blend), and an optional audio loop.
type GonerBackgroundResources = {
	video: VideoFrameSequence;
	videoCanvas: Canvas;
	videoCtx: CanvasRenderingContext2D;
	videoBlendCanvas: Canvas;
	videoBlendCtx: CanvasRenderingContext2D;
	audio?: AudioLoopPlayer;
};
/**
 * Background renderer: loops a pre-decoded video behind a fading black
 * overlay, with a best-effort audio loop. Heavy assets are loaded lazily on
 * first render and released in unload().
 */
export function createDeviceContactBackgroundRenderer(_props: RendererProps = {}): RendererInstance {
	console.debug(`[debug] [renderers/device_contact] new RendererInstance`, _props);
	// Wall-clock start of the overlay fade (reset on unload).
	let overlayStart = Date.now();
	// Playback position within the looping video, in milliseconds.
	let videoTimeMs = 0;
	// Metadata of the last loaded video, kept so debug stats survive unload.
	let lastVideoMeta: { durationMs: number; width: number; height: number; fps: number } | undefined;
	const resources = createLazyResource<GonerBackgroundResources>(
		async () => {
			const video = await loadVideoFrames(BACKGROUND_VIDEO.path, {
				width: BACKGROUND_VIDEO.width,
				height: BACKGROUND_VIDEO.height,
				fps: BACKGROUND_VIDEO.fps,
				// Guard against NaN from malformed env values.
				maxFramesInMemory: Number.isFinite(MAX_FRAMES_IN_MEMORY) ? MAX_FRAMES_IN_MEMORY : 0,
				frameSampleStep: Number.isFinite(FRAME_SAMPLE_STEP) && FRAME_SAMPLE_STEP > 0 ? FRAME_SAMPLE_STEP : 1
			});
			lastVideoMeta = {
				durationMs: video.durationMs,
				width: video.width,
				height: video.height,
				fps: video.fps
			};
			// Offscreen canvases: one for the current frame, one for the crossfade blend.
			const videoCanvas = createCanvas(video.width, video.height);
			const videoCtx = videoCanvas.getContext("2d");
			const videoBlendCanvas = createCanvas(video.width, video.height);
			const videoBlendCtx = videoBlendCanvas.getContext("2d");
			// Audio is best-effort: a failure to start must not break rendering.
			let audio: AudioLoopPlayer | undefined;
			try {
				audio = await AudioLoopPlayer.fromAsset("AUDIO_ANOTHERHIM.ogg");
				audio.start();
			} catch (error) {
				console.error("[renderers/device_contact] failed to start audio loop", error);
			}
			return {
				video,
				videoCanvas,
				videoCtx,
				videoBlendCanvas,
				videoBlendCtx,
				audio
			};
		},
		(resource) => {
			// Dispose: stop audio, drop decoded frames, shrink canvases to free memory.
			if (resource.audio) {
				try {
					resource.audio.stop();
				} catch (error) {
					console.error("[renderers/device_contact] failed to stop audio loop", error);
				}
			}
			resource.video.frames.length = 0;
			resource.videoCanvas.width = 0;
			resource.videoCanvas.height = 0;
			resource.videoBlendCanvas.width = 0;
			resource.videoBlendCanvas.height = 0;
		}
	);
	const render = async ({ ctx, deltaMs, layout }: { ctx: CanvasRenderingContext2D; deltaMs: number; layout: Layout; }) => {
		const {
			video,
			videoCanvas,
			videoCtx,
			videoBlendCanvas,
			videoBlendCtx
		} = await resources.load();
		// Advance playback, never backwards, and wrap at the loop boundary.
		const clampedDelta = Math.max(0, deltaMs);
		videoTimeMs += clampedDelta;
		while (videoTimeMs >= video.durationMs) {
			videoTimeMs -= video.durationMs;
		}
		drawVideoBackground(
			ctx,
			layout,
			videoTimeMs,
			video,
			videoCanvas,
			videoCtx,
			videoBlendCanvas,
			videoBlendCtx
		);
		drawOverlay(ctx, layout, overlayStart);
	};
	return {
		id: "device_contact",
		label: "DEVICE CONTACT",
		render,
		unload: () => {
			// Reset animation state so a later reload starts the fade/loop fresh.
			overlayStart = Date.now();
			videoTimeMs = 0;
			resources.unload();
		},
		isLoaded: resources.isLoaded,
		getDebugStats: () => {
			const meta = lastVideoMeta;
			return {
				loaded: resources.isLoaded(),
				videoMs: Number.isFinite(videoTimeMs) ? videoTimeMs.toFixed(2) : 0,
				durationMs: meta?.durationMs,
				fps: meta?.fps,
				width: meta?.width,
				height: meta?.height,
				maxFrames: MAX_FRAMES_IN_MEMORY || "all",
				sampleStep: FRAME_SAMPLE_STEP
			};
		},
		getDebugHudStats: () => ({
			offsetMs: videoTimeMs.toFixed(0),
			sample: FRAME_SAMPLE_STEP
		})
	} as RendererInstance;
}
/**
 * Dims the whole frame with a black overlay whose alpha fades from 1 down to
 * 0.3 over OVERLAY_FADE_DURATION_MS, starting at `overlayStart` (epoch ms).
 */
function drawOverlay(
	ctx: CanvasRenderingContext2D,
	layout: Layout,
	overlayStart: number
) {
	const elapsed = Date.now() - overlayStart;
	const progress = Math.min(1, Math.max(0, elapsed / OVERLAY_FADE_DURATION_MS));
	// Alpha ramps 1 -> 0.3 as the fade completes.
	const alpha = 1 - 0.7 * progress;
	ctx.save();
	ctx.globalAlpha = alpha;
	ctx.fillStyle = "black";
	ctx.fillRect(0, 0, layout.width, layout.height);
	ctx.restore();
}
/**
 * Draws the current video frame cover-scaled and center-cropped into the
 * layout's letterbox box, crossfading with the loop's start frames during the
 * final VIDEO_LOOP_CROSSFADE_MS of each loop.
 */
function drawVideoBackground(
	ctx: CanvasRenderingContext2D,
	layout: Layout,
	videoTimeMs: number,
	video: VideoFrameSequence,
	videoCanvas: Canvas,
	videoCtx: CanvasRenderingContext2D,
	videoBlendCanvas: Canvas,
	videoBlendCtx: CanvasRenderingContext2D
) {
	const {
		boxX,
		boxY,
		boxWidth,
		boxHeight
	} = layout;
	const elapsed = videoTimeMs % video.durationMs;
	const frameIndex = Math.floor((elapsed / 1000) * video.fps) % video.frames.length;
	const frame = video.frames[frameIndex] ?? video.frames[0];
	// Near the end of the loop, blend in frames from the loop's beginning.
	let blendFrame: VideoFrameSequence["frames"][number] | undefined;
	let blendAlpha = 0;
	if (elapsed >= video.durationMs - VIDEO_LOOP_CROSSFADE_MS) {
		const fadeT = (elapsed - (video.durationMs - VIDEO_LOOP_CROSSFADE_MS)) / VIDEO_LOOP_CROSSFADE_MS;
		blendAlpha = Math.min(1, Math.max(0, fadeT));
		const loopElapsed = elapsed - (video.durationMs - VIDEO_LOOP_CROSSFADE_MS); // 0..crossfade
		const blendIndex = Math.floor((loopElapsed / 1000) * video.fps) % video.frames.length;
		blendFrame = video.frames[blendIndex] ?? video.frames[0];
	}
	// Draw the raw frame to an offscreen canvas, then scale to the target size.
	videoCtx.clearRect(0, 0, video.width, video.height);
	videoCtx.putImageData(frame as any, 0, 0);
	if (blendFrame && blendAlpha > 0) {
		// Composite the loop-start frame over the current one at blendAlpha.
		videoBlendCtx.clearRect(0, 0, video.width, video.height);
		videoBlendCtx.putImageData(blendFrame as any, 0, 0);
		videoCtx.save();
		videoCtx.globalAlpha = blendAlpha;
		(videoCtx as any).drawImage(videoBlendCanvas as any, 0, 0);
		videoCtx.restore();
	}
	// Cover scaling: always fill the box height, then center-crop horizontally.
	const scale = boxHeight / video.height;
	const scaledWidth = video.width * scale;
	// crop horizontal
	let srcX = 0;
	let srcWidth = video.width;
	let destWidth = scaledWidth;
	if (scaledWidth > boxWidth) {
		const cropWidth = boxWidth / scale;
		srcX = (video.width - cropWidth) / 2;
		srcWidth = cropWidth;
		destWidth = boxWidth;
	}
	const drawX = boxX + (boxWidth - destWidth) / 2;
	const drawY = boxY;
	ctx.save();
	ctx.imageSmoothingEnabled = false;
	ctx.beginPath();
	// Clip so the cropped video never bleeds outside the letterbox box.
	ctx.rect(boxX, boxY, boxWidth, boxHeight);
	ctx.clip();
	(ctx as any).drawImage(
		videoCanvas as any,
		srcX, 0, srcWidth, video.height,
		drawX, drawY, destWidth, boxHeight
	);
	ctx.restore();
}

View File

@@ -0,0 +1,192 @@
import type { CanvasRenderingContext2D } from "@napi-rs/canvas";
import {
drawBitmapTextPerGlyph,
loadBitmapFont,
measureTextWidth
} from "../../bootsequence/font";
import { loadImageAsset } from "../../renderer/assets";
import { createLazyResource } from "../../renderer/lazy-resource";
import type { Layout } from "../../renderer/layout";
import type { RendererInstance, RendererProps } from "../types";
/** Normalized configuration for the error screen (all fields optional). */
type ErrorRendererProps = {
	// Large heading; defaults to "ERROR".
	title?: string;
	// Body text; an array is joined before wrapping.
	message?: string | string[];
	// Small pulsing hint at the bottom of the screen.
	hint?: string;
};
/** Lazily loaded assets for the error screen. */
type ErrorResources = {
	font: Awaited<ReturnType<typeof loadBitmapFont>>;
	// Loaded alongside the font; not currently drawn by render().
	heart: Awaited<ReturnType<typeof loadImageAsset>>;
};
/**
 * Greedy word-wrap for bitmap text: packs whitespace-separated words onto
 * lines whose measured width stays within maxWidth. A single word wider than
 * maxWidth still occupies its own line.
 */
function wrapBitmapText(
	text: string,
	maxWidth: number,
	measure: (t: string) => number
): string[] {
	const result: string[] = [];
	let line = "";
	for (const word of text.split(/\s+/)) {
		const candidate = line ? `${line} ${word}` : word;
		if (measure(candidate) <= maxWidth) {
			line = candidate;
			continue;
		}
		if (line) result.push(line);
		line = word;
	}
	if (line) result.push(line);
	return result;
}
/**
 * Full-screen error display: centered title, word-wrapped message body, and a
 * pulsing hint line. The screen exits (via shouldExit) once the user
 * acknowledges with Enter/Return/Space/Z.
 */
export function createErrorRenderer(props: RendererProps = {}): RendererInstance {
	console.debug(`[debug] [renderers/error] new RendererInstance`, props);
	const config: ErrorRendererProps = {
		title: typeof props.title === "string" ? props.title : "ERROR",
		message:
			typeof props.message === "string"
				? props.message
				: Array.isArray(props.message)
					? props.message.map(String).join("\n")
					: "Something went wrong.",
		hint: typeof props.hint === "string" ? props.hint : "Press Enter to exit."
	};
	const resources = createLazyResource<ErrorResources>(async () => {
		const font = await loadBitmapFont();
		const heart = await loadImageAsset("IMAGE_SOUL_BLUR_0.png");
		return { font, heart };
	});
	// Accumulated time driving the hint's pulse animation.
	let blinkMs = 0;
	// Set once a confirm key is pressed; polled by shouldExit().
	let acknowledged = false;
	const render = async ({
		ctx,
		layout,
		deltaMs
	}: {
		ctx: CanvasRenderingContext2D;
		layout: Layout;
		deltaMs: number;
	}) => {
		const { font } = await resources.load();
		blinkMs += deltaMs;
		const pulse = 0.5 + Math.sin(blinkMs / 300) * 0.5;
		ctx.save();
		ctx.fillStyle = "black";
		ctx.fillRect(0, 0, layout.width, layout.height);
		// Work in base-content coordinates from here on.
		ctx.translate(layout.x, layout.y);
		ctx.imageSmoothingEnabled = false;
		const scale = layout.contentScale;
		ctx.scale(scale, scale);
		const w = layout.drawWidth / scale;
		const h = layout.drawHeight / scale;
		const cx = w / 2;
		/* ---------- title ---------- */
		const title = config.title ?? "ERROR";
		const titleScale = 1.2;
		const titleWidth = measureTextWidth(title, font, { scale: titleScale });
		const titleY = font.lineHeight * 1.5;
		drawBitmapTextPerGlyph(
			ctx,
			font,
			title,
			cx - titleWidth / 2,
			titleY,
			{
				scale: titleScale
			}
		);
		/* ---------- body ---------- */
		const bodyScale = 0.7;
		const maxWidth = w * 0.9;
		const messageText = Array.isArray(config.message)
			? config.message.join(" ")
			: config.message ?? "";
		const wrapped = wrapBitmapText(
			messageText,
			maxWidth / bodyScale,
			(t) => measureTextWidth(t, font)
		);
		ctx.save();
		ctx.scale(bodyScale, bodyScale);
		const bodyCx = cx / bodyScale;
		// Vertically center the wrapped block in body-scaled coordinates.
		const bodyStartY = h / bodyScale / 2 - (wrapped.length * font.lineHeight) / 2;
		for (let i = 0; i < wrapped.length; i++) {
			const line = wrapped[i] ?? "";
			const lw = measureTextWidth(line, font);
			drawBitmapTextPerGlyph(
				ctx,
				font,
				line,
				bodyCx - lw / 2,
				bodyStartY + i * font.lineHeight
			);
		}
		ctx.restore();
		/* ---------- hint ---------- */
		const hint = config.hint ?? "";
		const hintScale = 0.2;
		const hintWidth = measureTextWidth(hint, font, { scale: hintScale });
		const hintY = h - font.lineHeight * 2;
		// Pulse the hint's alpha between 0.7 and 1.0.
		ctx.globalAlpha = 0.7 + pulse * 0.3;
		drawBitmapTextPerGlyph(
			ctx,
			font,
			hint,
			cx - hintWidth / 2,
			hintY,
			{
				scale: hintScale
			}
		);
		ctx.restore();
	};
	return {
		id: "errormessage",
		label: "Error",
		render,
		handleKey(key) {
			if (!key) return true;
			// Fix: previously no key ever set `acknowledged`, so the screen
			// could never be dismissed despite the "Press Enter to exit." hint.
			// Accept the same confirm vocabulary as the recovery menu.
			const k = key.toLowerCase();
			if (
				k === "enter" ||
				k === "return" ||
				k === " " ||
				k === "space" ||
				k === "z"
			) {
				acknowledged = true;
			}
			return true;
		},
		shouldExit() {
			return acknowledged;
		},
		getResult() {
			return { acknowledged, title: config.title, message: config.message };
		},
		unload() {
			// nothing to dispose
		},
		isLoaded: resources.isLoaded,
		getDebugStats() {
			return {
				loaded: resources.isLoaded(),
				acknowledged,
				blinkMs: Number(blinkMs.toFixed(1))
			};
		}
	};
}

55
src/renderers/index.ts Normal file
View File

@@ -0,0 +1,55 @@
import { createDeviceContactBackgroundRenderer } from "./device_contact";
import { createErrorRenderer } from "./error";
import { createRecoveryMenuRenderer } from "./recoverymenu";
import type { RendererFactory, RendererInstance, RendererProps, RendererRegistry } from "./types";
// Static renderer catalog: canonical id -> display label + factory.
const registry: Record<string, { label: string; factory: RendererFactory }> = {
	device_contact: { label: "DEVICE CONTACT", factory: createDeviceContactBackgroundRenderer },
	recoverymenu: { label: "Recovery", factory: createRecoveryMenuRenderer },
	errormessage: { label: "Error", factory: createErrorRenderer }
};
// Legacy/alternate ids accepted anywhere a renderer id is passed in.
const aliases: Record<string, keyof typeof registry> = {
	"goner-bg": "device_contact",
	error: "errormessage"
};
// Maps an external id (possibly an alias) onto a canonical registry key.
const resolveId = (id: string): keyof typeof registry => {
	return aliases[id] ?? (id as keyof typeof registry);
};
/**
 * Creates a registry that owns at most one active renderer at a time.
 * switchTo() unloads the previous instance before constructing the next;
 * an optional defaultId is activated immediately.
 */
export function createRendererRegistry(options: { defaultId?: string; rendererProps?: Record<string, RendererProps> } = {}): RendererRegistry {
	console.debug(`[debug] [renderers] new RendererRegistry`, options);
	const propsById = options.rendererProps ?? {};
	let currentId: keyof typeof registry | null = null;
	let current: RendererInstance | null = null;
	const switchTo = (id: string, propsOverride?: RendererProps): RendererInstance => {
		const resolvedId = resolveId(id);
		const entry = registry[resolvedId];
		if (entry === undefined) {
			throw new Error(`[debug] [renderer] Renderer "${id}" not found`);
		}
		// Tear down the previous renderer before instantiating the next one.
		if (current !== null) {
			console.warn(`[debug] [renderers] unloading current ${current.id}`);
			current.unload();
		}
		currentId = resolvedId;
		current = entry.factory(propsOverride ?? propsById[resolvedId]);
		return current;
	};
	if (options.defaultId) {
		switchTo(options.defaultId);
	}
	const getActive = (): RendererInstance => {
		if (current === null || currentId === null) {
			throw new Error("No active renderer");
		}
		return current;
	};
	const list = () =>
		Object.entries(registry).map(([id, meta]) => ({ id, label: meta.label }));
	return { getActive, switchTo, list };
}

View File

@@ -0,0 +1,338 @@
import type { CanvasRenderingContext2D } from "@napi-rs/canvas";
import * as sdl from "@kmamal/sdl";
import { decodeOggToPCM } from "../../audio/decoder";
import { AudioLoopPlayer } from "../../audio/player";
import { loadImageAsset, resolveAssetPath } from "../../renderer/assets";
import { createLazyResource } from "../../renderer/lazy-resource";
import type { Layout } from "../../renderer/layout";
import {
drawBitmapTextPerGlyph,
loadBitmapFont,
measureTextWidth
} from "../../bootsequence/font";
import type { RendererInstance, RendererProps } from "../types";
/* ---------------- */
/**
 * Greedy word-wrap for bitmap text. Words are measured with the supplied
 * callback; a word that alone exceeds maxWidth still gets its own line.
 */
function wrapBitmapText(
	text: string,
	maxWidth: number,
	measure: (t: string) => number
): string[] {
	const out: string[] = [];
	let pending = "";
	text.split(/\s+/).forEach((word) => {
		const merged = pending === "" ? word : `${pending} ${word}`;
		if (measure(merged) <= maxWidth) {
			pending = merged;
			return;
		}
		if (pending !== "") out.push(pending);
		pending = word;
	});
	if (pending !== "") out.push(pending);
	return out;
}
/* ---------------- */
/** One-shot sound effect bound to an open SDL playback device. */
type AudioSamplePlayer = {
	// Restarts the sample from the beginning.
	play(): void;
	// Clears the queue and closes the underlying device.
	dispose(): void;
};
/** Lazily loaded assets for the recovery menu. */
type MenuResources = {
	font: Awaited<ReturnType<typeof loadBitmapFont>>;
	heart: Awaited<ReturnType<typeof loadImageAsset>>;
	// Background drone loop, started on first render.
	drone: AudioLoopPlayer;
	// Cursor-move and confirm sound effects.
	sndMove: AudioSamplePlayer;
	sndSelect: AudioSamplePlayer;
};
/** Text configuration for the yes/no prompt (all optional). */
type RecoveryMenuProps = {
	question?: string;
	yesLabel?: string;
	noLabel?: string;
};
/* ---------------- */
async function loadSample(alwaysTry: string[]): Promise<AudioSamplePlayer> {
for (const rel of alwaysTry) {
const path = resolveAssetPath(rel);
try {
const decoded = await decodeOggToPCM(path);
const playbackDevice =
sdl.audio.devices.find((d) => d.type === "playback") ??
{ type: "playback" as const };
const device = sdl.audio.openDevice(playbackDevice as any, {
format: decoded.format,
channels: decoded.channels as 1 | 2 | 4 | 6,
frequency: decoded.sampleRate
});
return {
play() {
device.clearQueue();
device.enqueue(decoded.pcm);
device.play(false);
},
dispose() {
device.clearQueue();
device.close();
}
};
} catch { }
}
throw new Error("sample load failed");
}
/* ---------------- */
/**
 * Yes/No recovery prompt: wrapped question text, two options with a pulsing
 * heart cursor, a background drone loop, and move/select sound effects.
 * The prompt exits (shouldExit) once a choice is confirmed; getResult()
 * returns "yes" or "no".
 */
export function createRecoveryMenuRenderer(
	props: RendererProps = {}
): RendererInstance {
	console.debug(`[debug] [renderers/recoverymenu] new RendererInstance`, props);
	const config: RecoveryMenuProps = {
		question:
			typeof props.question === "string"
				? props.question
				: "????????/?",
		yesLabel: typeof props.yesLabel === "string" ? props.yesLabel : "Yes",
		noLabel: typeof props.noLabel === "string" ? props.noLabel : "No"
	};
	const resources = createLazyResource<MenuResources>(async () => {
		const font = await loadBitmapFont();
		const heart = await loadImageAsset("IMAGE_SOUL_BLUR_0.png");
		const drone = await AudioLoopPlayer.fromAsset("AUDIO_DRONE.ogg");
		// Two candidate filenames for the move sound (asset naming varies).
		const sndMove = await loadSample(["snd_menumove.wav", "snd_menumode.wav"]);
		const sndSelect = await loadSample(["snd_select.wav"]);
		return { font, heart, drone, sndMove, sndSelect };
	});
	// Accumulated time driving the heart's pulse animation.
	let blinkMs = 0;
	// Currently highlighted option.
	let selection: "yes" | "no" = "yes";
	// Set once the user confirms; polled by shouldExit().
	let confirmed = false;
	const render = async ({
		ctx,
		layout,
		deltaMs
	}: {
		ctx: CanvasRenderingContext2D;
		layout: Layout;
		deltaMs: number;
	}) => {
		const { font, heart, drone } = await resources.load();
		// Lazily start the drone loop on the first rendered frame.
		if (!drone.playing) drone.start();
		blinkMs += deltaMs;
		const pulse = 0.5 + Math.sin(blinkMs / 300) * 0.5;
		ctx.save();
		ctx.fillStyle = "black";
		ctx.fillRect(0, 0, layout.width, layout.height);
		// Work in base-content coordinates from here on.
		ctx.translate(layout.x, layout.y);
		ctx.imageSmoothingEnabled = false;
		const scale = layout.contentScale;
		ctx.scale(scale, scale);
		const w = layout.drawWidth / scale;
		const h = layout.drawHeight / scale;
		const cx = w / 2;
		const question = config.question ?? "";
		const yes = config.yesLabel ?? "Yes";
		const no = config.noLabel ?? "No";
		/* ---------- question ---------- */
		const QUESTION_SCALE = 0.62;
		const QUESTION_MAX_WIDTH = w * 0.9;
		ctx.save();
		ctx.scale(QUESTION_SCALE, QUESTION_SCALE);
		// Center-x and height in question-scaled coordinates.
		const scx = cx / QUESTION_SCALE;
		const sh = h / QUESTION_SCALE;
		const wrapped = wrapBitmapText(
			question,
			QUESTION_MAX_WIDTH / QUESTION_SCALE,
			(t) => measureTextWidth(t, font)
		);
		const qStartY = sh - font.lineHeight * 5;
		for (let i = 0; i < wrapped.length; i++) {
			const line = wrapped[i];
			const lw = measureTextWidth(line ?? "", font);
			drawBitmapTextPerGlyph(
				ctx,
				font,
				line ?? "",
				scx - lw / 2,
				(qStartY + i * font.lineHeight) - 50
			);
		}
		ctx.restore();
		/* ---------- options ---------- */
		const ANSWER_SCALE = 0.8;
		const spacing = font.lineHeight * ANSWER_SCALE * 1.2;
		const yesW = measureTextWidth(yes, font, { scale: ANSWER_SCALE });
		const noW = measureTextWidth(no, font, { scale: ANSWER_SCALE });
		const yesY = h - font.lineHeight * 3;
		const noY = yesY + spacing;
		const yesX = cx - yesW / 2;
		const noX = cx - noW / 2;
		drawBitmapTextPerGlyph(
			ctx,
			font,
			yes,
			yesX,
			yesY,
			{
				scale: ANSWER_SCALE
			}
		);
		drawBitmapTextPerGlyph(
			ctx,
			font,
			no,
			noX,
			noY,
			{
				scale: ANSWER_SCALE
			}
		);
		// Heart cursor to the left of whichever option is selected.
		const heartSize = font.lineHeight * 0.75;
		const selW = selection === "yes" ? yesW : noW;
		const selY = selection === "yes" ? yesY : noY;
		const heartX = cx - selW / 2 - heartSize - 6;
		const heartY = selY + (font.lineHeight * ANSWER_SCALE - heartSize) / 2;
		ctx.globalAlpha = 0.7 + pulse * 0.3;
		(ctx as any).drawImage(
			heart as any,
			0,
			0,
			heart.width,
			heart.height,
			heartX,
			heartY,
			heartSize,
			heartSize
		);
		ctx.restore();
	};
	// Moves the cursor; plays the move sound on change, the select sound when
	// re-selecting the current option (i.e. on confirm).
	const onSelect = (dir: "yes" | "no") => {
		const changed = dir !== selection;
		selection = dir;
		void resources.load().then((r) => {
			(changed ? r.sndMove : r.sndSelect).play();
		});
	};
	return {
		id: "recoverymenu",
		label: "Recovery Menu",
		render,
		handleKey(key) {
			if (!key) return true;
			const k = key.toLowerCase();
			// Up/down toggle between the two options; left/right pick directly.
			if (
				k === "arrowup" ||
				k === "up" ||
				k === "w" ||
				k === "k"
			) {
				onSelect(selection === "yes" ? "no" : "yes");
				return true;
			}
			if (
				k === "arrowdown" ||
				k === "down" ||
				k === "s" ||
				k === "j"
			) {
				onSelect(selection === "yes" ? "no" : "yes");
				return true;
			}
			if (k === "arrowleft" || k === "left") {
				onSelect("yes");
				return true;
			}
			if (k === "arrowright" || k === "right") {
				onSelect("no");
				return true;
			}
			if (
				k === "enter" ||
				k === "return" ||
				k === " " ||
				k === "space" ||
				k === "z"
			) {
				confirmed = true;
				// Re-selecting the current option plays the select sound.
				onSelect(selection);
				return true;
			}
			return true;
		},
		shouldExit() {
			return confirmed;
		},
		getResult() {
			return selection;
		},
		unload() {
			if (!resources.isLoaded()) return;
			// Resources are already cached, so load() resolves immediately here.
			void resources.load().then((r) => {
				r.drone.stop();
				r.sndMove.dispose();
				r.sndSelect.dispose();
			});
		},
		isLoaded: resources.isLoaded,
		getDebugStats() {
			return {
				loaded: resources.isLoaded(),
				selection,
				blinkMs: Number(blinkMs.toFixed(1))
			};
		},
		getDebugHudStats() {
			return {
				selection,
				dronePlaying: resources.isLoaded()
			};
		}
	};
}

35
src/renderers/types.ts Normal file
View File

@@ -0,0 +1,35 @@
import type { CanvasRenderingContext2D } from "@napi-rs/canvas";
import type { Layout } from "../renderer/layout";
// Arbitrary per-renderer configuration bag passed into factories.
export type RendererProps = Record<string, unknown>;
/** Arguments supplied to a renderer on every frame. */
export type RendererRenderArgs = {
	ctx: CanvasRenderingContext2D;
	layout: Layout;
	// Milliseconds elapsed since the previous frame.
	deltaMs: number;
};
/** Optional extra HUD drawing hook. */
export type RendererDebugHud = (ctx: CanvasRenderingContext2D, layout: Layout) => void;
/** A live renderer: per-frame draw plus lifecycle and input hooks. */
export type RendererInstance = {
	id: string;
	label: string;
	render: (args: RendererRenderArgs) => Promise<void> | void;
	// Returns true when the key was handled.
	handleKey?: (key: string | null) => boolean | void;
	// Releases lazily loaded assets; the instance may be reloaded later.
	unload: () => void;
	isLoaded: () => boolean;
	// Key/value stats for logging and the on-screen debug HUD respectively.
	getDebugStats?: () => Record<string, string | number | boolean | undefined>;
	getDebugHudStats?: () => Record<string, string | number | boolean | undefined>;
	// Polled by the host; true once the renderer wants to hand control back.
	shouldExit?: () => boolean;
	// Renderer-specific outcome (e.g. the menu selection) read after exit.
	getResult?: () => unknown;
};
/** Constructs a renderer instance from optional props. */
export type RendererFactory = (props?: RendererProps) => RendererInstance;
/** Owns the single active renderer and switches between registered ones. */
export type RendererRegistry = {
	getActive: () => RendererInstance;
	switchTo: (id: string, propsOverride?: RendererProps) => RendererInstance;
	list: () => Array<{ id: string; label: string }>;
};

6
src/types.ts Normal file
View File

@@ -0,0 +1,6 @@
/** Answers collected by the boot-sequence questionnaire. */
export type BootsequenceAnswers = {
	char: string;
	desktop: string;
	color: string;
	gift: string;
};

574
src/ui/app.ts Normal file
View File

@@ -0,0 +1,574 @@
import { createWriteStream, mkdirSync } from "node:fs";
import { dirname } from "node:path";
import { type CanvasRenderingContext2D } from "@napi-rs/canvas";
import { createBootSequenceUI } from "../bootsequence/questions";
import { GREETD_SOCKET, handoffToGreetd } from "../desktop";
import type { BootsequenceAnswers } from "../types";
import { loadImageAsset } from "../renderer/assets";
import { parseCli } from "../renderer/cli";
import { createDebugHud } from "../renderer/debug-hud";
import { createFpsCounter } from "../renderer/fps";
import { createLayoutCalculator } from "../renderer/layout";
import { createRenderer } from "../renderer/index";
import { SDLWindow } from "../renderer/window";
import { createRendererRegistry } from "../renderers";
import type { RendererInstance, RendererProps } from "../renderers/types";
// Runtime banner logged at startup.
const BUNVERS = `Bun ${Bun.version} ${process.platform} ${process.arch}`;
// Fallback path for the mirrored debug log when debug HUDs are enabled.
const DEFAULT_DEBUG_LOG_FILE = "/tmp/device_contact.debug.log";
// NOTE(review): presumably lifetime/fade timings and cap for on-screen log
// lines — confirm against the HUD drawing code (outside this view).
const LOG_LIFETIME_MS = 8_000;
const LOG_FADE_MS = 3_000;
const LOG_MAX_LINES = 64;
// Accumulated on-screen debug log entries (capped at LOG_MAX_LINES).
const debugLogEntries: DebugLogEntry[] = [];
export async function runDeviceContactUI(argv: string[] = process.argv) {
const cli = parseCli(argv);
const isDev = process.env.NODE_ENV === "development";
const debugOptions = {
showGlobal: isDev || cli.debugGlobalHud || process.env.DEBUG_UI === "true",
showRenderer: isDev || cli.debugRendererHud || process.env.DEBUG_RENDERER === "true" || cli.debugGlobalHud,
showCustom: isDev || cli.debugGlobalHud || process.env.DEBUG_UI === "true"
};
const debugLoggingEnabled = debugHudOptionsEnabled(debugOptions);
const debugLogFile =
cli.debugLogFile ??
process.env.DEBUG_LOG_FILE ??
(debugLoggingEnabled ? DEFAULT_DEBUG_LOG_FILE : undefined);
const restoreDebug =
debugLoggingEnabled || debugLogFile
? hookDebugLogs({ filePath: debugLogFile })
: () => { };
console.debug("[debug] ESTABLISHING CONNECTION");
console.debug("[debug]", BUNVERS);
const isCrashRecovery = !!(cli.crashRecoverySession ?? process.env.CRASH_RECOVERY_SESSION);
const forcedErrorScreen = Boolean(cli.errorScreenRequested);
const requestedRenderer = cli.rendererId ?? "device_contact";
const defaultRendererId = forcedErrorScreen
? "errormessage"
: isCrashRecovery
? "recoverymenu"
: requestedRenderer;
const shouldRunBootSequence = defaultRendererId !== "recoverymenu" && defaultRendererId !== "errormessage";
const isTTY = process.env.SDL_VIDEODRIVER === "kmsdrm";
const isCage = process.env.IS_CAGE === "true";
const windowOptions = {
// DO NOT CHANGE TITLE
title: "DEVICE CONTACT (DELTARUNE Chapter 1)",
width: 1920,
height: 1080,
fullscreen: true
};
const window = new SDLWindow(windowOptions);
window.Window.setFullscreen(true);
// will segfault bun if ran in tty
if (!isTTY && !isCage) {
window.on("keyUp", (e) => {
if (e.key === "f11") {
window.Window.setFullscreen(!window.Window.fullscreen);
}
});
}
const icon = await loadImageAsset("icon.png");
window.setIconFromImage(icon);
window.Window.setResizable(true);
window.Window.setAccelerated(true);
if (isTTY) {
console.debug("[debug] KMSDRM detected, What the fuck?? Deltarune in the TTY?");
}
if (isCage) {
console.debug("[debug] Cage detected, are you trying to make a login manager or something?");
setInterval(() => {
try {
if (!window.Window.fullscreen) window.Window.setFullscreen(true);
} catch { }
}, 100)
}
// Base dim for UI/layout (matches the original background logical size).
const baseWidth = 160;
const baseHeight = 120;
const viewWidth = 1280;
const viewHeight = 960;
const uiScale = 0.6;
const crashRecoverySession = cli.crashRecoverySession ?? process.env.CRASH_RECOVERY_SESSION;
const renderer = createRenderer({ window });
renderer.ctx.imageSmoothingEnabled = false;
const rendererPropsById: Record<string, RendererProps> = {
recoverymenu: {
question: crashRecoverySession
? `${crashRecoverySession} crashed. Do you want to restart it?`
: "?????",
yesLabel: "Yes",
noLabel: "No",
session: crashRecoverySession
},
errormessage: {
title: cli.errorScreenTitle ?? process.env.ERROR_TITLE ?? "ERROR",
message: cli.errorScreenMessage ?? process.env.ERROR_MESSAGE ?? "An unexpected error occurred.",
hint: cli.errorScreenHint ?? process.env.ERROR_HINT ?? "Switch between VT's with CTRL+ALT+F[0-9]."
}
};
const rendererRegistry = createRendererRegistry({ rendererProps: rendererPropsById });
let activeRenderer: RendererInstance | null = null;
let rendererExit: { id: string; result: unknown } | null = null;
let fatalErrorProps: RendererProps | null = null;
// Returns the currently active renderer, failing loudly if none is set yet.
const requireActiveRenderer = () => {
	const current = activeRenderer;
	if (current === null) {
		throw new Error("Active renderer not initialized");
	}
	return current;
};
// Debug-logs the id of whichever renderer is currently active.
const logSelectedRenderer = () => {
	console.debug("[debug] renderer selected", requireActiveRenderer().id);
};
if (crashRecoverySession) {
console.debug("[debug] crash recovery mode", crashRecoverySession);
}
if (debugLogFile) {
console.debug("[debug] writing debug log to", debugLogFile);
}
const debugHud = createDebugHud(debugOptions);
const globalFps = createFpsCounter();
const rendererFps = createFpsCounter();
const getLayout = createLayoutCalculator({
baseWidth,
baseHeight,
viewWidth,
viewHeight
});
let bootUI = shouldRunBootSequence
? await createBootSequenceUI(baseWidth, baseHeight)
: null;
let bootAnswers: BootsequenceAnswers | null = null;
let contactComplete = false;
const rendererIds = Array.from(new Set(rendererRegistry.list().map((r) => r.id)));
let lastFrameMs = Date.now();
// Records the active renderer's exit result (at most once) and asks the
// render loop to stop.
const requestRendererExit = () => {
	const current = requireActiveRenderer();
	if (rendererExit) return;
	const result = current.getResult?.();
	rendererExit = { id: current.id, result };
	renderer.requestStop();
};
// Activates the renderer named by `id`, clearing the previous renderer's
// pixels and resetting per-frame state. No-op when it is already active.
const switchRenderer = (id: string) => {
	if (activeRenderer !== null && activeRenderer.id === id) {
		return activeRenderer;
	}
	rendererExit = null;
	activeRenderer = rendererRegistry.switchTo(id, rendererPropsById[id]);
	const { canvas, ctx } = renderer;
	ctx.clearRect(0, 0, canvas.width, canvas.height);
	lastFrameMs = Date.now();
	return activeRenderer;
};
activeRenderer = switchRenderer(defaultRendererId);
logSelectedRenderer();
if (cli.helpRequested) {
console.log(`Usage: bun run src/ui/app.ts [options]
Options:
--renderer <id> Select renderer by id (default: ${defaultRendererId})
--debug Enable all debug HUD panels
--debug-global Enable global debug HUD
--debug-renderer Enable renderer debug HUD
--error-screen [msg] Start on error screen (optional message)
--error-title <t> Set error screen title
--error-hint <h> Set error screen hint
--debug-log-file <path> Write debug logs to file (default: ${DEFAULT_DEBUG_LOG_FILE})
--crash-recovery [id] Start in crash recovery mode (optional session id)
--help, -h Show this help message`);
process.exit(0);
}
window.on("keyDown", (e) => {
const currentRenderer = requireActiveRenderer();
currentRenderer.handleKey?.(e.key ?? null);
bootUI?.handleKey({
key: e.key ?? null,
scancode: e.scancode ?? 0,
ctrl: e.ctrl ?? 0,
shift: e.shift ?? 0,
alt: e.alt ?? 0,
super: e.super ?? 0
});
});
// Per-frame callback handed to renderer.run(): draws the active renderer,
// the boot-sequence UI overlay, the debug HUD and the on-screen debug logs,
// and drives renderer transitions / exit requests.
const drawFrame = async (
	_ctx: CanvasRenderingContext2D,
	size: { width: number; height: number }
): Promise<void> => {
	const currentRenderer = requireActiveRenderer();
	const { ctx } = renderer;
	// Pixel-art rendering: never smooth scaled bitmaps.
	ctx.imageSmoothingEnabled = false;
	const layout = getLayout(size);
	const {
		width,
		height,
		contentScale,
		x,
		y
	} = layout;
	// Frame timing: deltaMs drives animations, FPS counters feed the HUD.
	const now = Date.now();
	const deltaMs = now - lastFrameMs;
	lastFrameMs = now;
	globalFps.tick(now);
	ctx.clearRect(0, 0, width, height);
	await currentRenderer.render({ ctx, deltaMs, layout });
	rendererFps.tick(now);
	if (currentRenderer.shouldExit?.()) {
		if (currentRenderer.id === "recoverymenu") {
			// Leaving the recovery menu hands control to the contact renderer;
			// lazily create the boot UI if it was skipped at startup.
			activeRenderer = switchRenderer("device_contact");
			logSelectedRenderer();
			if (!bootUI) {
				bootUI = await createBootSequenceUI(baseWidth, baseHeight);
			}
		} else {
			requestRendererExit();
		}
	}
	// Text/UI layer: above BG/overlay, below FPS.
	ctx.save();
	// Center the uiScale-shrunk UI inside the scaled content area.
	const uiOffsetX = (contentScale - contentScale * uiScale) * baseWidth * 0.5;
	const uiOffsetY = (contentScale - contentScale * uiScale) * baseHeight * 0.5;
	ctx.translate(x + uiOffsetX, y + uiOffsetY);
	ctx.scale(contentScale * uiScale, contentScale * uiScale);
	if (bootUI) {
		bootUI.update(deltaMs);
		bootUI.render(ctx);
		if (bootUI.isFinished()) {
			// Capture the answers once, then tear the window down so the
			// greetd handoff after the run loop can proceed.
			if (!bootAnswers) {
				bootAnswers = bootUI.getAnswers();
			}
			contactComplete = true;
			renderer.requestStop();
			window.Window.destroy();
		}
	}
	ctx.restore();
	debugHud.draw(ctx, layout, {
		global: [
			`${globalFps.value.toFixed(2)} FPS`,
			// Display name with the monitor serial (MONITOR_SN) stripped out.
			`${window.Window.display.name ? (process.env.MONITOR_SN ? window.Window.display.name.replaceAll((process.env.MONITOR_SN || "") + " ", "") : window.Window.display.name) : "unknown"} ${window.Window.display.frequency}hz [${process.env.SDL_VIDEODRIVER ?? "sdl2"}]`,
			`activeRenderer: ${currentRenderer.id}`,
			`crashRecoverySession: ${crashRecoverySession ?? "none"}`,
			`${BUNVERS}`
		],
		renderer: {
			id: currentRenderer.id,
			label: currentRenderer.label,
			stats: {
				...(currentRenderer.getDebugStats ? currentRenderer.getDebugStats() : {}),
				...(currentRenderer.getDebugHudStats ? currentRenderer.getDebugHudStats() : {})
			},
			fps: rendererFps.value
		},
		custom: {
			greetdSocket: GREETD_SOCKET,
			tty: isTTY,
			cage: isCage
		}
	});
	if (debugLoggingEnabled) {
		drawDebugLogs(ctx, layout, now);
	}
};
console.debug("[debug] reached main");
try {
await renderer.run(drawFrame);
if (rendererExit) {
console.debug("[debug] renderer exit requested", rendererExit);
}
} finally {
requireActiveRenderer().unload();
}
if (contactComplete) {
const desktopHintRaw =
(bootAnswers as BootsequenceAnswers | null | undefined)?.desktop ??
crashRecoverySession ??
process.env.DESKTOP_SESSION_FRIENDLY_NAME ??
process.env.XDG_CURRENT_DESKTOP;
const desktopHint = typeof desktopHintRaw === "string" ? desktopHintRaw : undefined;
try {
await handoffToGreetd(desktopHint);
} catch (error) {
console.error("[ui/app] greetd handoff failed\n", error);
console.error("[ui/app] Press CTRL+ALT+F[0-9] to switch to a different VT");
if (process.env.NODE_ENV !== "development") {
process.exit(1);
}
}
}
if (fatalErrorProps) {
return
}
restoreDebug();
}
// Run the UI only when this file is executed directly (not when imported).
if (import.meta.main) {
	await runDeviceContactUI();
}
// One console.debug line captured for the on-screen log overlay.
type DebugLogEntry = {
	message: string; // already flattened to a single string by formatDebugMessage
	ts: number; // capture time (ms since epoch); drives pruning and fade-out
};
// True when at least one debug HUD panel (global, renderer or custom) is on.
function debugHudOptionsEnabled(options: { showGlobal: boolean; showRenderer: boolean; showCustom?: boolean }) {
	if (options.showGlobal) return true;
	if (options.showRenderer) return true;
	return Boolean(options.showCustom);
}
// Intercepts console.debug so each message is (1) buffered in debugLogEntries
// for the on-screen overlay, (2) optionally appended to a log file, and
// (3) still forwarded to the original console.debug.
// Returns a restore function that unhooks console.debug, clears the buffer
// and closes the file stream.
function hookDebugLogs(options?: { filePath?: string }): () => void {
	const originalDebug = console.debug;
	// Hard cap on buffered entries, independent of the time-based pruning.
	const maxEntries = 100;
	let logStream = createDebugLogStream(options?.filePath, originalDebug);
	if (logStream) {
		logStream.on("error", (error) => {
			// Stop file logging after a stream error; console output keeps working.
			originalDebug("[debug] debug log stream error", error);
			logStream?.destroy();
			logStream = null;
		});
	}
	console.debug = (...args: unknown[]) => {
		const message = formatDebugMessage(args);
		const now = Date.now();
		debugLogEntries.push({ message, ts: now });
		pruneOldLogs(now);
		if (debugLogEntries.length > maxEntries) {
			debugLogEntries.splice(0, debugLogEntries.length - maxEntries);
		}
		if (logStream) {
			try {
				logStream.write(`[${new Date(now).toISOString()}] ${message}\n`);
			} catch (error) {
				originalDebug("[debug] failed to write debug log to file", error);
				// Release the underlying fd; previously only the reference was
				// dropped, leaking the open stream.
				logStream.destroy();
				logStream = null;
			}
		}
		originalDebug(...args);
	};
	return () => {
		console.debug = originalDebug;
		debugLogEntries.length = 0;
		if (logStream) {
			logStream.end();
			logStream = null;
		}
	};
}
// Flattens console.debug arguments into a single space-joined string.
// Strings pass through, Errors become "Name: message", everything else is
// JSON-serialized (bigints stringified). Falls back to String(arg) when
// JSON.stringify cannot produce a string: it throws on circular structures,
// and returns undefined for undefined, functions and symbols — the latter
// case previously rendered those args as an empty string.
function formatDebugMessage(args: unknown[]): string {
	return args
		.map((arg) => {
			if (typeof arg === "string") return arg;
			if (arg instanceof Error) {
				return `${arg.name}: ${arg.message}`;
			}
			try {
				const json = JSON.stringify(arg, (_k, v) => {
					if (typeof v === "bigint") return v.toString();
					return v;
				});
				return json === undefined ? String(arg) : json;
			} catch {
				return String(arg);
			}
		})
		.join(" ");
}
// Paints the most recent buffered debug lines bottom-up in the lower-left
// corner, fading each line out over LOG_FADE_MS at the end of its
// LOG_LIFETIME_MS window.
function drawDebugLogs(ctx: CanvasRenderingContext2D, layout: { width: number; height: number }, now: number) {
	const padding = 8;
	const lineHeight = 18;
	pruneOldLogs(now);
	const visible = debugLogEntries.slice(-LOG_MAX_LINES);
	if (!visible.length) return;
	ctx.save();
	ctx.font = "14px \"JetBrains Mono\", monospace";
	ctx.textBaseline = "bottom";
	const fadeStart = LOG_LIFETIME_MS - LOG_FADE_MS;
	let cursorY = layout.height - padding;
	// Newest entry sits at the bottom; walk the buffer backwards.
	for (let i = visible.length - 1; i >= 0; i--) {
		const entry = visible[i]!;
		const age = now - entry.ts;
		// Fully opaque until fadeStart, then a linear fade to transparent.
		ctx.globalAlpha = age <= fadeStart ? 1 : Math.max(0, (LOG_LIFETIME_MS - age) / LOG_FADE_MS);
		ctx.fillStyle = "#66ccff";
		ctx.fillText(entry.message, padding, cursorY);
		cursorY -= lineHeight;
	}
	ctx.restore();
}
// Drops every buffered log entry whose age is >= LOG_LIFETIME_MS.
// Compacts the shared array in place with a single pass; the previous
// per-entry splice loop was O(n^2) in the worst case.
function pruneOldLogs(now: number) {
	let write = 0;
	for (let read = 0; read < debugLogEntries.length; read++) {
		const entry = debugLogEntries[read]!;
		if (now - entry.ts < LOG_LIFETIME_MS) {
			debugLogEntries[write] = entry;
			write++;
		}
	}
	debugLogEntries.length = write;
}
// Minimal standalone render loop for the "errormessage" renderer: used when
// the main UI cannot run. Mirrors the main UI's setup (SDL window, layout,
// debug HUD) but registers only the error renderer and performs no boot
// sequence or greetd handoff.
async function runErrorScreen(
	props: RendererProps,
	options?: { debugOptions?: { showGlobal: boolean; showRenderer: boolean; showCustom?: boolean } }
) {
	const isTTY = process.env.SDL_VIDEODRIVER === "kmsdrm";
	const isCage = process.env.IS_CAGE === "true";
	const windowOptions = {
		// DO NOT CHANGE TITLE
		title: "DEVICE CONTACT (DELTARUNE Chapter 1)",
		width: 1920,
		height: 1080,
		fullscreen: true
	};
	const window = new SDLWindow(windowOptions);
	window.Window.setFullscreen(true);
	// F11 fullscreen toggle — not wired under KMSDRM/cage (registering the
	// handler segfaults bun in a TTY; see the same guard in the main UI).
	if (!isTTY && !isCage) {
		window.on("keyUp", (e) => {
			if (e.key === "f11") {
				window.Window.setFullscreen(!window.Window.fullscreen);
			}
		});
	}
	const icon = await loadImageAsset("icon.png");
	window.setIconFromImage(icon);
	window.Window.setResizable(true);
	window.Window.setAccelerated(true);
	const renderer = createRenderer({ window });
	// Only the error renderer exists in this registry.
	const rendererRegistry = createRendererRegistry({ rendererProps: { errormessage: props } });
	let activeRenderer: RendererInstance | null = rendererRegistry.switchTo("errormessage");
	let rendererExit: { id: string; result: unknown } | null = null;
	// Records the renderer's exit result once and stops the render loop.
	const requestRendererExit = () => {
		const current = activeRenderer;
		if (!current || rendererExit) return;
		rendererExit = {
			id: current.id,
			result: current.getResult ? current.getResult() : undefined
		};
		renderer.requestStop();
	};
	window.on("keyDown", (e) => {
		activeRenderer?.handleKey?.(e.key ?? null);
	});
	// Same logical layout dimensions as the main UI.
	const baseWidth = 160;
	const baseHeight = 120;
	const viewWidth = 1280;
	const viewHeight = 960;
	const uiScale = 0.6;
	const getLayout = createLayoutCalculator({
		baseWidth,
		baseHeight,
		viewWidth,
		viewHeight
	});
	// All HUD panels default to on for the error screen.
	const debugHud = createDebugHud(
		options?.debugOptions ?? {
			showGlobal: true,
			showRenderer: true,
			showCustom: true
		}
	);
	const globalFps = createFpsCounter();
	const rendererFps = createFpsCounter();
	let lastFrameMs = Date.now();
	const drawFrame = async (_ctx: CanvasRenderingContext2D, size: { width: number; height: number }) => {
		const currentRenderer = activeRenderer!;
		const { ctx } = renderer;
		// Pixel-art rendering: never smooth scaled bitmaps.
		ctx.imageSmoothingEnabled = false;
		const layout = getLayout(size);
		const { width, height, contentScale, x, y } = layout;
		const now = Date.now();
		const deltaMs = now - lastFrameMs;
		lastFrameMs = now;
		globalFps.tick(now);
		ctx.clearRect(0, 0, width, height);
		await currentRenderer.render({ ctx, deltaMs, layout });
		rendererFps.tick(now);
		if (currentRenderer.shouldExit?.()) {
			requestRendererExit();
		}
		// NOTE(review): this save/translate/scale/restore applies the UI
		// transform but draws nothing in between — presumably kept for parity
		// with the main loop's UI layer; confirm before removing.
		ctx.save();
		const uiOffsetX = (contentScale - contentScale * uiScale) * baseWidth * 0.5;
		const uiOffsetY = (contentScale - contentScale * uiScale) * baseHeight * 0.5;
		ctx.translate(x + uiOffsetX, y + uiOffsetY);
		ctx.scale(contentScale * uiScale, contentScale * uiScale);
		ctx.restore();
		debugHud.draw(ctx, layout, {
			global: [
				`${globalFps.value.toFixed(2)} FPS`,
				// Display name with the monitor serial (MONITOR_SN) stripped out.
				`${window.Window.display.name ? (process.env.MONITOR_SN ? window.Window.display.name.replaceAll((process.env.MONITOR_SN || "") + " ", "") : window.Window.display.name) : "unknown"} ${window.Window.display.frequency}hz [${process.env.SDL_VIDEODRIVER ?? "sdl2"}]`,
				`activeRenderer: ${currentRenderer.id}`,
				`${BUNVERS}`
			],
			renderer: {
				id: currentRenderer.id,
				label: currentRenderer.label,
				stats: {
					...(currentRenderer.getDebugStats ? currentRenderer.getDebugStats() : {}),
					...(currentRenderer.getDebugHudStats ? currentRenderer.getDebugHudStats() : {})
				},
				fps: rendererFps.value
			},
			custom: {
				greetdSocket: GREETD_SOCKET,
				tty: isTTY,
				cage: isCage
			}
		});
		if (debugHudOptionsEnabled(options?.debugOptions ?? { showGlobal: true, showRenderer: true, showCustom: true })) {
			drawDebugLogs(ctx, layout, now);
		}
	};
	await renderer.run(drawFrame);
	if (rendererExit) {
		console.debug("[debug] error renderer exit requested", rendererExit);
	}
	activeRenderer?.unload();
}
// Opens an append-mode write stream for the debug log, creating parent
// directories as needed. Returns null when no path was given, or — after
// reporting via `debug` — when the stream could not be opened.
function createDebugLogStream(filePath: string | undefined, debug: typeof console.debug) {
	if (!filePath) {
		return null;
	}
	try {
		const parentDir = dirname(filePath);
		mkdirSync(parentDir, { recursive: true });
		return createWriteStream(filePath, { flags: "a" });
	} catch (error) {
		debug("[debug] failed to open debug log file", { filePath, error });
		return null;
	}
}

22
tsconfig.json Normal file
View File

@@ -0,0 +1,22 @@
{
"compilerOptions": {
"lib": ["ESNext"],
"target": "ESNext",
"module": "Preserve",
"moduleDetection": "force",
"jsx": "react-jsx",
"allowJs": true,
"moduleResolution": "bundler",
"allowImportingTsExtensions": true,
"verbatimModuleSyntax": true,
"noEmit": true,
"strict": true,
"skipLibCheck": true,
"noFallthroughCasesInSwitch": true,
"noUncheckedIndexedAccess": true,
"noImplicitOverride": true,
"noUnusedLocals": false,
"noUnusedParameters": false,
"noPropertyAccessFromIndexSignature": false
}
}

6
tty.sh Executable file
View File

@@ -0,0 +1,6 @@
#!/bin/bash
export SDL_VIDEODRIVER=kmsdrm
export SDL_KMSDRM_HWCURSOR=0
export SDL_HINT_RENDER_SCALE_QUALITY=0
export NODE_ENV=development
bun run src/ui/app.ts