initialize generic rms-software repository

Add the reusable RMS core application (server, web UI, plugins, tests, tools) with generic defaults, GPL licensing, and maintainer context documentation so deployments can consume this repo as software source independent of station-specific overlays.
This commit is contained in:
2026-03-16 03:31:08 +01:00
commit e1a4ce0b8b
58 changed files with 20611 additions and 0 deletions

View File

@@ -0,0 +1,601 @@
const fs = require("fs");
const path = require("path");
const { spawn } = require("child_process");
/**
 * Creates the microHAM plugin instance: PTT control plus an ffmpeg-based
 * TX audio backend that pipes browser audio chunks to an ALSA device.
 *
 * @param {object} ctx - Host context (getSetting, env, commandRunner, emit).
 * @returns {Promise<object>} Plugin interface: execute/getStatus/stop/health.
 */
async function createPlugin(ctx) {
  // Per-instance mutable state shared by every action handler.
  const state = {
    pttActive: false,
    audio: {
      ffmpeg: null,
      clients: new Set(),
      running: false,
      startedAt: null,
      ownerUserId: null,
      alsaDevice: null,
      stopRequested: false,
      lastError: null,
      lastExit: null,
      idleTimer: null
    }
  };
  // Action dispatch table. Each "backend*" action is an alias of the
  // corresponding "audio*" action and shares the same handler.
  const actions = new Map([
    ["pttDown", (input) => pttSet(ctx, state, true, input)],
    ["pttUp", (input) => pttSet(ctx, state, false, input)],
    ["pttStatus", () => pttStatus(ctx, state)],
    ["audioConnect", (input) => audioConnect(ctx, state, input)],
    ["backendStart", (input) => audioConnect(ctx, state, input)],
    ["audioDisconnect", (input) => audioDisconnect(ctx, state, input)],
    ["backendStop", (input) => audioDisconnect(ctx, state, input)],
    ["audioStatus", (input) => audioStatus(ctx, state, input)],
    ["backendStatus", (input) => audioStatus(ctx, state, input)],
    ["audioRegisterClient", (input) => audioRegisterClient(ctx, state, input)],
    ["backendRegisterClient", (input) => audioRegisterClient(ctx, state, input)],
    ["audioUnregisterClient", (input) => audioUnregisterClient(ctx, state, input)],
    ["backendUnregisterClient", (input) => audioUnregisterClient(ctx, state, input)],
    ["audioWriteChunk", (input) => audioWriteChunk(ctx, state, input)],
    ["backendWrite", (input) => audioWriteChunk(ctx, state, input)]
  ]);
  return {
    /** Routes an action name to its handler; throws on unknown actions. */
    async execute(action, input = {}) {
      const handler = actions.get(action);
      if (!handler) {
        throw new Error(`Unknown action: ${action}`);
      }
      return handler(input);
    },
    /** Combined PTT + audio snapshot (both sub-status helpers are synchronous). */
    async getStatus() {
      return {
        ptt: pttStatus(ctx, state),
        audio: audioStatus(ctx, state, {})
      };
    },
    /** Shuts the audio backend down when the host unloads the plugin. */
    async stop() {
      await stopAudioBackend(ctx, state, "plugin-stop");
    },
    /** Liveness probe for the host. */
    async health() {
      return { ok: true };
    }
  };
}
/**
 * Resolves the plugin's settings snapshot. Each value falls back from an
 * explicit setting, to an environment variable, to a hard-coded default.
 *
 * @param {object} ctx - Host context exposing getSetting(key, default) and env.
 * @returns {object} Plain settings object consumed by all handlers.
 */
function cfg(ctx) {
  // Raw lookup: explicit setting wins, then env var, then supplied default.
  const raw = (key, envKey, envDefault) => ctx.getSetting(key, ctx.env[envKey] || envDefault);
  // Non-empty trimmed string with a final hard default.
  const str = (key, envKey, fallback) => String(raw(key, envKey, fallback)).trim() || fallback;
  // Optional string: may legitimately stay empty.
  const optional = (key, envKey) => String(raw(key, envKey, "")).trim();
  const flag = (key, envKey, fallback) => asBool(raw(key, envKey, fallback));
  const num = (key, envKey, fallback, min, max) => clampNum(raw(key, envKey, fallback), min, max, fallback);
  return {
    device: str("device", "MICROHAM_DEVICE", "/dev/rms-microham-u3"),
    pttCommandsEnabled: flag("pttCommandsEnabled", "MICROHAM_PTT_COMMANDS_ENABLED", "false"),
    pttDownCommand: optional("pttDownCommand", "MICROHAM_PTT_DOWN_CMD"),
    pttUpCommand: optional("pttUpCommand", "MICROHAM_PTT_UP_CMD"),
    pttTimeoutMs: num("pttTimeoutMs", "MICROHAM_PTT_TIMEOUT_MS", 5000, 1000, 60000),
    pttApplyBandState: flag("pttApplyBandState", "MICROHAM_PTT_APPLY_BAND_STATE", "true"),
    pttRigctlModel: str("pttRigctlModel", "MICROHAM_PTT_RIGCTL_MODEL", "3023"),
    pttRigctlBaud: str("pttRigctlBaud", "MICROHAM_PTT_RIGCTL_BAUD", "19200"),
    pttRigctlSetConf: str("pttRigctlSetConf", "MICROHAM_PTT_RIGCTL_SETCONF", "rts_state=OFF,dtr_state=OFF"),
    audioEnabled: flag("audioEnabled", "MICROHAM_AUDIO_ENABLED", "true"),
    audioAlsaDevice: str("audioAlsaDevice", "MICROHAM_AUDIO_ALSA_DEVICE", "plughw:CARD=CODEC,DEV=0"),
    // Only "ogg" is recognized as an alternative input container; everything
    // else maps to "webm".
    audioInputMime: String(raw("audioInputMime", "MICROHAM_AUDIO_INPUT_MIME", "webm")).trim().toLowerCase() === "ogg" ? "ogg" : "webm",
    audioStopOnDisconnect: flag("audioStopOnDisconnect", "MICROHAM_AUDIO_STOP_ON_DISCONNECT", "true"),
    audioChunkMs: num("audioChunkMs", "MICROHAM_AUDIO_CHUNK_MS", 100, 40, 2000),
    audioSessionTimeoutMs: num("audioSessionTimeoutMs", "MICROHAM_AUDIO_SESSION_TIMEOUT_MS", 120000, 1000, 3600000),
    audioFfmpegPath: optional("audioFfmpegPath", "MICROHAM_AUDIO_FFMPEG_PATH"),
    audioFfmpegExtraArgs: optional("audioFfmpegExtraArgs", "MICROHAM_AUDIO_FFMPEG_EXTRA_ARGS")
  };
}
/**
 * Reports the current PTT state and whether PTT commands are fully configured.
 *
 * @returns {object} { active, commandConfigured, device, enabled }
 */
function pttStatus(ctx, state) {
  const settings = cfg(ctx);
  const hasBothCommands = Boolean(settings.pttDownCommand && settings.pttUpCommand);
  return {
    active: Boolean(state.pttActive),
    commandConfigured: Boolean(settings.pttCommandsEnabled && hasBothCommands),
    device: settings.device,
    enabled: settings.pttCommandsEnabled
  };
}
/**
 * Keys or unkeys PTT by running the configured shell command (or a rigctl
 * batch command that also applies live frequency/mode on key-down).
 *
 * @param {boolean} down - true to key (TX), false to unkey (RX).
 * @param {object} [input] - May carry bandState for template substitution.
 * @returns {Promise<object>} { ok, active, direction }
 * @throws {Error} When PTT is disabled, unconfigured, or the command fails.
 */
async function pttSet(ctx, state, down, input = {}) {
  const settings = cfg(ctx);
  if (!settings.pttCommandsEnabled) {
    throw new Error("MICROHAM_PTT_COMMANDS_ENABLED must be true");
  }
  // Key-down optionally goes through the rigctl batch path, which needs no
  // template because it builds the full command itself.
  const batchDown = Boolean(down && settings.pttApplyBandState);
  const template = down ? settings.pttDownCommand : settings.pttUpCommand;
  if (!template && !batchDown) {
    throw new Error(down ? "MICROHAM_PTT_DOWN_CMD missing" : "MICROHAM_PTT_UP_CMD missing");
  }
  // The device path ends up inside a shell command, so validate it strictly.
  if (!/^\/dev\/[A-Za-z0-9._\/-]+$/.test(settings.device)) {
    throw new Error(`invalid microham device path: ${settings.device}`);
  }
  const command = batchDown
    ? buildRigctlBatchPttDownCommand(settings, input)
    : renderPttCommandTemplate(template, settings.device, input);
  const outcome = await ctx.commandRunner(command, { timeoutMs: settings.pttTimeoutMs });
  if (!outcome.ok) {
    throw new Error(outcome.stderr || outcome.error || `ptt ${down ? "down" : "up"} command failed`);
  }
  // Only record the new PTT state after the command actually succeeded.
  state.pttActive = Boolean(down);
  return {
    ok: true,
    active: state.pttActive,
    direction: down ? "down" : "up"
  };
}
/**
 * Builds a single shell pipeline that feeds rigctl a batch of commands:
 * tune to the live center frequency, set the mode, then key PTT ("T 1").
 *
 * @param {object} settings - Resolved cfg() snapshot (model, baud, device, ...).
 * @param {object} input - Expected to carry input.bandState with centerFreqHz/startMod.
 * @returns {string} Shell command string for ctx.commandRunner.
 * @throws {Error} When live band state is missing or a rigctl setting fails validation.
 */
function buildRigctlBatchPttDownCommand(settings, input) {
  const bandState = input && input.bandState && typeof input.bandState === "object" ? input.bandState : {};
  const parsedFreq = Number(bandState.centerFreqHz);
  const centerFreqHz = Number.isFinite(parsedFreq) ? Math.floor(parsedFreq) : null;
  const startMod = String(bandState.startMod || "").trim().toLowerCase();
  const rigMode = mapStartModToRigMode(startMod);
  if (centerFreqHz === null || !rigMode) {
    throw new Error("live frequency/mode unavailable for rigctl batch PTT down");
  }
  const model = String(settings.pttRigctlModel || "3023").trim();
  const baud = String(settings.pttRigctlBaud || "19200").trim();
  const setConf = String(settings.pttRigctlSetConf || "rts_state=OFF,dtr_state=OFF").trim();
  // Every interpolated value is validated because it lands in a shell command.
  const validations = [
    [/^\d+$/.test(model), `invalid rigctl model: ${model}`],
    [/^\d+$/.test(baud), `invalid rigctl baud: ${baud}`],
    [/^[A-Za-z0-9_=,.-]+$/.test(setConf), `invalid rigctl set-conf: ${setConf}`]
  ];
  for (const [passed, message] of validations) {
    if (!passed) {
      throw new Error(message);
    }
  }
  return `printf '%s\\n' 'F ${centerFreqHz}' 'M ${rigMode} 0' 'T 1' | rigctl -m${model} -r ${settings.device} -s ${baud} --set-conf=${setConf}`;
}
/**
 * Expands a configured PTT command template by replacing placeholder tokens
 * (device path, live frequency, mode) with concrete values.
 *
 * @param {string} template - Command template containing {token} placeholders.
 * @param {string} device - Validated microHAM device path.
 * @param {object} input - May carry input.bandState with centerFreqHz/startMod.
 * @returns {string} Rendered shell command.
 * @throws {Error} When the template references live data that is unavailable.
 */
function renderPttCommandTemplate(template, device, input) {
  const bandState = input && input.bandState && typeof input.bandState === "object" ? input.bandState : {};
  const parsedFreq = Number(bandState.centerFreqHz);
  const centerFreqHz = Number.isFinite(parsedFreq) ? Math.floor(parsedFreq) : null;
  const startMod = String(bandState.startMod || "").trim().toLowerCase();
  const rigMode = mapStartModToRigMode(startMod);
  // Fail loudly when the template demands live data we do not have.
  if (template.includes("{centerFreqHz}") && centerFreqHz === null) {
    throw new Error("center frequency unavailable for PTT command");
  }
  if ((template.includes("{rigMode}") || template.includes("{mode}")) && !rigMode) {
    throw new Error("start mode unavailable for PTT command");
  }
  const freqText = centerFreqHz !== null ? String(centerFreqHz) : "";
  const freqKHzText = centerFreqHz !== null ? String(Math.floor(centerFreqHz / 1000)) : "";
  // Several aliases map to the same value for template-author convenience.
  const substitutions = [
    ["{device}", device],
    ["{pttDevice}", device],
    ["{microhamDevice}", device],
    ["{centerFreqHz}", freqText],
    ["{frequencyHz}", freqText],
    ["{freqHz}", freqText],
    ["{centerFreqKHz}", freqKHzText],
    ["{startMod}", startMod],
    ["{rigMode}", rigMode],
    ["{mode}", rigMode]
  ];
  let rendered = String(template || "");
  for (const [token, value] of substitutions) {
    rendered = rendered.replaceAll(token, value);
  }
  return rendered;
}
/**
 * Maps an OpenWebRX-style demodulator name to a rigctl mode token.
 * All FM variants collapse to "FM"; unknown values pass through uppercased.
 *
 * @param {string} startMod - Demodulator name (case/whitespace insensitive).
 * @returns {string} rigctl mode string, or "" when the input is empty.
 */
function mapStartModToRigMode(startMod) {
  const normalized = String(startMod || "").trim().toLowerCase();
  if (!normalized) {
    return "";
  }
  const modeTable = {
    usb: "USB",
    lsb: "LSB",
    am: "AM",
    fm: "FM",
    wfm: "FM",
    nfm: "FM",
    cw: "CW",
    cwr: "CWR"
  };
  // Object.hasOwn guards against prototype keys like "constructor".
  return Object.hasOwn(modeTable, normalized) ? modeTable[normalized] : normalized.toUpperCase();
}
/**
 * Builds the public status snapshot for the TX audio backend.
 *
 * @param {object} input - Optional; input.userId lets the caller check ownership.
 * @returns {object} Status fields consumed by the web UI / API.
 */
function audioStatus(ctx, state, input) {
  const settings = cfg(ctx);
  const audio = state.audio;
  const callerUserId = String((input && input.userId) || "").trim() || null;
  // Coarse state label for the UI; priority: disabled > running > error.
  let mode;
  if (!settings.audioEnabled) {
    mode = "disabled";
  } else if (audio.running) {
    mode = "running";
  } else if (audio.lastError) {
    mode = "error";
  } else {
    mode = "disconnected";
  }
  return {
    providerId: "rms.microham",
    providerEnabled: true,
    enabled: settings.audioEnabled,
    state: mode,
    running: Boolean(audio.running),
    clients: audio.clients.size,
    ownerUserId: audio.ownerUserId || null,
    ownerMatchesCaller: Boolean(audio.ownerUserId && callerUserId && audio.ownerUserId === callerUserId),
    startedAt: audio.startedAt,
    lastError: audio.lastError,
    lastExit: audio.lastExit,
    ffmpegPath: resolveFfmpegPath(settings.audioFfmpegPath),
    alsaDevice: audio.alsaDevice || settings.audioAlsaDevice,
    chunkMs: settings.audioChunkMs,
    wsPath: "/v1/openwebrx/plugin/audio/ws"
  };
}
/**
 * Starts (or adopts) the audio backend for the given user via the HTTP API.
 *
 * @returns {Promise<object>} { ok, audio: <status snapshot> }
 * @throws {Error} When userId is missing or the backend cannot start.
 */
async function audioConnect(ctx, state, input) {
  const userId = String((input && input.userId) || "").trim();
  if (!userId) {
    throw new Error("microham audio requires userId");
  }
  const reason = String((input && input.reason) || "api-connect") || "api-connect";
  await ensureAudioBackendForOwner(ctx, state, userId, reason);
  return { ok: true, audio: audioStatus(ctx, state, { userId }) };
}
/**
 * Stops the audio backend on explicit API request and returns fresh status.
 */
async function audioDisconnect(ctx, state, input) {
  const reason = String((input && input.reason) || "api-disconnect") || "api-disconnect";
  await stopAudioBackend(ctx, state, reason);
  const userId = String((input && input.userId) || "");
  return { ok: true, audio: audioStatus(ctx, state, { userId }) };
}
/**
 * Attaches a websocket client to the audio session, starting the backend
 * for the given user first if it is not already running.
 *
 * @param {object} input - Requires input.ws (socket) and input.userId.
 * @returns {Promise<object>} { ok, clients: <current client count> }
 * @throws {Error} When ws or userId is missing, or the backend is busy.
 */
async function audioRegisterClient(ctx, state, input) {
  const ws = input && input.ws;
  if (!ws) {
    throw new Error("audioRegisterClient missing ws");
  }
  const userId = String((input && input.userId) || "").trim();
  if (!userId) {
    throw new Error("audioRegisterClient missing userId");
  }
  const reason = String((input && input.reason) || "ws-connect") || "ws-connect";
  await ensureAudioBackendForOwner(ctx, state, userId, reason);
  state.audio.clients.add(ws);
  // A connected client means the session is active again.
  clearIdleTimer(state);
  return { ok: true, clients: state.audio.clients.size };
}
/**
 * Detaches a websocket client. When the last client leaves, either stops the
 * backend immediately (audioStopOnDisconnect) or arms the idle-stop timer.
 *
 * @returns {Promise<object>} { ok, clients: <remaining client count> }
 */
async function audioUnregisterClient(ctx, state, input) {
  const ws = input && input.ws;
  if (ws) {
    state.audio.clients.delete(ws);
  }
  const settings = cfg(ctx);
  const noClientsLeft = state.audio.clients.size === 0;
  if (noClientsLeft && settings.audioStopOnDisconnect) {
    const reason = String((input && input.reason) || "ws-disconnect") || "ws-disconnect";
    await stopAudioBackend(ctx, state, reason);
  } else if (noClientsLeft) {
    scheduleIdleStop(ctx, state);
  }
  return { ok: true, clients: state.audio.clients.size };
}
/**
 * Feeds one compressed audio chunk into ffmpeg's stdin, lazily (re)starting
 * the backend for the sender if it is not writable.
 *
 * @param {object} input - Carries userId, optional ws, and the chunk buffer.
 * @returns {Promise<object>} { ok: true } or { ok: false, skipped: true }.
 */
async function audioWriteChunk(ctx, state, input) {
  const ws = input && input.ws;
  const userId = String((input && input.userId) || "").trim();
  if (!userId) {
    return { ok: false, skipped: true };
  }
  const audio = state.audio;
  const writable = Boolean(audio.running && audio.ffmpeg && audio.ffmpeg.stdin && !audio.ffmpeg.stdin.destroyed);
  if (!writable) {
    await ensureAudioBackendForOwner(ctx, state, userId, "ws-message");
  }
  // Late-register the sending socket so disconnect accounting stays accurate.
  if (ws) {
    audio.clients.add(ws);
  }
  const proc = audio.ffmpeg;
  if (!proc || !proc.stdin || proc.stdin.destroyed) {
    return { ok: false, skipped: true };
  }
  try {
    proc.stdin.write(input && input.chunk ? input.chunk : Buffer.alloc(0));
  } catch {
    // Best-effort write: a failed chunk is dropped rather than killing the stream.
  }
  return { ok: true };
}
/**
 * Arms (or re-arms) a one-shot timer that stops the audio backend after the
 * configured idle period with no connected clients.
 */
function scheduleIdleStop(ctx, state) {
  clearIdleTimer(state);
  const settings = cfg(ctx);
  const timer = setTimeout(() => {
    // Fire-and-forget: an idle stop that fails has nobody to report to.
    stopAudioBackend(ctx, state, "idle-timeout").catch(() => {});
  }, settings.audioSessionTimeoutMs);
  state.audio.idleTimer = timer;
  // Do not keep the Node process alive just for this timer.
  if (typeof timer.unref === "function") {
    timer.unref();
  }
}
/**
 * Cancels the pending idle-stop timer, if any, and clears the handle.
 */
function clearIdleTimer(state) {
  const timer = state.audio.idleTimer;
  if (timer) {
    clearTimeout(timer);
    state.audio.idleTimer = null;
  }
}
/**
 * Ensures a running ffmpeg audio backend owned by `ownerUserId`, starting one
 * if needed. Tries the configured ALSA device first, then common fallbacks.
 *
 * @param {object} ctx - Host context (settings, emit).
 * @param {object} state - Plugin state; state.audio is mutated here.
 * @param {string} ownerUserId - User who will own the audio session.
 * @param {string} reason - Free-text reason recorded in the start event.
 * @throws {Error} When audio is disabled, another user owns a session with
 *   active clients, or no candidate device could be started.
 */
async function ensureAudioBackendForOwner(ctx, state, ownerUserId, reason) {
const c = cfg(ctx);
if (!c.audioEnabled) {
throw new Error("MICROHAM_AUDIO_ENABLED=false");
}
// Backend already running: hand ownership over only when no clients remain;
// otherwise a different user is rejected below.
if (state.audio.running) {
if (state.audio.ownerUserId && state.audio.ownerUserId !== ownerUserId) {
const hasClients = state.audio.clients.size > 0;
if (!hasClients) {
// No listeners left: stop the previous owner's backend and give ffmpeg a
// short moment to release the ALSA device before the restart below.
await stopAudioBackend(ctx, state, "owner-handover");
await waitMs(100);
}
}
// Re-check after the possible handover: still running under another owner
// means the session is genuinely busy.
if (state.audio.running && state.audio.ownerUserId && state.audio.ownerUserId !== ownerUserId) {
// German: "TX audio is already in use by another user"
throw new Error("TX Audio wird bereits von einem anderen Benutzer verwendet");
}
state.audio.ownerUserId = ownerUserId;
clearIdleTimer(state);
return;
}
clearIdleTimer(state);
const startupErrors = [];
// Candidate ALSA devices: configured device first, then common fallbacks,
// trimmed and de-duplicated.
const candidates = [...new Set([c.audioAlsaDevice, "default", "plughw:0,0"].map((s) => String(s || "").trim()).filter(Boolean))];
for (const candidateDevice of candidates) {
state.audio.lastError = null;
let proc;
try {
proc = spawnAudioFfmpeg(c, candidateDevice);
} catch (error) {
startupErrors.push(`${candidateDevice}: ${String(error && error.message ? error.message : error)}`);
continue;
}
// Optimistically mark the backend as running; the "close" handler below
// rolls all of this back if ffmpeg exits during the startup grace period.
state.audio.ffmpeg = proc;
state.audio.running = true;
state.audio.startedAt = new Date().toISOString();
state.audio.ownerUserId = ownerUserId;
state.audio.alsaDevice = candidateDevice;
state.audio.stopRequested = false;
// Keep a rolling tail (last 4000 chars) of stderr for exit diagnostics.
let stderrBuffer = "";
if (proc.stderr) {
proc.stderr.on("data", (chunk) => {
const text = String(chunk || "");
stderrBuffer = `${stderrBuffer}${text}`.slice(-4000);
// Only record errors while we are not deliberately stopping.
if (!state.audio.stopRequested && text.trim()) {
state.audio.lastError = text.trim();
}
});
}
proc.on("error", (error) => {
if (!state.audio.stopRequested) {
state.audio.lastError = String(error && error.message ? error.message : error);
}
});
proc.on("close", (code, signal) => {
// Record the exit, reset all backend state, and disconnect every websocket
// client — there is no audio sink for them anymore.
state.audio.lastExit = {
at: new Date().toISOString(),
code: Number.isFinite(Number(code)) ? Number(code) : null,
signal: signal || null,
stderr: stderrBuffer || null
};
state.audio.running = false;
state.audio.ffmpeg = null;
state.audio.startedAt = null;
state.audio.ownerUserId = null;
state.audio.alsaDevice = null;
state.audio.stopRequested = false;
clearIdleTimer(state);
for (const client of state.audio.clients) {
try {
client.close(1011, "audio backend closed");
} catch {
// ignore
}
}
state.audio.clients.clear();
});
// Startup grace period: if ffmpeg died immediately (bad device, bad args),
// the close handler has cleared `running` by now and the next device is tried.
await waitMs(180);
if (state.audio.running) {
ctx.emit("microham.audio.start", { reason, alsaDevice: candidateDevice });
return;
}
startupErrors.push(`${candidateDevice}: ${state.audio.lastError || "start failed"}`);
}
state.audio.alsaDevice = null;
// German fallback message: "microHAM audio backend could not be started"
throw new Error(startupErrors.length > 0 ? startupErrors.join(" | ") : "microHAM Audio Backend konnte nicht gestartet werden");
}
/**
 * Stops the ffmpeg audio backend (if any) and disconnects every client.
 * Graceful first (close stdin, let ffmpeg drain), then SIGTERM if it is
 * still alive after a short grace period. Final state cleanup happens in the
 * process "close" handler installed at spawn time.
 *
 * @param {string} reason - Free-text reason recorded in the stop event.
 */
async function stopAudioBackend(ctx, state, reason) {
clearIdleTimer(state);
const proc = state.audio.ffmpeg;
// Set intent first so the stderr/error handlers stop recording lastError.
state.audio.stopRequested = true;
if (!proc || !state.audio.running) {
// Nothing to kill: just normalize state and drop any lingering clients.
resetAudioState(state);
return;
}
for (const client of state.audio.clients) {
try {
client.close(1000, "audio disconnected");
} catch {
// ignore
}
}
state.audio.clients.clear();
// Close stdin first so ffmpeg can drain its input and exit on its own.
try {
if (proc.stdin && !proc.stdin.destroyed) {
proc.stdin.end();
}
} catch {
// ignore
}
await waitMs(150);
// Still running after the grace period: escalate to SIGTERM.
if (state.audio.running && !proc.killed) {
try {
proc.kill("SIGTERM");
} catch {
// ignore
}
}
state.audio.lastError = null;
ctx.emit("microham.audio.stop", { reason });
}
/**
 * Resets all backend-related audio state fields and disconnects every
 * remaining websocket client with a normal-closure code.
 */
function resetAudioState(state) {
  const audio = state.audio;
  audio.running = false;
  audio.ffmpeg = null;
  audio.ownerUserId = null;
  audio.alsaDevice = null;
  audio.stopRequested = false;
  audio.lastError = null;
  for (const client of audio.clients) {
    try {
      client.close(1000, "audio disconnected");
    } catch {
      // A client that fails to close is dropped from the set anyway.
    }
  }
  audio.clients.clear();
}
/**
 * Spawns ffmpeg to decode compressed browser audio from stdin and play it
 * on the given ALSA device as stereo output.
 *
 * @param {object} c - Resolved cfg() snapshot.
 * @param {string} alsaDevice - ALSA device name to use for output.
 * @returns {ChildProcess} The spawned ffmpeg process (stdin/stdout/stderr piped).
 * @throws {Error} When no ffmpeg binary can be resolved.
 */
function spawnAudioFfmpeg(c, alsaDevice) {
  const ffmpegPath = resolveFfmpegPath(c.audioFfmpegPath);
  if (!ffmpegPath) {
    throw new Error("ffmpeg binary not found (set MICROHAM_AUDIO_FFMPEG_PATH)");
  }
  // Input side: low-latency decode of the browser container from stdin,
  // downmixed/forced to 2 channels.
  const inputArgs = [
    "-hide_banner",
    "-loglevel", "warning",
    "-fflags", "+nobuffer",
    "-flags", "low_delay",
    "-thread_queue_size", "1024",
    "-f", c.audioInputMime,
    "-i", "pipe:0",
    "-ac", "2"
  ];
  // Output side: the ALSA sink. Extra user args go between input and output
  // options, matching the original insertion point before "-f alsa".
  const outputArgs = ["-f", "alsa", alsaDevice];
  const extraArgs = splitCommand(c.audioFfmpegExtraArgs);
  const args = [...inputArgs, ...extraArgs, ...outputArgs];
  return spawn(ffmpegPath, args, {
    stdio: ["pipe", "pipe", "pipe"],
    cwd: process.cwd(),
    env: process.env
  });
}
/**
 * Resolves which ffmpeg binary to invoke.
 *
 * Resolution order:
 *  - the configured path, if it exists on disk;
 *  - a well-known Linux install location whose basename matches;
 *  - the bare command name, deferring to PATH lookup at spawn time.
 *
 * @param {string} configured - Configured path or command name; may be empty.
 * @returns {string} Absolute path or bare command name for child_process.spawn.
 */
function resolveFfmpegPath(configured) {
  const requested = String(configured || "").trim();
  const wellKnown = ["/usr/bin/ffmpeg", "/usr/local/bin/ffmpeg", "/bin/ffmpeg"];
  if (!requested) {
    // Nothing configured: prefer an installed well-known binary on Linux.
    if (process.platform === "linux") {
      const installed = wellKnown.find((candidate) => fs.existsSync(candidate));
      if (installed) {
        return installed;
      }
    }
    return "ffmpeg";
  }
  const looksLikePath = requested.includes(path.sep) || requested.includes("/");
  if (looksLikePath) {
    if (fs.existsSync(requested)) {
      return requested;
    }
    // Configured path is stale: try a well-known binary with the same name,
    // otherwise fall back to the bare name for PATH lookup.
    const baseName = path.basename(requested) || "ffmpeg";
    const sameName = wellKnown.find((candidate) => fs.existsSync(candidate) && path.basename(candidate) === baseName);
    return sameName || baseName;
  }
  // Bare command name: on Linux prefer a matching well-known location.
  if (process.platform === "linux") {
    const sameName = wellKnown.find((candidate) => fs.existsSync(candidate) && path.basename(candidate) === requested);
    if (sameName) {
      return sameName;
    }
  }
  return requested;
}
/**
 * Splits a command string into argv-style tokens: whitespace separates
 * tokens, double-quoted segments stay together, and one leading plus one
 * trailing quote are stripped from each token.
 *
 * @param {string} commandString - Raw extra-args string; may be empty/null.
 * @returns {string[]} Token list ([] for empty input).
 */
function splitCommand(commandString) {
  const tokens = String(commandString || "").match(/(?:[^\s"]+|"[^"]*")+/g);
  if (!tokens) {
    return [];
  }
  return tokens.map((token) => {
    let cleaned = token;
    if (cleaned.startsWith('"')) {
      cleaned = cleaned.slice(1);
    }
    if (cleaned.endsWith('"')) {
      cleaned = cleaned.slice(0, -1);
    }
    return cleaned;
  });
}
/**
 * Coerces a value to an integer clamped into [min, max].
 *
 * @param {*} value - Anything Number() can coerce.
 * @param {number} min - Inclusive lower bound.
 * @param {number} max - Inclusive upper bound.
 * @param {number} fallback - Returned when the value is not a finite number.
 * @returns {number} Floored, clamped integer or the fallback.
 */
function clampNum(value, min, max, fallback) {
  const parsed = Number(value);
  if (!Number.isFinite(parsed)) {
    return fallback;
  }
  const whole = Math.floor(parsed);
  return Math.max(min, Math.min(max, whole));
}
/**
 * Parses a permissive boolean flag: "1", "true", "yes", "on" (any casing,
 * surrounding whitespace ignored) are truthy; everything else is false.
 *
 * @param {*} value - Setting or env value.
 * @returns {boolean}
 */
function asBool(value) {
  const normalized = String(value || "").trim().toLowerCase();
  return ["1", "true", "yes", "on"].includes(normalized);
}
/**
 * Resolves after the given number of milliseconds; non-numeric input
 * collapses to 0 (immediate resolution on the next tick).
 *
 * @param {number|string} ms - Delay in milliseconds.
 * @returns {Promise<void>}
 */
async function waitMs(ms) {
  const delayMs = Number(ms) || 0;
  await new Promise((resolve) => {
    setTimeout(resolve, delayMs);
  });
}
// Public plugin factory; the RMS host requires this module and calls
// createPlugin(ctx) once per plugin instance.
module.exports = {
createPlugin
};