initial deployment v1.0
This commit is contained in:
273
scripts/import-personalities.js
Normal file
273
scripts/import-personalities.js
Normal file
@@ -0,0 +1,273 @@
|
||||
#!/usr/bin/env node
/**
 * Bulk import personalities from a JSON manifest + directory of .prs files.
 *
 * Usage:
 *   node scripts/import-personalities.js <json-file> <prs-dir> [options]
 *
 * Options:
 *   --dry-run        Print what would be imported without writing to DB
 *   --creator        Creator handle to tag all imports with (default: "ETC Library")
 *   --skip-existing  Skip fixtures already in DB (matched by prs_name + manufacturer)
 *
 * Examples:
 *   node scripts/import-personalities.js personalities.json ./prs
 *   node scripts/import-personalities.js personalities.json ./prs --dry-run
 *   node scripts/import-personalities.js personalities.json ./prs --creator "Raine"
 */

import { readFileSync, existsSync } from 'fs';
// FIX: 'path' was imported twice (join/resolve here, dirname in a second
// statement below); merged into a single import.
import { join, resolve, dirname } from 'path';
import { randomBytes } from 'crypto'; // NOTE(review): appears unused in this script — confirm before removing
import Database from 'better-sqlite3';
import bcrypt from 'bcryptjs';
import { config } from 'dotenv';
import { fileURLToPath } from 'url';
import { nanoid } from 'nanoid';

// Resolve this script's directory so the repo-root .env loads regardless of
// the current working directory.
const __dirname = dirname(fileURLToPath(import.meta.url));
config({ path: join(__dirname, '..', '.env') });
// ── Args ─────────────────────────────────────────────────────────
// Positional: <json-file> <prs-dir>. Flags may appear in any order after them.
const args = process.argv.slice(2);
const jsonPath = args[0];
const prsDir = args[1];
const DRY_RUN = args.includes('--dry-run');
const SKIP_EXISTING = args.includes('--skip-existing');

const creatorIdx = args.indexOf('--creator');
// FIX: `--creator` passed as the last argument used to yield an undefined
// creator_handle on every imported row; fall back to the default instead.
const CREATOR =
  creatorIdx !== -1 && args[creatorIdx + 1] !== undefined
    ? args[creatorIdx + 1]
    : 'ETC Library';
// Abort the run with a message on stderr and a non-zero exit code.
function fail(message) {
  console.error(message);
  process.exit(1);
}

if (!jsonPath || !prsDir) {
  fail('Usage: node scripts/import-personalities.js <json-file> <prs-dir> [--dry-run] [--skip-existing] [--creator <name>]');
}
if (!existsSync(jsonPath)) {
  fail(`JSON file not found: ${jsonPath}`);
}
if (!existsSync(prsDir)) {
  fail(`PRS directory not found: ${prsDir}`);
}
// ── Constants ────────────────────────────────────────────────────
const FILE_SIZE = 540; // ETC PRS files are always 540 bytes
const NAME_LEN = 12;
const NAME_OFFSET = 0;

/**
 * Extract the fixture name embedded in a PRS binary: an at-most-12-byte,
 * NUL-terminated field at offset 0, decoded one byte per character and
 * whitespace-trimmed.
 *
 * @param {Buffer|number[]} bytes - raw PRS file contents
 * @returns {string} decoded, trimmed name (may be empty)
 */
function readPrsName(bytes) {
  const field = bytes.slice(NAME_OFFSET, NAME_OFFSET + NAME_LEN);
  const chars = [];
  for (const byte of field) {
    if (byte === 0) break; // NUL terminates the name field
    chars.push(String.fromCharCode(byte));
  }
  return chars.join('').trim();
}
// ── DB setup ─────────────────────────────────────────────────────
const dbPath = process.env.DATABASE_URL ?? './dev.db';
const db = new Database(dbPath);
db.pragma('journal_mode = WAL');
db.pragma('foreign_keys = ON');

// Ensure base table exists (safe no-op if already present)
db.exec(`
  CREATE TABLE IF NOT EXISTS personalities (
    id TEXT PRIMARY KEY,
    name TEXT NOT NULL,
    prs_name TEXT,
    file_name TEXT,
    notes TEXT,
    data BLOB NOT NULL,
    manufacturer TEXT,
    tags TEXT NOT NULL DEFAULT '[]',
    channel_count INTEGER NOT NULL,
    created_at TEXT NOT NULL,
    creator_handle TEXT,
    view_count INTEGER NOT NULL DEFAULT 0,
    owner_token_hash TEXT NOT NULL,
    deleted_at TEXT DEFAULT NULL
  )
`);

// One-off migration: databases created before prs_name existed get the column.
const hasPrsName = db
  .prepare('PRAGMA table_info(personalities)')
  .all()
  .some((row) => row.name === 'prs_name');
if (!hasPrsName) {
  db.exec('ALTER TABLE personalities ADD COLUMN prs_name TEXT DEFAULT NULL');
  console.log(' ℹ Added prs_name column to existing DB.');
}
// Insert one personality row; parameters are bound by name from a record
// object (see the import loop below). owner_token_hash is required by the
// schema even for bulk library imports.
const insertStmt = db.prepare(`
INSERT INTO personalities
(id, name, prs_name, file_name, notes, data, manufacturer, tags,
channel_count, created_at, creator_handle, owner_token_hash)
VALUES
(@id, @name, @prs_name, @file_name, @notes, @data, @manufacturer, @tags,
@channel_count, @created_at, @creator_handle, @owner_token_hash)
`);

// Duplicate probe for --skip-existing: a live (non-deleted) row with the same
// embedded PRS name and manufacturer counts as "already imported".
const existsStmt = db.prepare(`
SELECT id FROM personalities
WHERE prs_name = ? AND manufacturer = ? AND deleted_at IS NULL
LIMIT 1
`);
// ── Slug helper ──────────────────────────────────────────────────
/**
 * URL-safe slug: lowercase, runs of non-alphanumerics collapsed to single
 * hyphens, no leading/trailing hyphen, capped at 80 characters.
 * NOTE(review): appears unused in this script — confirm before removing.
 *
 * @param {string} str
 * @returns {string}
 */
function makeSlug(str) {
  return str
    .toLowerCase()
    .split(/[^a-z0-9]+/)
    .filter(Boolean)
    .join('-')
    .slice(0, 80);
}
// ── Load manifest ─────────────────────────────────────────────────
const manifest = JSON.parse(readFileSync(jsonPath, 'utf8'));

// Banner: echo every knob back so a bad invocation is obvious before any work.
const rule = '─'.repeat(50);
console.log(`\nETC PRS Bulk Import`);
console.log(rule);
console.log(` Manifest: ${jsonPath}`);
console.log(` PRS dir: ${resolve(prsDir)}`);
console.log(` Database: ${dbPath}`);
console.log(` Creator: ${CREATOR}`);
console.log(` Dry run: ${DRY_RUN ? 'YES — nothing will be written' : 'no'}`);
console.log(` Total packs: ${manifest.total_packs}`);
console.log(` Total PRS: ${manifest.total_fixtures}`);
console.log(`${rule}\n`);
// ── Import ───────────────────────────────────────────────────────
// Tallies for the end-of-run summary.
let imported = 0; // records inserted (or that would be, in dry-run)
let skipped = 0;  // items deliberately passed over
let missing = 0;  // manifest entries whose .prs file could not be found
let errors = 0;   // files that failed validation (wrong size)
let existing = 0; // duplicates skipped via --skip-existing

// Use a single bcrypt hash for all imports (avoids 160 × slow bcrypt calls)
// Each entry gets a unique nanoid token; we hash one representative value.
// In practice these are bulk "library" entries — owner-token deletion is
// less relevant, but we still store a valid hash so the schema stays consistent.
// NOTE(review): sharedTokenBase is never printed or persisted anywhere, so
// imported rows can never be owner-deleted via token — confirm that's intended.
const sharedTokenBase = nanoid(32);
const sharedTokenHash = DRY_RUN ? 'dryrun' : bcrypt.hashSync(sharedTokenBase, 10);

// Single transaction for the whole batch: all-or-nothing, and far faster than
// autocommitting one INSERT per row.
const doImport = db.transaction((records) => {
  for (const r of records) {
    insertStmt.run(r);
  }
});

// Records staged by the loop below; written at the end unless --dry-run.
const batchRecords = [];
// Walk every pack in the manifest, resolve each fixture's .prs binary,
// validate it, and stage a DB record (or just log it in dry-run mode).
for (const pack of manifest.packs) {
  const { manufacturer, category, fixtures, prs_files } = pack;

  if (!fixtures || fixtures.length === 0) {
    console.log(` ⚪ ${manufacturer} — no fixtures, skipping pack`);
    skipped++; // FIX: the summary's "Skipped" tally was never incremented
    continue;
  }

  console.log(` 📦 ${pack.name} (${fixtures.length} fixtures)`);

  for (const fixture of fixtures) {
    const { fixture_name, channels, mode_info, prs_file } = fixture;

    // Find the matching .prs filename from the pack's prs_files list.
    // FIX: a fixture without a prs_file key (or a pack without prs_files)
    // used to throw TypeError here; count it as missing instead.
    const prsFileName = prs_file
      ? (prs_files ?? []).find((f) => f.toLowerCase().includes(prs_file.toLowerCase()))
      : undefined;

    if (!prsFileName) {
      console.log(` ✗ ${fixture_name} — no matching PRS file for key "${prs_file}"`);
      missing++;
      continue;
    }

    const prsPath = join(prsDir, prsFileName);
    if (!existsSync(prsPath)) {
      console.log(` ✗ ${fixture_name} — file not found: ${prsFileName}`);
      missing++;
      continue;
    }

    // Read and validate binary — ETC PRS files have a fixed size.
    const data = readFileSync(prsPath);
    if (data.length !== FILE_SIZE) {
      console.log(` ✗ ${fixture_name} — invalid file size ${data.length} (expected ${FILE_SIZE})`);
      errors++;
      continue;
    }

    // Name embedded in the binary — authoritative for deduplication.
    const prsName = readPrsName(data);

    // Check for an existing live entry when --skip-existing is set.
    if (SKIP_EXISTING) {
      const dup = existsStmt.get(prsName, manufacturer);
      if (dup) {
        console.log(` ~ ${fixture_name} — already in DB, skipping`);
        existing++;
        continue;
      }
    }

    // Build display name: "Fixture Name (mode_info)" if mode_info available
    const displayName = mode_info
      ? `${fixture_name} (${mode_info})`
      : fixture_name;

    // Tags: mode_info + category, lowercased
    const tags = [];
    if (mode_info) tags.push(mode_info.toLowerCase());
    if (category) tags.push(category.toLowerCase());

    const channelCount = channels ?? data[0x0d]; // use JSON value or read from binary
    const now = new Date().toISOString();
    const id = nanoid(10);

    const record = {
      id,
      name: displayName.slice(0, 120),
      prs_name: prsName.slice(0, NAME_LEN),
      file_name: prsFileName,
      notes: '',
      data: data,
      manufacturer: manufacturer,
      tags: JSON.stringify(tags),
      channel_count: channelCount,
      created_at: now,
      creator_handle: CREATOR,
      owner_token_hash: sharedTokenHash,
    };

    if (DRY_RUN) {
      console.log(` ✓ [DRY] ${displayName} — ${channelCount}ch — PRS: ${prsName} — ${prsFileName}`);
    } else {
      batchRecords.push(record);
      console.log(` ✓ ${displayName} — ${channelCount}ch — PRS: ${prsName}`);
    }

    imported++;
  }
}
// Persist every staged record atomically — either the whole batch lands
// or none of it does.
if (!DRY_RUN && batchRecords.length > 0) {
  doImport(batchRecords);
}

// ── Summary ───────────────────────────────────────────────────────
const divider = '─'.repeat(50);
console.log(`\n${divider}`);
if (DRY_RUN) {
  console.log(` DRY RUN — no changes made to database`);
  console.log(` Would import: ${imported}`);
} else {
  console.log(` ✓ Imported: ${imported}`);
}

// Only surface the tallies that are non-zero.
const tallies = [
  [existing, ` ~ Skipped (existing): ${existing}`],
  [skipped, ` ⚪ Skipped: ${skipped}`],
  [missing, ` ✗ Missing: ${missing}`],
  [errors, ` ✗ Errors: ${errors}`],
];
for (const [count, line] of tallies) {
  if (count > 0) console.log(line);
}
console.log(`${divider}\n`);

db.close();
Reference in New Issue
Block a user