Diffstat (limited to 'src')
-rw-r--r--   src/server/anki/index.ts        9
-rw-r--r--   src/server/anki/parser.test.ts  526
-rw-r--r--   src/server/anki/parser.ts       374
3 files changed, 909 insertions, 0 deletions
diff --git a/src/server/anki/index.ts b/src/server/anki/index.ts
new file mode 100644
index 0000000..67e81de
--- /dev/null
+++ b/src/server/anki/index.ts
@@ -0,0 +1,9 @@
+export {
+  type AnkiCard,
+  type AnkiDeck,
+  type AnkiModel,
+  type AnkiNote,
+  type AnkiPackage,
+  listAnkiPackageContents,
+  parseAnkiPackage,
+} from "./parser.js";
diff --git a/src/server/anki/parser.test.ts b/src/server/anki/parser.test.ts
new file mode 100644
index 0000000..61a6832
--- /dev/null
+++ b/src/server/anki/parser.test.ts
@@ -0,0 +1,526 @@
+import { randomBytes } from "node:crypto";
+import { mkdir, rm, writeFile } from "node:fs/promises";
+import { tmpdir } from "node:os";
+import { join } from "node:path";
+import { DatabaseSync } from "node:sqlite";
+import { deflateRawSync } from "node:zlib";
+import { afterAll, beforeAll, describe, expect, it } from "vitest";
+import {
+  type AnkiPackage,
+  listAnkiPackageContents,
+  parseAnkiPackage,
+} from "./parser.js";
+
+/**
+ * Create a minimal ZIP file with the given entries
+ */
+function createZip(entries: Map<string, Buffer>): Buffer {
+  const chunks: Buffer[] = [];
+  const centralDirectory: Buffer[] = [];
+  let offset = 0;
+
+  for (const [name, data] of entries) {
+    const nameBuffer = Buffer.from(name, "utf8");
+    const compressedData = deflateRawSync(data);
+
+    // Local file header
+    const localHeader = Buffer.alloc(30 + nameBuffer.length);
+    localHeader.writeUInt32LE(0x04034b50, 0); // signature
+    localHeader.writeUInt16LE(20, 4); // version needed
+    localHeader.writeUInt16LE(0, 6); // flags
+    localHeader.writeUInt16LE(8, 8); // compression method (deflate)
+    localHeader.writeUInt16LE(0, 10); // mod time
+    localHeader.writeUInt16LE(0, 12); // mod date
+    localHeader.writeUInt32LE(0, 14); // crc32 (not validated in our parser)
+    localHeader.writeUInt32LE(compressedData.length, 18); // compressed size
+    localHeader.writeUInt32LE(data.length, 22); // uncompressed size
+    localHeader.writeUInt16LE(nameBuffer.length, 26); // file name length
+    localHeader.writeUInt16LE(0, 28); // extra field length
+    nameBuffer.copy(localHeader, 30);
+
+    // Central directory entry
+    const centralEntry = Buffer.alloc(46 + nameBuffer.length);
+    centralEntry.writeUInt32LE(0x02014b50, 0); // signature
+    centralEntry.writeUInt16LE(20, 4); // version made by
+    centralEntry.writeUInt16LE(20, 6); // version needed
+    centralEntry.writeUInt16LE(0, 8); // flags
+    centralEntry.writeUInt16LE(8, 10); // compression method
+    centralEntry.writeUInt16LE(0, 12); // mod time
+    centralEntry.writeUInt16LE(0, 14); // mod date
+    centralEntry.writeUInt32LE(0, 16); // crc32
+    centralEntry.writeUInt32LE(compressedData.length, 20); // compressed size
+    centralEntry.writeUInt32LE(data.length, 24); // uncompressed size
+    centralEntry.writeUInt16LE(nameBuffer.length, 28); // file name length
+    centralEntry.writeUInt16LE(0, 30); // extra field length
+    centralEntry.writeUInt16LE(0, 32); // comment length
+    centralEntry.writeUInt16LE(0, 34); // disk number
+    centralEntry.writeUInt16LE(0, 36); // internal attributes
+    centralEntry.writeUInt32LE(0, 38); // external attributes
+    centralEntry.writeUInt32LE(offset, 42); // offset of local header
+    nameBuffer.copy(centralEntry, 46);
+
+    centralDirectory.push(centralEntry);
+    chunks.push(localHeader, compressedData);
+    offset += localHeader.length + compressedData.length;
+  }
+
+  // Central directory
+  const centralDirOffset = offset;
+  const centralDirBuffer = Buffer.concat(centralDirectory);
+  chunks.push(centralDirBuffer);
+
+  // End of central directory
+  const endRecord = Buffer.alloc(22);
+  endRecord.writeUInt32LE(0x06054b50, 0); // signature
+  endRecord.writeUInt16LE(0, 4); // disk number
+  endRecord.writeUInt16LE(0, 6); // disk with central dir
+  endRecord.writeUInt16LE(entries.size, 8); // entries on this disk
+  endRecord.writeUInt16LE(entries.size, 10); // total entries
+  endRecord.writeUInt32LE(centralDirBuffer.length, 12); // central dir size
+  endRecord.writeUInt32LE(centralDirOffset, 16); // central dir offset
+  endRecord.writeUInt16LE(0, 20); // comment length
+  chunks.push(endRecord);
+
+  return Buffer.concat(chunks);
+}
+
+/**
+ * Create a test Anki SQLite database
+ */
+function createTestAnkiDb(dbPath: string): void {
+  const db = new DatabaseSync(dbPath);
+
+  // Create tables
+  db.exec(`
+    CREATE TABLE col (
+      id INTEGER PRIMARY KEY,
+      crt INTEGER NOT NULL,
+      mod INTEGER NOT NULL,
+      scm INTEGER NOT NULL,
+      ver INTEGER NOT NULL,
+      dty INTEGER NOT NULL,
+      usn INTEGER NOT NULL,
+      ls INTEGER NOT NULL,
+      conf TEXT NOT NULL,
+      models TEXT NOT NULL,
+      decks TEXT NOT NULL,
+      dconf TEXT NOT NULL,
+      tags TEXT NOT NULL
+    )
+  `);
+
+  db.exec(`
+    CREATE TABLE notes (
+      id INTEGER PRIMARY KEY,
+      guid TEXT NOT NULL,
+      mid INTEGER NOT NULL,
+      mod INTEGER NOT NULL,
+      usn INTEGER NOT NULL,
+      tags TEXT NOT NULL,
+      flds TEXT NOT NULL,
+      sfld TEXT NOT NULL,
+      csum INTEGER NOT NULL,
+      flags INTEGER NOT NULL,
+      data TEXT NOT NULL
+    )
+  `);
+
+  db.exec(`
+    CREATE TABLE cards (
+      id INTEGER PRIMARY KEY,
+      nid INTEGER NOT NULL,
+      did INTEGER NOT NULL,
+      ord INTEGER NOT NULL,
+      mod INTEGER NOT NULL,
+      usn INTEGER NOT NULL,
+      type INTEGER NOT NULL,
+      queue INTEGER NOT NULL,
+      due INTEGER NOT NULL,
+      ivl INTEGER NOT NULL,
+      factor INTEGER NOT NULL,
+      reps INTEGER NOT NULL,
+      lapses INTEGER NOT NULL,
+      left INTEGER NOT NULL,
+      odue INTEGER NOT NULL,
+      odid INTEGER NOT NULL,
+      flags INTEGER NOT NULL,
+      data TEXT NOT NULL
+    )
+  `);
+
+  // Insert collection data
+  const decks = {
+    "1": { id: 1, name: "Default", desc: "" },
+    "1234567890123": {
+      id: 1234567890123,
+      name: "Test Deck",
+      desc: "A test deck",
+    },
+  };
+
+  const models = {
+    "9876543210987": {
+      id: 9876543210987,
+      name: "Basic",
+      flds: [{ name: "Front" }, { name: "Back" }],
+      tmpls: [{ name: "Card 1", qfmt: "{{Front}}", afmt: "{{Back}}" }],
+    },
+  };
+
+  const insertCol = db.prepare(`
+    INSERT INTO col (id, crt, mod, scm, ver, dty, usn, ls, conf, models, decks, dconf, tags)
+    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+  `);
+  insertCol.run(
+    1,
+    1600000000,
+    1600000001000,
+    1600000000000,
+    11,
+    0,
+    -1,
+    0,
+    "{}",
+    JSON.stringify(models),
+    JSON.stringify(decks),
+    "{}",
+    "{}",
+  );
+
+  // Insert test notes
+  const insertNote = db.prepare(`
+    INSERT INTO notes (id, guid, mid, mod, usn, tags, flds, sfld, csum, flags, data)
+    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+  `);
+
+  // Note 1: Simple card
+  insertNote.run(
+    1000000000001,
+    "abc123",
+    9876543210987,
+    1600000001,
+    -1,
+    " vocabulary test ",
+    "Hello\x1fWorld",
+    "Hello",
+    12345,
+    0,
+    "",
+  );
+
+  // Note 2: Card with multiple tags
+  insertNote.run(
+    1000000000002,
+    "def456",
+    9876543210987,
+    1600000002,
+    -1,
+    " japanese kanji n5 ",
+    "日本語\x1fJapanese",
+    "日本語",
+    67890,
+    0,
+    "",
+  );
+
+  // Note 3: Card with no tags
+  insertNote.run(
+    1000000000003,
+    "ghi789",
+    9876543210987,
+    1600000003,
+    -1,
+    "",
+    "Question\x1fAnswer",
+    "Question",
+    11111,
+    0,
+    "",
+  );
+
+  // Insert test cards
+  const insertCard = db.prepare(`
+    INSERT INTO cards (id, nid, did, ord, mod, usn, type, queue, due, ivl, factor, reps, lapses, left, odue, odid, flags, data)
+    VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+  `);
+
+  // Card for note 1 (new card)
+  insertCard.run(
+    2000000000001,
+    1000000000001,
+    1234567890123,
+    0,
+    1600000001,
+    -1,
+    0,
+    0,
+    1,
+    0,
+    0,
+    0,
+    0,
+    0,
+    0,
+    0,
+    0,
+    "",
+  );
+
+  // Card for note 2 (review card)
+  insertCard.run(
+    2000000000002,
+    1000000000002,
+    1234567890123,
+    0,
+    1600000002,
+    -1,
+    2,
+    2,
+    100,
+    30,
+    2500,
+    5,
+    1,
+    0,
+    0,
+    0,
+    0,
+    "",
+  );
+
+  // Card for note 3 (learning card)
+  insertCard.run(
+    2000000000003,
+    1000000000003,
+    1234567890123,
+    0,
+    1600000003,
+    -1,
+    1,
+    1,
+    1600100000,
+    1,
+    2500,
+    1,
+    0,
+    1001,
+    0,
+    0,
+    0,
+    "",
+  );
+
+  db.close();
+}
+
+describe("Anki Parser", () => {
+  let tempDir: string;
+  let testApkgPath: string;
+
+  beforeAll(async () => {
+    // Create temp directory
+    tempDir = join(tmpdir(), `kioku-test-${randomBytes(8).toString("hex")}`);
+    await mkdir(tempDir, { recursive: true });
+
+    // Create test database
+    const dbPath = join(tempDir, "collection.anki2");
+    createTestAnkiDb(dbPath);
+
+    // Read the database file
+    const { readFile } = await import("node:fs/promises");
+    const dbBuffer = await readFile(dbPath);
+
+    // Create media file (empty JSON object)
+    const mediaBuffer = Buffer.from("{}", "utf8");
+
+    // Create ZIP with database and media
+    const zipEntries = new Map<string, Buffer>();
+    zipEntries.set("collection.anki2", dbBuffer);
+    zipEntries.set("media", mediaBuffer);
+
+    const zipBuffer = createZip(zipEntries);
+
+    // Write the .apkg file
+    testApkgPath = join(tempDir, "test.apkg");
+    await writeFile(testApkgPath, zipBuffer);
+  });
+
+  afterAll(async () => {
+    // Clean up
+    await rm(tempDir, { recursive: true, force: true });
+  });
+
+  describe("listAnkiPackageContents", () => {
+    it("should list files in the package", async () => {
+      const files = await listAnkiPackageContents(testApkgPath);
+
+      expect(files).toContain("collection.anki2");
+      expect(files).toContain("media");
+    });
+  });
+
+  describe("parseAnkiPackage", () => {
+    let result: AnkiPackage;
+
+    beforeAll(async () => {
+      result = await parseAnkiPackage(testApkgPath);
+    });
+
+    it("should parse decks correctly", () => {
+      expect(result.decks.length).toBe(2);
+
+      const testDeck = result.decks.find((d) => d.name === "Test Deck");
+      expect(testDeck).toBeDefined();
+      expect(testDeck?.description).toBe("A test deck");
+
+      const defaultDeck = result.decks.find((d) => d.name === "Default");
+      expect(defaultDeck).toBeDefined();
+    });
+
+    it("should parse models correctly", () => {
+      expect(result.models.length).toBe(1);
+
+      const basicModel = result.models[0];
+      expect(basicModel).toBeDefined();
+      expect(basicModel?.name).toBe("Basic");
+      expect(basicModel?.fields).toEqual(["Front", "Back"]);
+      expect(basicModel?.templates.length).toBe(1);
+      expect(basicModel?.templates[0]?.name).toBe("Card 1");
+      expect(basicModel?.templates[0]?.qfmt).toBe("{{Front}}");
+      expect(basicModel?.templates[0]?.afmt).toBe("{{Back}}");
+    });
+
+    it("should parse notes correctly", () => {
+      expect(result.notes.length).toBe(3);
+
+      // Note 1
+      const note1 = result.notes.find((n) => n.guid === "abc123");
+      expect(note1).toBeDefined();
+      expect(note1?.fields).toEqual(["Hello", "World"]);
+      expect(note1?.tags).toEqual(["vocabulary", "test"]);
+      expect(note1?.sfld).toBe("Hello");
+
+      // Note 2
+      const note2 = result.notes.find((n) => n.guid === "def456");
+      expect(note2).toBeDefined();
+      expect(note2?.fields).toEqual(["日本語", "Japanese"]);
+      expect(note2?.tags).toEqual(["japanese", "kanji", "n5"]);
+
+      // Note 3 (no tags)
+      const note3 = result.notes.find((n) => n.guid === "ghi789");
+      expect(note3).toBeDefined();
+      expect(note3?.tags).toEqual([]);
+    });
+
+    it("should parse cards correctly", () => {
+      expect(result.cards.length).toBe(3);
+
+      // New card
+      const card1 = result.cards.find((c) => c.nid === 1000000000001);
+      expect(card1).toBeDefined();
+      expect(card1?.type).toBe(0); // new
+      expect(card1?.reps).toBe(0);
+
+      // Review card
+      const card2 = result.cards.find((c) => c.nid === 1000000000002);
+      expect(card2).toBeDefined();
+      expect(card2?.type).toBe(2); // review
+      expect(card2?.ivl).toBe(30);
+      expect(card2?.reps).toBe(5);
+      expect(card2?.lapses).toBe(1);
+
+      // Learning card
+      const card3 = result.cards.find((c) => c.nid === 1000000000003);
+      expect(card3).toBeDefined();
+      expect(card3?.type).toBe(1); // learning
+    });
+
+    it("should throw error for non-existent file", async () => {
+      await expect(parseAnkiPackage("/non/existent/file.apkg")).rejects.toThrow(
+        "File not found",
+      );
+    });
+
+    it("should throw error for invalid package without database", async () => {
+      // Create a ZIP without a database
+      const zipEntries = new Map<string, Buffer>();
+      zipEntries.set("media", Buffer.from("{}", "utf8"));
+      const invalidZip = createZip(zipEntries);
+
+      const invalidPath = join(tempDir, "invalid.apkg");
+      await writeFile(invalidPath, invalidZip);
+
+      await expect(parseAnkiPackage(invalidPath)).rejects.toThrow(
+        "No Anki database found",
+      );
+    });
+  });
+
+  describe("ZIP extraction", () => {
+    it("should handle uncompressed entries", async () => {
+      // Create a ZIP with uncompressed entries
+      const name = Buffer.from("test.txt", "utf8");
+      const data = Buffer.from("Hello, World!", "utf8");
+
+      // Local file header (uncompressed)
+      const localHeader = Buffer.alloc(30 + name.length);
+      localHeader.writeUInt32LE(0x04034b50, 0);
+      localHeader.writeUInt16LE(20, 4);
+      localHeader.writeUInt16LE(0, 6);
+      localHeader.writeUInt16LE(0, 8); // no compression
+      localHeader.writeUInt16LE(0, 10);
+      localHeader.writeUInt16LE(0, 12);
+      localHeader.writeUInt32LE(0, 14);
+      localHeader.writeUInt32LE(data.length, 18);
+      localHeader.writeUInt32LE(data.length, 22);
+      localHeader.writeUInt16LE(name.length, 26);
+      localHeader.writeUInt16LE(0, 28);
+      name.copy(localHeader, 30);
+
+      // Central directory
+      const centralEntry = Buffer.alloc(46 + name.length);
+      centralEntry.writeUInt32LE(0x02014b50, 0);
+      centralEntry.writeUInt16LE(20, 4);
+      centralEntry.writeUInt16LE(20, 6);
+      centralEntry.writeUInt16LE(0, 8);
+      centralEntry.writeUInt16LE(0, 10);
+      centralEntry.writeUInt16LE(0, 12);
+      centralEntry.writeUInt16LE(0, 14);
+      centralEntry.writeUInt32LE(0, 16);
+      centralEntry.writeUInt32LE(data.length, 20);
+      centralEntry.writeUInt32LE(data.length, 24);
+      centralEntry.writeUInt16LE(name.length, 28);
+      centralEntry.writeUInt16LE(0, 30);
+      centralEntry.writeUInt16LE(0, 32);
+      centralEntry.writeUInt16LE(0, 34);
+      centralEntry.writeUInt16LE(0, 36);
+      centralEntry.writeUInt32LE(0, 38);
+      centralEntry.writeUInt32LE(0, 42);
+      name.copy(centralEntry, 46);
+
+      // End of central directory
+      const endRecord = Buffer.alloc(22);
+      endRecord.writeUInt32LE(0x06054b50, 0);
+      endRecord.writeUInt16LE(0, 4);
+      endRecord.writeUInt16LE(0, 6);
+      endRecord.writeUInt16LE(1, 8);
+      endRecord.writeUInt16LE(1, 10);
+      endRecord.writeUInt32LE(centralEntry.length, 12);
+      endRecord.writeUInt32LE(localHeader.length + data.length, 16);
+      endRecord.writeUInt16LE(0, 20);
+
+      const zipBuffer = Buffer.concat([
+        localHeader,
+        data,
+        centralEntry,
+        endRecord,
+      ]);
+
+      const testPath = join(tempDir, "uncompressed.zip");
+      await writeFile(testPath, zipBuffer);
+
+      const files = await listAnkiPackageContents(testPath);
+      expect(files).toContain("test.txt");
+    });
+  });
+});
diff --git a/src/server/anki/parser.ts b/src/server/anki/parser.ts
new file mode 100644
index 0000000..c317ce7
--- /dev/null
+++ b/src/server/anki/parser.ts
@@ -0,0 +1,374 @@
+import { randomBytes } from "node:crypto";
+import { existsSync } from "node:fs";
+import { mkdir, open, rm, writeFile } from "node:fs/promises";
+import { tmpdir } from "node:os";
+import { join } from "node:path";
+import { DatabaseSync } from "node:sqlite";
+import { createInflateRaw } from "node:zlib";
+
+/**
+ * Represents a note from an Anki database
+ */
+export interface AnkiNote {
+  id: number;
+  guid: string;
+  mid: number; // model/notetype id
+  mod: number;
+  tags: string[];
+  fields: string[]; // fields separated by 0x1f in the database
+  sfld: string; // sort field
+}
+
+/**
+ * Represents a card from an Anki database
+ */
+export interface AnkiCard {
+  id: number;
+  nid: number; // note id
+  did: number; // deck id
+  ord: number; // ordinal (which template/cloze)
+  mod: number;
+  type: number; // 0=new, 1=learning, 2=review, 3=relearning
+  queue: number;
+  due: number;
+  ivl: number; // interval
+  factor: number;
+  reps: number;
+  lapses: number;
+}
+
+/**
+ * Represents a deck from an Anki database
+ */
+export interface AnkiDeck {
+  id: number;
+  name: string;
+  description: string;
+}
+
+/**
+ * Represents a model (note type) from an Anki database
+ */
+export interface AnkiModel {
+  id: number;
+  name: string;
+  fields: string[];
+  templates: {
+    name: string;
+    qfmt: string; // question format
+    afmt: string; // answer format
+  }[];
+}
+
+/**
+ * Represents the parsed contents of an Anki package
+ */
+export interface AnkiPackage {
+  notes: AnkiNote[];
+  cards: AnkiCard[];
+  decks: AnkiDeck[];
+  models: AnkiModel[];
+}
+
+// Local file header signature
+const LOCAL_FILE_HEADER_SIG = 0x04034b50;
+const CENTRAL_DIR_SIG = 0x02014b50;
+const END_CENTRAL_DIR_SIG = 0x06054b50;
+
+/**
+ * Parse a ZIP file and extract entries
+ * This is a minimal implementation for .apkg files
+ */
+async function parseZip(filePath: string): Promise<Map<string, Buffer>> {
+  const fileHandle = await open(filePath, "r");
+  const stat = await fileHandle.stat();
+  const fileSize = stat.size;
+
+  try {
+    const entries = new Map<string, Buffer>();
+
+    // Read the entire file for simplicity (apkg files are typically small)
+    const buffer = Buffer.alloc(fileSize);
+    await fileHandle.read(buffer, 0, fileSize, 0);
+
+    let offset = 0;
+
+    while (offset < fileSize) {
+      // Read signature
+      const sig = buffer.readUInt32LE(offset);
+
+      if (sig === LOCAL_FILE_HEADER_SIG) {
+        // Local file header
+        const compressionMethod = buffer.readUInt16LE(offset + 8);
+        const compressedSize = buffer.readUInt32LE(offset + 18);
+        const fileNameLength = buffer.readUInt16LE(offset + 26);
+        const extraFieldLength = buffer.readUInt16LE(offset + 28);
+
+        const fileName = buffer
+          .subarray(offset + 30, offset + 30 + fileNameLength)
+          .toString("utf8");
+        const dataOffset = offset + 30 + fileNameLength + extraFieldLength;
+
+        // Extract the data
+        const compressedData = buffer.subarray(
+          dataOffset,
+          dataOffset + compressedSize,
+        );
+
+        let data: Buffer;
+        if (compressionMethod === 0) {
+          // Stored (no compression)
+          data = compressedData;
+        } else if (compressionMethod === 8) {
+          // Deflate
+          data = await inflateBuffer(compressedData);
+        } else {
+          throw new Error(
+            `Unsupported compression method: ${compressionMethod}`,
+          );
+        }
+
+        entries.set(fileName, data);
+
+        offset = dataOffset + compressedSize;
+      } else if (sig === CENTRAL_DIR_SIG || sig === END_CENTRAL_DIR_SIG) {
+        // We've reached the central directory, stop parsing
+        break;
+      } else {
+        // Unknown signature, try to move forward
+        offset++;
+      }
+    }
+
+    return entries;
+  } finally {
+    await fileHandle.close();
+  }
+}
+
+/**
+ * Inflate a deflate-compressed buffer
+ */
+function inflateBuffer(data: Buffer): Promise<Buffer> {
+  return new Promise((resolve, reject) => {
+    const chunks: Buffer[] = [];
+    const inflate = createInflateRaw();
+
+    inflate.on("data", (chunk) => chunks.push(chunk));
+    inflate.on("end", () => resolve(Buffer.concat(chunks)));
+    inflate.on("error", reject);
+
+    inflate.write(data);
+    inflate.end();
+  });
+}
+
+/**
+ * Extract and parse an Anki package file (.apkg)
+ */
+export async function parseAnkiPackage(filePath: string): Promise<AnkiPackage> {
+  if (!existsSync(filePath)) {
+    throw new Error(`File not found: ${filePath}`);
+  }
+
+  // Extract ZIP contents
+  const entries = await parseZip(filePath);
+
+  // Find the database file
+  let dbBuffer: Buffer | undefined;
+  let dbFormat: "anki2" | "anki21" | "anki21b" | undefined;
+
+  // Check for different database formats (newest first)
+  if (entries.has("collection.anki21b")) {
+    dbBuffer = entries.get("collection.anki21b");
+    dbFormat = "anki21b";
+  } else if (entries.has("collection.anki21")) {
+    dbBuffer = entries.get("collection.anki21");
+    dbFormat = "anki21";
+  } else if (entries.has("collection.anki2")) {
+    dbBuffer = entries.get("collection.anki2");
+    dbFormat = "anki2";
+  }
+
+  if (!dbBuffer || !dbFormat) {
+    const availableFiles = Array.from(entries.keys()).join(", ");
+    throw new Error(
+      `No Anki database found in package. Available files: ${availableFiles}`,
+    );
+  }
+
+  // For anki21b format, the database is zstd compressed
+  if (dbFormat === "anki21b") {
+    throw new Error(
+      "anki21b format (zstd compressed) is not yet supported. Please export from Anki using the legacy format.",
+    );
+  }
+
+  // Write database to temp file (node:sqlite requires a file path)
+  const tempDir = join(
+    tmpdir(),
+    `kioku-anki-${randomBytes(8).toString("hex")}`,
+  );
+  await mkdir(tempDir, { recursive: true });
+  const tempDbPath = join(tempDir, "collection.db");
+
+  try {
+    await writeFile(tempDbPath, dbBuffer);
+
+    // Parse the SQLite database
+    return parseAnkiDatabase(tempDbPath);
+  } finally {
+    // Clean up temp files
+    await rm(tempDir, { recursive: true, force: true });
+  }
+}
+
+/**
+ * Parse an Anki SQLite database
+ */
+function parseAnkiDatabase(dbPath: string): AnkiPackage {
+  const db = new DatabaseSync(dbPath, { open: true });
+
+  try {
+    // Parse notes
+    const notes = parseNotes(db);
+
+    // Parse cards
+    const cards = parseCards(db);
+
+    // Parse decks and models from the col table
+    const { decks, models } = parseCollection(db);
+
+    return { notes, cards, decks, models };
+  } finally {
+    db.close();
+  }
+}
+
+/**
+ * Parse notes from the database
+ */
+function parseNotes(db: DatabaseSync): AnkiNote[] {
+  const stmt = db.prepare(
+    "SELECT id, guid, mid, mod, tags, flds, sfld FROM notes",
+  );
+  const rows = stmt.all() as Array<{
+    id: number;
+    guid: string;
+    mid: number;
+    mod: number;
+    tags: string;
+    flds: string;
+    sfld: string;
+  }>;
+
+  return rows.map((row) => ({
+    id: row.id,
+    guid: row.guid,
+    mid: row.mid,
+    mod: row.mod,
+    tags: row.tags
+      .trim()
+      .split(/\s+/)
+      .filter((t) => t.length > 0),
+    fields: row.flds.split("\x1f"),
+    sfld: row.sfld,
+  }));
+}
+
+/**
+ * Parse cards from the database
+ */
+function parseCards(db: DatabaseSync): AnkiCard[] {
+  const stmt = db.prepare(
+    "SELECT id, nid, did, ord, mod, type, queue, due, ivl, factor, reps, lapses FROM cards",
+  );
+  const rows = stmt.all() as Array<{
+    id: number;
+    nid: number;
+    did: number;
+    ord: number;
+    mod: number;
+    type: number;
+    queue: number;
+    due: number;
+    ivl: number;
+    factor: number;
+    reps: number;
+    lapses: number;
+  }>;
+
+  return rows.map((row) => ({
+    id: row.id,
+    nid: row.nid,
+    did: row.did,
+    ord: row.ord,
+    mod: row.mod,
+    type: row.type,
+    queue: row.queue,
+    due: row.due,
+    ivl: row.ivl,
+    factor: row.factor,
+    reps: row.reps,
+    lapses: row.lapses,
+  }));
+}
+
+/**
+ * Parse collection metadata (decks and models)
+ */
+function parseCollection(db: DatabaseSync): {
+  decks: AnkiDeck[];
+  models: AnkiModel[];
+} {
+  const stmt = db.prepare("SELECT decks, models FROM col LIMIT 1");
+  const row = stmt.get() as { decks: string; models: string } | undefined;
+
+  if (!row) {
+    throw new Error("No collection data found in database");
+  }
+
+  // Parse decks JSON
+  const decksJson = JSON.parse(row.decks) as Record<
+    string,
+    { id: number; name: string; desc?: string }
+  >;
+  const decks: AnkiDeck[] = Object.values(decksJson).map((d) => ({
+    id: d.id,
+    name: d.name,
+    description: d.desc || "",
+  }));
+
+  // Parse models JSON
+  const modelsJson = JSON.parse(row.models) as Record<
+    string,
+    {
+      id: number;
+      name: string;
+      flds: Array<{ name: string }>;
+      tmpls: Array<{ name: string; qfmt: string; afmt: string }>;
+    }
+  >;
+  const models: AnkiModel[] = Object.values(modelsJson).map((m) => ({
+    id: m.id,
+    name: m.name,
+    fields: m.flds.map((f) => f.name),
+    templates: m.tmpls.map((t) => ({
+      name: t.name,
+      qfmt: t.qfmt,
+      afmt: t.afmt,
+    })),
+  }));
+
+  return { decks, models };
+}
+
+/**
+ * Get the list of files in a ZIP archive
+ */
+export async function listAnkiPackageContents(
+  filePath: string,
+): Promise<string[]> {
+  const entries = await parseZip(filePath);
+  return Array.from(entries.keys());
+}
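
For orientation, here is a minimal sketch of how the API introduced in this diff might be consumed through the new `src/server/anki` entry point. Only `parseAnkiPackage`, `listAnkiPackageContents`, and the exported types come from the change above; the import path, the `summarizeDeck` helper, and the `.apkg` path are hypothetical.

```typescript
// Hypothetical caller of the exports in src/server/anki/index.ts.
import {
  type AnkiPackage,
  listAnkiPackageContents,
  parseAnkiPackage,
} from "./server/anki/index.js";

async function summarizeDeck(apkgPath: string): Promise<void> {
  // Raw ZIP entries, e.g. "collection.anki2" and "media".
  const files = await listAnkiPackageContents(apkgPath);
  console.log(`package entries: ${files.join(", ")}`);

  // Parsed collection: decks, models, notes, and cards.
  const pkg: AnkiPackage = await parseAnkiPackage(apkgPath);
  for (const deck of pkg.decks) {
    // AnkiCard.did is the deck id, so group cards per deck.
    const cardCount = pkg.cards.filter((c) => c.did === deck.id).length;
    console.log(`${deck.name}: ${cardCount} cards`);
  }
}

// Placeholder path, not part of this change.
summarizeDeck("/tmp/example.apkg").catch(console.error);
```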
