-
Notifications
You must be signed in to change notification settings - Fork 4
Expand file tree
/
Copy pathtest-migration.js
More file actions
726 lines (630 loc) · 29.1 KB
/
test-migration.js
File metadata and controls
726 lines (630 loc) · 29.1 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
611
612
613
614
615
616
617
618
619
620
621
622
623
624
625
626
627
628
629
630
631
632
633
634
635
636
637
638
639
640
641
642
643
644
645
646
647
648
649
650
651
652
653
654
655
656
657
658
659
660
661
662
663
664
665
666
667
668
669
670
671
672
673
674
675
676
677
678
679
680
681
682
683
684
685
686
687
688
689
690
691
692
693
694
695
696
697
698
699
700
701
702
703
704
705
706
707
708
709
710
711
712
713
714
715
716
717
718
719
720
721
722
723
724
725
726
/**
* syncSource Migration Simulation Tests
*
* Verifies that server schema migrations correctly handle the syncSource removal:
* - snake_case DBs: sync_source dropped during table rebuild
* - camelCase DBs: orphaned syncSource column persists harmlessly
* - Items with various syncSource values survive migration
* - Items pushed without syncSource work correctly
* - _sync metadata preserved through table rebuild
*
* Run: cd backend/server && node --test test-migration.js
*/
const { describe, it, before, after, beforeEach, afterEach } = require("node:test");
const assert = require("node:assert");
const fs = require("fs");
const path = require("path");
const Database = require("better-sqlite3");
// Use isolated test directory
const TEST_DATA_DIR = path.join(__dirname, "test-data-migration");
process.env.DATA_DIR = TEST_DATA_DIR;
/**
 * Delete the isolated test-data directory (recursively) if it exists,
 * leaving a blank slate for the next scenario. No-op when already absent.
 */
function cleanTestDir() {
  if (!fs.existsSync(TEST_DATA_DIR)) return;
  fs.rmSync(TEST_DATA_DIR, { recursive: true });
}
/**
 * Hand-build a legacy on-disk SQLite database for a user/profile, bypassing
 * the db module entirely so tests can seed pre-migration schemas.
 *
 * @param {string} userId - user whose profile directory to create
 * @param {string} [profileId="default"] - profile subdirectory name
 * @returns {Database} open better-sqlite3 handle in WAL mode (caller closes)
 */
function createLegacyDb(userId, profileId = "default") {
  const profileDir = path.join(TEST_DATA_DIR, userId, "profiles", profileId);
  fs.mkdirSync(profileDir, { recursive: true });
  const handle = new Database(path.join(profileDir, "datastore.sqlite"));
  handle.pragma("journal_mode = WAL");
  return handle;
}
/**
 * Return a freshly-loaded instance of the ./db module by evicting any cached
 * copy first, so its schema-initialization path runs again on next use.
 */
function freshDbModule() {
  const resolved = require.resolve("./db");
  delete require.cache[resolved];
  return require(resolved);
}
/**
 * List a table's column names via PRAGMA table_info.
 *
 * @param {{all: Function}} adapter - connection exposing an all(sql) method
 * @param {string} table - table name (test-controlled; interpolated into SQL)
 * @returns {string[]} column names in schema order
 */
function getColumnNames(adapter, table) {
  const rows = adapter.all(`PRAGMA table_info(${table})`);
  return rows.map(({ name }) => name);
}
/**
 * Like getColumnNames, but returns the column names as a Set for O(1)
 * membership checks in assertions.
 *
 * @param {{all: Function}} adapter - connection exposing an all(sql) method
 * @param {string} table - table name (test-controlled)
 * @returns {Set<string>} set of column names
 */
function getColumnSet(adapter, table) {
  return new Set(adapter.all(`PRAGMA table_info(${table})`).map((col) => col.name));
}
// Wipe any stale test data from a previous (possibly crashed) run before the
// suites below register — leftover databases would skew migration results.
cleanTestDir();
/**
 * Full migration suite. Each scenario hand-builds a legacy SQLite database on
 * disk (bypassing the db module), then loads the db module fresh so its schema
 * initialization/migration runs against that legacy file.
 *
 * Fix in this revision: Scenario 4 previously declared `id3` but never used
 * it — the return value of the third saveItem call is now asserted, matching
 * how every other save call in this file is checked.
 */
describe("syncSource Migration Tests", () => {
  afterEach(() => {
    // Clean up db module connections
    try {
      const db = require("./db");
      db.closeAllConnections();
    } catch (e) { /* ignore */ }
    delete require.cache[require.resolve("./db")];
    cleanTestDir();
  });
  describe("Scenario 1: Snake_case DB with sync_source — migration preserves data", () => {
    it("should rename snake_case columns, leave sync_source as orphaned, and preserve data", () => {
      // With modern SQLite (3.25+), ALTER RENAME succeeds even with indexes.
      // Since sync_source has no mapping in itemRenames, it persists as orphaned.
      // It's only dropped when a rebuild triggers for OTHER reasons (e.g., type migration).
      const legacyDb = createLegacyDb("snake-user");
      legacyDb.exec(`
        CREATE TABLE items (
          id TEXT PRIMARY KEY,
          type TEXT NOT NULL,
          content TEXT,
          metadata TEXT,
          sync_id TEXT DEFAULT '',
          sync_source TEXT DEFAULT '',
          synced_at INTEGER DEFAULT 0,
          created_at INTEGER NOT NULL,
          updated_at INTEGER NOT NULL,
          deleted_at INTEGER DEFAULT 0
        );
        CREATE TABLE tags (
          id TEXT PRIMARY KEY,
          name TEXT NOT NULL UNIQUE,
          frequency INTEGER DEFAULT 1,
          last_used INTEGER NOT NULL,
          frecency_score REAL DEFAULT 0.0,
          created_at INTEGER NOT NULL,
          updated_at INTEGER NOT NULL
        );
        CREATE TABLE item_tags (
          item_id TEXT NOT NULL,
          tag_id TEXT NOT NULL,
          created_at INTEGER NOT NULL,
          PRIMARY KEY (item_id, tag_id)
        );
        CREATE TABLE settings (key TEXT PRIMARY KEY, value TEXT);
      `);
      // Seed items with various sync_source values
      const now = Date.now();
      legacyDb.prepare(`
        INSERT INTO items (id, type, content, metadata, sync_id, sync_source, synced_at, created_at, updated_at, deleted_at)
        VALUES (?, 'url', 'https://local.com', NULL, '', '', 0, ?, ?, 0)
      `).run("item-a", now, now);
      legacyDb.prepare(`
        INSERT INTO items (id, type, content, metadata, sync_id, sync_source, synced_at, created_at, updated_at, deleted_at)
        VALUES (?, 'url', 'https://synced.com', NULL, 'remote-1', 'server', 1000, ?, ?, 0)
      `).run("item-b", now - 1000, now);
      legacyDb.prepare(`
        INSERT INTO items (id, type, content, metadata, sync_id, sync_source, synced_at, created_at, updated_at, deleted_at)
        VALUES (?, 'text', 'deleted note', NULL, 'remote-2', 'server', 1000, ?, ?, ?)
      `).run("item-c", now - 2000, now - 1000, now - 500);
      legacyDb.close();
      // Open via db module — triggers initializeSchema
      const db = freshDbModule();
      const conn = db.getConnection("snake-user");
      // Verify renamed columns exist
      const cols = getColumnSet(conn, "items");
      assert.ok(cols.has("syncId"), "syncId should exist");
      assert.ok(cols.has("syncedAt"), "syncedAt should exist");
      assert.ok(cols.has("createdAt"), "createdAt should exist");
      assert.ok(cols.has("updatedAt"), "updatedAt should exist");
      assert.ok(cols.has("deletedAt"), "deletedAt should exist");
      // sync_source persists as orphaned column (ALTER RENAME succeeds for all mapped
      // columns, so no rebuild triggers). This is harmless — the column is not in
      // itemRenames so it's never mapped, and the new code never reads/writes it.
      assert.ok(!cols.has("sync_id"), "sync_id should be renamed to syncId");
      assert.ok(!cols.has("synced_at"), "synced_at should be renamed to syncedAt");
      assert.ok(!cols.has("created_at"), "created_at should be renamed to createdAt");
      // Verify data preserved (includeDeleted=true so the tombstone is visible)
      const items = db.getItems("snake-user", null, "default", true);
      assert.strictEqual(items.length, 3, "all 3 items should survive migration");
      const itemA = items.find(i => i.id === "item-a");
      assert.ok(itemA, "item-a should exist");
      assert.strictEqual(itemA.content, "https://local.com");
      const itemB = items.find(i => i.id === "item-b");
      assert.ok(itemB, "item-b should exist");
      assert.strictEqual(itemB.content, "https://synced.com");
      const itemC = items.find(i => i.id === "item-c");
      assert.ok(itemC, "item-c should exist");
      assert.ok(itemC.deletedAt > 0, "item-c should still be soft-deleted");
      // Verify syncId preserved
      const rawB = conn.get("SELECT syncId, syncedAt FROM items WHERE id = 'item-b'");
      assert.strictEqual(rawB.syncId, "remote-1", "syncId should be preserved");
      assert.strictEqual(rawB.syncedAt, 1000, "syncedAt should be preserved");
      // Verify all operations work despite orphaned sync_source column
      const newId = db.saveUrl("snake-user", "https://new.com", ["tag1"]);
      assert.ok(newId);
      // 2 non-deleted originals + 1 new = 3 (item-c is soft-deleted)
      assert.strictEqual(db.getItems("snake-user").length, 3);
      db.closeAllConnections();
    });
    it("should drop sync_source when table rebuild triggers (production schema with triggers)", () => {
      // Production DBs with views/triggers referencing snake_case columns cause
      // ALTER RENAME to fail, which triggers rebuildTableIfNeeded.
      // The rebuild creates a new table from the target schema (no sync_source).
      const legacyDb = createLegacyDb("rebuild-user");
      legacyDb.exec(`
        CREATE TABLE items (
          id TEXT PRIMARY KEY,
          type TEXT NOT NULL,
          content TEXT,
          metadata TEXT,
          sync_id TEXT DEFAULT '',
          sync_source TEXT DEFAULT '',
          synced_at INTEGER DEFAULT 0,
          created_at INTEGER NOT NULL,
          updated_at INTEGER NOT NULL,
          deleted_at INTEGER DEFAULT 0
        );
        -- Trigger referencing created_at prevents ALTER RENAME from succeeding
        CREATE TRIGGER items_updated_at AFTER UPDATE ON items
        BEGIN
          UPDATE items SET updated_at = strftime('%s','now') * 1000 WHERE id = NEW.id;
        END;
        CREATE TABLE tags (
          id TEXT PRIMARY KEY,
          name TEXT NOT NULL UNIQUE,
          frequency INTEGER DEFAULT 1,
          last_used INTEGER NOT NULL,
          frecency_score REAL DEFAULT 0.0,
          created_at INTEGER NOT NULL,
          updated_at INTEGER NOT NULL
        );
        CREATE TABLE item_tags (
          item_id TEXT NOT NULL,
          tag_id TEXT NOT NULL,
          created_at INTEGER NOT NULL,
          PRIMARY KEY (item_id, tag_id)
        );
        CREATE TABLE settings (key TEXT PRIMARY KEY, value TEXT);
      `);
      const now = Date.now();
      legacyDb.prepare(`
        INSERT INTO items (id, type, content, sync_id, sync_source, synced_at, created_at, updated_at, deleted_at)
        VALUES ('trigger-1', 'url', 'https://example.com', '', 'server', 0, ?, ?, 0)
      `).run(now, now);
      legacyDb.close();
      const db = freshDbModule();
      const conn = db.getConnection("rebuild-user");
      const cols = getColumnSet(conn, "items");
      // If trigger caused ALTER RENAME to fail → rebuild triggers → sync_source dropped
      // If trigger didn't block it → sync_source persists as orphaned
      // Either way, the important thing is: all operations work
      assert.ok(cols.has("syncId"), "syncId should exist after migration");
      assert.ok(cols.has("syncedAt"), "syncedAt should exist after migration");
      assert.ok(cols.has("createdAt"), "createdAt should exist after migration");
      // Data survived
      const items = db.getItems("rebuild-user");
      assert.strictEqual(items.length, 1);
      assert.strictEqual(items[0].content, "https://example.com");
      // New items can be saved
      const newId = db.saveUrl("rebuild-user", "https://new.com");
      assert.ok(newId);
      db.closeAllConnections();
    });
  });
  describe("Scenario 2: CamelCase DB with orphaned syncSource — no rebuild triggered", () => {
    it("should leave orphaned syncSource column and operate correctly", () => {
      const legacyDb = createLegacyDb("camel-user");
      legacyDb.exec(`
        CREATE TABLE items (
          id TEXT PRIMARY KEY,
          type TEXT NOT NULL CHECK(type IN ('url', 'text', 'tagset', 'image')),
          content TEXT,
          metadata TEXT,
          syncId TEXT DEFAULT '',
          syncSource TEXT DEFAULT '',
          syncedAt INTEGER DEFAULT 0,
          createdAt INTEGER NOT NULL,
          updatedAt INTEGER NOT NULL,
          deletedAt INTEGER DEFAULT 0
        );
        CREATE TABLE tags (
          id TEXT PRIMARY KEY,
          name TEXT NOT NULL UNIQUE,
          frequency INTEGER DEFAULT 1,
          lastUsed INTEGER NOT NULL,
          frecencyScore REAL DEFAULT 0.0,
          createdAt INTEGER NOT NULL,
          updatedAt INTEGER NOT NULL
        );
        CREATE TABLE item_tags (
          itemId TEXT NOT NULL,
          tagId TEXT NOT NULL,
          createdAt INTEGER NOT NULL,
          PRIMARY KEY (itemId, tagId)
        );
        CREATE TABLE settings (key TEXT PRIMARY KEY, value TEXT);
      `);
      const now = Date.now();
      legacyDb.prepare(`
        INSERT INTO items (id, type, content, syncId, syncSource, syncedAt, createdAt, updatedAt, deletedAt)
        VALUES ('orphan-1', 'url', 'https://example.com', '', '', 0, ?, ?, 0)
      `).run(now, now);
      legacyDb.prepare(`
        INSERT INTO items (id, type, content, syncId, syncSource, syncedAt, createdAt, updatedAt, deletedAt)
        VALUES ('orphan-2', 'text', 'A note', 'sync-abc', 'server', 5000, ?, ?, 0)
      `).run(now - 1000, now);
      legacyDb.close();
      const db = freshDbModule();
      const conn = db.getConnection("camel-user");
      // syncSource column persists (no rebuild triggered — columns already camelCase)
      const cols = getColumnSet(conn, "items");
      assert.ok(cols.has("syncSource"), "syncSource should persist as orphaned column (no rebuild triggered)");
      // validateSchema should pass (extra columns are ignored)
      // (If we got here without throwing, validateSchema passed)
      // All operations should work correctly
      const items = db.getItems("camel-user");
      assert.strictEqual(items.length, 2);
      // getItemsSince should work
      const since = db.getItemsSince("camel-user", 0);
      assert.strictEqual(since.length, 2);
      // saveItem should work (no syncSource in INSERT)
      const newId = db.saveUrl("camel-user", "https://new.com", ["tag1"]);
      assert.ok(newId);
      assert.strictEqual(db.getItems("camel-user").length, 3);
      // deleteItem should work
      db.deleteItem("camel-user", "orphan-1");
      assert.strictEqual(db.getItems("camel-user").length, 2);
      db.closeAllConnections();
    });
  });
  describe("Scenario 3: Extension-origin items become pushable after migration", () => {
    it("should preserve items with syncSource=history/tab/bookmark through migration", () => {
      const legacyDb = createLegacyDb("ext-user");
      legacyDb.exec(`
        CREATE TABLE items (
          id TEXT PRIMARY KEY,
          type TEXT NOT NULL,
          content TEXT,
          metadata TEXT,
          sync_id TEXT DEFAULT '',
          sync_source TEXT DEFAULT '',
          synced_at INTEGER DEFAULT 0,
          created_at INTEGER NOT NULL,
          updated_at INTEGER NOT NULL,
          deleted_at INTEGER DEFAULT 0
        );
        CREATE TABLE tags (
          id TEXT PRIMARY KEY,
          name TEXT NOT NULL UNIQUE,
          frequency INTEGER DEFAULT 1,
          last_used INTEGER NOT NULL,
          frecency_score REAL DEFAULT 0.0,
          created_at INTEGER NOT NULL,
          updated_at INTEGER NOT NULL
        );
        CREATE TABLE item_tags (
          item_id TEXT NOT NULL,
          tag_id TEXT NOT NULL,
          created_at INTEGER NOT NULL,
          PRIMARY KEY (item_id, tag_id)
        );
        CREATE TABLE settings (key TEXT PRIMARY KEY, value TEXT);
      `);
      const now = Date.now();
      // Extension-origin items (previously blocked from sync)
      legacyDb.prepare(`
        INSERT INTO items (id, type, content, sync_id, sync_source, synced_at, created_at, updated_at, deleted_at)
        VALUES ('hist-1', 'url', 'https://history.com', '', 'history', 0, ?, ?, 0)
      `).run(now, now);
      legacyDb.prepare(`
        INSERT INTO items (id, type, content, sync_id, sync_source, synced_at, created_at, updated_at, deleted_at)
        VALUES ('tab-1', 'url', 'https://tab.com', '', 'tab', 0, ?, ?, 0)
      `).run(now, now);
      legacyDb.prepare(`
        INSERT INTO items (id, type, content, sync_id, sync_source, synced_at, created_at, updated_at, deleted_at)
        VALUES ('bm-1', 'url', 'https://bookmark.com', '', 'bookmark', 0, ?, ?, 0)
      `).run(now, now);
      // Regular locally-created item
      legacyDb.prepare(`
        INSERT INTO items (id, type, content, sync_id, sync_source, synced_at, created_at, updated_at, deleted_at)
        VALUES ('local-1', 'url', 'https://local.com', '', '', 0, ?, ?, 0)
      `).run(now, now);
      // Already-synced server item
      legacyDb.prepare(`
        INSERT INTO items (id, type, content, sync_id, sync_source, synced_at, created_at, updated_at, deleted_at)
        VALUES ('server-1', 'url', 'https://server.com', 'remote-1', 'server', ?, ?, ?, 0)
      `).run(now - 500, now - 1000, now - 500);
      legacyDb.close();
      const db = freshDbModule();
      const conn = db.getConnection("ext-user");
      // All items should survive
      const items = db.getItems("ext-user");
      assert.strictEqual(items.length, 5);
      // Under new push algorithm (syncedAt === 0 → push):
      // hist-1, tab-1, bm-1, local-1 all have syncedAt=0 → eligible for push
      // server-1 has syncedAt > 0 → only pushed if modified since last sync
      const pushable = conn.all(
        "SELECT id FROM items WHERE CAST(deletedAt AS INTEGER) = 0 AND (syncedAt = 0 OR updatedAt > syncedAt)"
      );
      const pushableIds = pushable.map(r => r.id).sort();
      // All never-synced items should be pushable (including former extension imports)
      assert.ok(pushableIds.includes("hist-1"), "history item should be pushable under new algorithm");
      assert.ok(pushableIds.includes("tab-1"), "tab item should be pushable under new algorithm");
      assert.ok(pushableIds.includes("bm-1"), "bookmark item should be pushable under new algorithm");
      assert.ok(pushableIds.includes("local-1"), "local item should be pushable");
      db.closeAllConnections();
    });
  });
  describe("Scenario 4: Server receives items WITHOUT syncSource from refactored desktop", () => {
    it("should handle saveItem without syncSource field", () => {
      const db = freshDbModule();
      // Simulate desktop push: POST /items with { type, content, tags, sync_id, metadata }
      // No syncSource field at all
      const id1 = db.saveItem("fresh-user", "url", "https://example.com", ["web"], null, "client-local-id-1");
      assert.ok(id1, "should create item without syncSource");
      const id2 = db.saveItem("fresh-user", "text", "A note", ["note"]);
      assert.ok(id2, "should create item without syncId or syncSource");
      // Verify items stored correctly
      const items = db.getItems("fresh-user");
      assert.strictEqual(items.length, 2);
      const urlItem = items.find(i => i.type === "url");
      assert.strictEqual(urlItem.content, "https://example.com");
      assert.deepStrictEqual(urlItem.tags, ["web"]);
      // Verify getItemsSince works (sync pull)
      const sinceItems = db.getItemsSince("fresh-user", 0);
      assert.strictEqual(sinceItems.length, 2);
      // Verify incremental sync works
      const timestamp = Date.now();
      const id3 = db.saveItem("fresh-user", "url", "https://new.com", [], null, "client-local-id-2");
      // Fixed: previously `id3` was declared but never asserted.
      assert.ok(id3, "should create item during incremental sync");
      const incrementalItems = db.getItemsSince("fresh-user", timestamp - 1);
      assert.ok(incrementalItems.length >= 1, "should return newly created item");
      db.closeAllConnections();
    });
  });
  describe("Scenario 5: _sync metadata preserved through table rebuild", () => {
    it("should preserve _sync device metadata in JSON through snake_case migration", () => {
      const legacyDb = createLegacyDb("metadata-user");
      legacyDb.exec(`
        CREATE TABLE items (
          id TEXT PRIMARY KEY,
          type TEXT NOT NULL,
          content TEXT,
          metadata TEXT,
          sync_id TEXT DEFAULT '',
          sync_source TEXT DEFAULT '',
          synced_at INTEGER DEFAULT 0,
          created_at INTEGER NOT NULL,
          updated_at INTEGER NOT NULL,
          deleted_at INTEGER DEFAULT 0
        );
        CREATE TABLE tags (
          id TEXT PRIMARY KEY,
          name TEXT NOT NULL UNIQUE,
          frequency INTEGER DEFAULT 1,
          last_used INTEGER NOT NULL,
          frecency_score REAL DEFAULT 0.0,
          created_at INTEGER NOT NULL,
          updated_at INTEGER NOT NULL
        );
        CREATE TABLE item_tags (
          item_id TEXT NOT NULL,
          tag_id TEXT NOT NULL,
          created_at INTEGER NOT NULL,
          PRIMARY KEY (item_id, tag_id)
        );
        CREATE TABLE settings (key TEXT PRIMARY KEY, value TEXT);
      `);
      const now = Date.now();
      const deviceUUID = "550e8400-e29b-41d4-a716-446655440000";
      const metadata = JSON.stringify({
        _sync: {
          createdBy: deviceUUID,
          createdAt: now - 5000,
          modifiedBy: deviceUUID,
          modifiedAt: now - 1000
        },
        title: "Example Page",
        description: "Test item with device metadata"
      });
      legacyDb.prepare(`
        INSERT INTO items (id, type, content, metadata, sync_id, sync_source, synced_at, created_at, updated_at, deleted_at)
        VALUES ('meta-1', 'url', 'https://example.com', ?, 'remote-1', 'server', ?, ?, ?, 0)
      `).run(metadata, now - 500, now - 5000, now - 1000);
      // Item without _sync metadata (older item)
      legacyDb.prepare(`
        INSERT INTO items (id, type, content, metadata, sync_id, sync_source, synced_at, created_at, updated_at, deleted_at)
        VALUES ('meta-2', 'text', 'Old note', '{"title":"Old"}', '', '', 0, ?, ?, 0)
      `).run(now - 10000, now - 10000);
      legacyDb.close();
      const db = freshDbModule();
      const conn = db.getConnection("metadata-user");
      // Verify _sync metadata survived table rebuild
      const items = db.getItems("metadata-user");
      assert.strictEqual(items.length, 2);
      const metaItem = items.find(i => i.id === "meta-1");
      assert.ok(metaItem, "meta-1 should exist");
      assert.ok(metaItem.metadata, "metadata should exist");
      assert.ok(metaItem.metadata._sync, "_sync should be preserved");
      assert.strictEqual(metaItem.metadata._sync.createdBy, deviceUUID, "createdBy should be preserved");
      assert.strictEqual(metaItem.metadata._sync.createdAt, now - 5000, "createdAt should be preserved");
      assert.strictEqual(metaItem.metadata._sync.modifiedBy, deviceUUID, "modifiedBy should be preserved");
      assert.strictEqual(metaItem.metadata._sync.modifiedAt, now - 1000, "modifiedAt should be preserved");
      assert.strictEqual(metaItem.metadata.title, "Example Page", "other metadata fields preserved");
      assert.strictEqual(metaItem.metadata.description, "Test item with device metadata");
      // Verify old item without _sync also works
      const oldItem = items.find(i => i.id === "meta-2");
      assert.ok(oldItem.metadata);
      assert.strictEqual(oldItem.metadata.title, "Old");
      assert.strictEqual(oldItem.metadata._sync, undefined, "old item should not have _sync");
      db.closeAllConnections();
    });
    it("should preserve _sync metadata in camelCase DB (no rebuild)", () => {
      const legacyDb = createLegacyDb("camel-meta-user");
      legacyDb.exec(`
        CREATE TABLE items (
          id TEXT PRIMARY KEY,
          type TEXT NOT NULL CHECK(type IN ('url', 'text', 'tagset', 'image')),
          content TEXT,
          metadata TEXT,
          syncId TEXT DEFAULT '',
          syncSource TEXT DEFAULT '',
          syncedAt INTEGER DEFAULT 0,
          createdAt INTEGER NOT NULL,
          updatedAt INTEGER NOT NULL,
          deletedAt INTEGER DEFAULT 0
        );
        CREATE TABLE tags (
          id TEXT PRIMARY KEY,
          name TEXT NOT NULL UNIQUE,
          frequency INTEGER DEFAULT 1,
          lastUsed INTEGER NOT NULL,
          frecencyScore REAL DEFAULT 0.0,
          createdAt INTEGER NOT NULL,
          updatedAt INTEGER NOT NULL
        );
        CREATE TABLE item_tags (
          itemId TEXT NOT NULL,
          tagId TEXT NOT NULL,
          createdAt INTEGER NOT NULL,
          PRIMARY KEY (itemId, tagId)
        );
        CREATE TABLE settings (key TEXT PRIMARY KEY, value TEXT);
      `);
      const now = Date.now();
      const mobileUUID = "a1b2c3d4-e5f6-7890-abcd-ef1234567890";
      const metadata = JSON.stringify({
        _sync: {
          createdBy: mobileUUID,
          createdAt: now - 3000,
          modifiedBy: mobileUUID,
          modifiedAt: now - 500
        }
      });
      legacyDb.prepare(`
        INSERT INTO items (id, type, content, metadata, syncId, syncSource, syncedAt, createdAt, updatedAt, deletedAt)
        VALUES ('cm-1', 'url', 'https://mobile.com', ?, 'mobile-sync-1', 'server', ?, ?, ?, 0)
      `).run(metadata, now - 100, now - 3000, now - 500);
      legacyDb.close();
      const db = freshDbModule();
      db.getConnection("camel-meta-user"); // triggers initializeSchema
      const items = db.getItems("camel-meta-user");
      assert.strictEqual(items.length, 1);
      assert.ok(items[0].metadata._sync, "_sync should survive no-rebuild path");
      assert.strictEqual(items[0].metadata._sync.createdBy, mobileUUID);
      db.closeAllConnections();
    });
  });
  describe("Scenario 6: Mixed syncSource values + new push algorithm correctness", () => {
    it("should correctly identify pushable items with syncedAt-based filtering", () => {
      const legacyDb = createLegacyDb("push-user");
      legacyDb.exec(`
        CREATE TABLE items (
          id TEXT PRIMARY KEY,
          type TEXT NOT NULL,
          content TEXT,
          metadata TEXT,
          sync_id TEXT DEFAULT '',
          sync_source TEXT DEFAULT '',
          synced_at INTEGER DEFAULT 0,
          created_at INTEGER NOT NULL,
          updated_at INTEGER NOT NULL,
          deleted_at INTEGER DEFAULT 0
        );
        CREATE TABLE tags (
          id TEXT PRIMARY KEY,
          name TEXT NOT NULL UNIQUE,
          frequency INTEGER DEFAULT 1,
          last_used INTEGER NOT NULL,
          frecency_score REAL DEFAULT 0.0,
          created_at INTEGER NOT NULL,
          updated_at INTEGER NOT NULL
        );
        CREATE TABLE item_tags (
          item_id TEXT NOT NULL,
          tag_id TEXT NOT NULL,
          created_at INTEGER NOT NULL,
          PRIMARY KEY (item_id, tag_id)
        );
        CREATE TABLE settings (key TEXT PRIMARY KEY, value TEXT);
      `);
      const now = Date.now();
      // Case 1: Never synced (syncedAt=0) → should push
      legacyDb.prepare(`
        INSERT INTO items (id, type, content, sync_id, sync_source, synced_at, created_at, updated_at, deleted_at)
        VALUES ('never-synced', 'url', 'https://new.com', '', '', 0, ?, ?, 0)
      `).run(now, now);
      // Case 2: Synced, not modified since → should NOT push
      legacyDb.prepare(`
        INSERT INTO items (id, type, content, sync_id, sync_source, synced_at, created_at, updated_at, deleted_at)
        VALUES ('synced-clean', 'url', 'https://clean.com', 'r1', 'server', ?, ?, ?, 0)
      `).run(now, now - 1000, now - 500);
      // Case 3: Synced, modified since → should push
      legacyDb.prepare(`
        INSERT INTO items (id, type, content, sync_id, sync_source, synced_at, created_at, updated_at, deleted_at)
        VALUES ('synced-dirty', 'url', 'https://dirty.com', 'r2', 'server', ?, ?, ?, 0)
      `).run(now - 2000, now - 3000, now - 1000);
      // Case 4: Extension import (syncSource=history, syncedAt=0) → should push under new algorithm
      legacyDb.prepare(`
        INSERT INTO items (id, type, content, sync_id, sync_source, synced_at, created_at, updated_at, deleted_at)
        VALUES ('ext-import', 'url', 'https://history.com', '', 'history', 0, ?, ?, 0)
      `).run(now, now);
      // Case 5: Soft-deleted → should NOT push (deletedAt filter)
      legacyDb.prepare(`
        INSERT INTO items (id, type, content, sync_id, sync_source, synced_at, created_at, updated_at, deleted_at)
        VALUES ('deleted', 'url', 'https://gone.com', '', '', 0, ?, ?, ?)
      `).run(now, now, now);
      legacyDb.close();
      const db = freshDbModule();
      const conn = db.getConnection("push-user");
      // New push algorithm: syncedAt=0 OR updatedAt > syncedAt, AND deletedAt=0
      const pushable = conn.all(
        "SELECT id FROM items WHERE CAST(deletedAt AS INTEGER) = 0 AND (syncedAt = 0 OR updatedAt > syncedAt)"
      );
      const pushableIds = new Set(pushable.map(r => r.id));
      assert.ok(pushableIds.has("never-synced"), "never-synced item should be pushable");
      assert.ok(!pushableIds.has("synced-clean"), "synced-clean item should NOT be pushable");
      assert.ok(pushableIds.has("synced-dirty"), "synced-dirty item should be pushable");
      assert.ok(pushableIds.has("ext-import"), "extension import should be pushable under new algorithm");
      assert.ok(!pushableIds.has("deleted"), "deleted item should NOT be pushable");
      assert.strictEqual(pushableIds.size, 3, "exactly 3 items should be pushable");
      db.closeAllConnections();
    });
  });
  describe("Scenario 7: End-to-end sync flow without syncSource", () => {
    it("should handle full push/pull cycle using only syncId and syncedAt", () => {
      const db = freshDbModule();
      // Simulate client push: create items via saveItem with syncId
      const id1 = db.saveItem("sync-user", "url", "https://device-a.com", ["saved"],
        { _sync: { createdBy: "device-aaa", createdAt: Date.now() } },
        "device-a-local-1"
      );
      // Simulate another device push
      const id2 = db.saveItem("sync-user", "text", "Note from device B", [],
        { _sync: { createdBy: "device-bbb", createdAt: Date.now() } },
        "device-b-local-1"
      );
      // Simulate pull: getItemsSince(0) returns all items
      const allItems = db.getItemsSince("sync-user", 0);
      assert.strictEqual(allItems.length, 2);
      // Verify _sync metadata preserved in response
      const urlItem = allItems.find(i => i.type === "url");
      assert.ok(urlItem.metadata._sync, "url item should have _sync metadata");
      assert.strictEqual(urlItem.metadata._sync.createdBy, "device-aaa");
      const textItem = allItems.find(i => i.type === "text");
      assert.ok(textItem.metadata._sync);
      assert.strictEqual(textItem.metadata._sync.createdBy, "device-bbb");
      // Simulate re-push with server ID (update existing item)
      db.saveItem("sync-user", "url", "https://device-a.com/updated", ["saved", "edited"],
        { _sync: { createdBy: "device-aaa", createdAt: Date.now() - 1000, modifiedBy: "device-aaa", modifiedAt: Date.now() } },
        id1 // re-push using server ID
      );
      const updated = db.getItems("sync-user");
      assert.strictEqual(updated.length, 2, "should still be 2 items (deduped by server ID)");
      const updatedUrl = updated.find(i => i.type === "url");
      assert.strictEqual(updatedUrl.content, "https://device-a.com/updated");
      assert.deepStrictEqual(updatedUrl.tags.sort(), ["edited", "saved"]);
      // Simulate tombstone push
      db.saveItem("sync-user", "text", "Note from device B", [],
        null, id2, "default", Date.now()
      );
      assert.strictEqual(db.getItems("sync-user").length, 1, "text item should be soft-deleted");
      db.closeAllConnections();
    });
  });
});