dexie progress

vcoppe
2024-05-03 15:59:34 +02:00
parent cd919258ad
commit 6c9faf54b1
10 changed files with 334 additions and 175 deletions

@@ -6,49 +6,96 @@ import { fileOrder, selectedFiles } from './stores';
class Database extends Dexie {
+ fileids!: Dexie.Table<string, string>;
files!: Dexie.Table<FreezedObject<GPXFile>, string>;
patches!: Dexie.Table<{ patch: Patch[], inversePatch: Patch[] }, number>;
settings!: Dexie.Table<any, string>;
constructor() {
super("Database");
this.version(1).stores({
files: ',file',
patches: '++id,patch,inversePatch',
settings: ',value'
super("Database", {
cache: 'immutable'
});
this.version(1).stores({
fileids: ',&fileid',
files: '',
patches: ',patch',
settings: ''
});
this.files.add
}
}
const db = new Database();
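
For readers less familiar with Dexie schema strings: an empty first segment (as in '' or ',&fileid') declares an out-of-line primary key, so the key must be passed explicitly to add/put/bulkAdd, and &fileid additionally declares a unique index. A minimal sketch of writing to these tables with the db instance above (the id and the frozen file object are hypothetical, not from this commit):

    // sketch only: out-of-line keys mean the second argument supplies the key
    const id = 'gpx-0';                         // hypothetical id
    await db.fileids.put(id, id);               // the id string is both value and key
    await db.files.put(someFrozenGpxFile, id);  // someFrozenGpxFile: assumed FreezedObject<GPXFile>
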
- function dexieStore<T>(querier: () => T | Promise<T>): Readable<T> {
- const dexieObservable = liveQuery(querier)
- return {
- subscribe(run, invalidate) {
- return dexieObservable.subscribe(run, invalidate).unsubscribe
+ function dexieFileStore(querier: () => FreezedObject<GPXFile> | undefined | Promise<FreezedObject<GPXFile> | undefined>): Readable<GPXFile> {
+ let store = writable<GPXFile>(undefined);
+ liveQuery(querier).subscribe(value => {
+ if (value !== undefined) {
+ let gpx = new GPXFile(value);
+ fileState.set(gpx._data.id, gpx);
+ store.set(gpx);
+ }
+ });
+ return {
+ subscribe: store.subscribe,
+ };
}
+ function dexieStore<T>(querier: () => T | Promise<T>, initial?: T): Readable<T> {
+ let store = writable<T>(initial);
+ liveQuery(querier).subscribe(value => {
+ if (value !== undefined) {
+ store.set(value);
+ }
+ });
+ return {
+ subscribe: store.subscribe,
+ };
}
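
Both helpers adapt a Dexie liveQuery to Svelte's store contract, so consumers subscribe to them like any other Readable. A small usage sketch (hypothetical call site, reusing the settings query that appears later in this file):

    // hypothetical consumer of dexieStore
    const patchIndexStore = dexieStore(() => db.settings.get('patchIndex'), -1);
    const stop = patchIndexStore.subscribe((value) => console.log('patchIndex:', value));
    // later: stop();
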
+ function updateFiles(files: (FreezedObject<GPXFile> | undefined)[], add: boolean = false) {
+ let filteredFiles = files.filter(file => file !== undefined) as FreezedObject<GPXFile>[];
+ let fileIds = filteredFiles.map(file => file._data.id);
+ if (add) {
+ return db.transaction('rw', db.fileids, db.files, async () => {
+ await db.fileids.bulkAdd(fileIds, fileIds);
+ await db.files.bulkAdd(filteredFiles, fileIds);
+ });
+ } else {
+ return db.files.bulkPut(filteredFiles, fileIds);
+ }
}
- export function updateFiles(files: FreezedObject<GPXFile>[]) {
- console.log(files);
- return db.files.bulkPut(files, files.map(file => file._data.id));
+ function deleteFiles(fileIds: string[]) {
+ return db.transaction('rw', db.fileids, db.files, async () => {
+ await db.fileids.bulkDelete(fileIds);
+ await db.files.bulkDelete(fileIds);
+ });
}
- export const fileObservers: Writable<Map<string, Readable<FreezedObject<GPXFile>>>> = writable(new Map());
- export const fileState: Map<string, FreezedObject<GPXFile>> = new Map(); // Used to generate patches
+ function commitFileStateChange(newFileState: ReadonlyMap<string, FreezedObject<GPXFile>>, patch: Patch[]) {
+ if (newFileState.size > fileState.size) {
+ return updateFiles(getChangedFileIds(patch).map((fileId) => newFileState.get(fileId)), true);
+ } else if (newFileState.size === fileState.size) {
+ return updateFiles(getChangedFileIds(patch).map((fileId) => newFileState.get(fileId)));
+ } else {
+ return deleteFiles(getChangedFileIds(patch));
+ }
}
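
commitFileStateChange infers what happened from how the immer producer changed the map: more entries than before means new files (bulkAdd plus fileids), the same count means in-place updates (bulkPut), fewer means deletions. A standalone illustration of the size heuristic with plain objects (toy data, not GPX files):

    const oldState = new Map([['gpx-0', {}], ['gpx-1', {}]]);
    const newState = new Map(oldState).set('gpx-2', {});
    const kind = newState.size > oldState.size ? 'add'
        : newState.size === oldState.size ? 'update'
        : 'delete';
    console.log(kind); // 'add' -> the updateFiles(..., true) branch
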
- liveQuery(() => db.files.toArray()).subscribe(dbFiles => {
+ export const fileObservers: Writable<Map<string, Readable<GPXFile | undefined>>> = writable(new Map());
+ const fileState: Map<string, GPXFile> = new Map(); // Used to generate patches
+ liveQuery(() => db.fileids.toArray()).subscribe(dbFileIds => {
// Find new files to observe
- let newFiles = dbFiles.map(file => file._data.id).filter(id => !get(fileObservers).has(id));
+ let newFiles = dbFileIds.filter(id => !get(fileObservers).has(id));
// Find deleted files to stop observing
- let deletedFiles = Array.from(get(fileObservers).keys()).filter(id => !dbFiles.find(file => file._data.id === id));
+ let deletedFiles = Array.from(get(fileObservers).keys()).filter(id => !dbFileIds.find(fileId => fileId === id));
// Update the store
if (newFiles.length > 0 || deletedFiles.length > 0) {
fileObservers.update($files => {
newFiles.forEach(id => {
- $files.set(id, dexieStore(() => db.files.get(id)));
+ $files.set(id, dexieFileStore(() => db.files.get(id)));
});
deletedFiles.forEach(id => {
$files.delete(id);
@@ -56,71 +103,75 @@ liveQuery(() => db.files.toArray()).subscribe(dbFiles => {
});
return $files;
});
- console.log(get(fileObservers));
}
- // Update fileState
- dbFiles.forEach(file => {
- fileState.set(file._data.id, file);
- });
});
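
A hypothetical consumer of fileObservers (not part of the commit): read the current set of observed ids, then subscribe to one per-file store.

    const observers = get(fileObservers);
    console.log('observed files:', Array.from(observers.keys()));
    const firstFile = observers.get('gpx-0');   // 'gpx-0' is a hypothetical id
    const stop = firstFile?.subscribe((file) => console.log('updated:', file?._data.id));
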
- const patchIndex = dexieStore(() => db.settings.get('patchIndex') ?? -1);
- const patches = dexieStore(() => db.patches.toArray());
- export const canUndo = derived(patchIndex, $patchIndex => $patchIndex >= 0);
- export const canRedo = derived([patchIndex, patches], ([$patchIndex, $patches]) => $patchIndex < $patches.length - 1);
+ const patchIndex: Readable<number> = dexieStore(() => db.settings.get('patchIndex'), -1);
+ const patches: Readable<{ patch: Patch[], inversePatch: Patch[] }[]> = dexieStore(() => db.patches.toArray(), []);
+ export const canUndo: Readable<boolean> = derived(patchIndex, ($patchIndex) => $patchIndex >= 0);
+ export const canRedo: Readable<boolean> = derived([patchIndex, patches], ([$patchIndex, $patches]) => $patchIndex < $patches.length - 1);
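
These derived stores are meant to gate the undo/redo UI; a minimal sketch of wiring them up outside a Svelte component (inside one, $canUndo / $canRedo would be used instead):

    // undoButton / redoButton are hypothetical DOM elements
    canUndo.subscribe((enabled) => { undoButton.disabled = !enabled; });
    canRedo.subscribe((enabled) => { redoButton.disabled = !enabled; });
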
export function applyGlobal(callback: (files: Map<string, GPXFile>) => void) {
const [newFileState, patch, inversePatch] = produceWithPatches(fileState, callback);
appendPatches(patch, inversePatch, true);
- return updateFiles(Array.from(newFileState.values()));
+ return commitFileStateChange(newFileState, patch);
}
function applyToFiles(fileIds: string[], callback: (file: GPXFile) => void) {
const [newFileState, patch, inversePatch] = produceWithPatches(fileState, (draft) => {
fileIds.forEach((fileId) => {
- callback(draft.get(fileId));
+ let file = draft.get(fileId);
+ if (file) {
+ callback(file);
+ }
});
});
appendPatches(patch, inversePatch, false);
- return updateFiles(fileIds.map((fileId) => newFileState.get(fileId)));
+ return commitFileStateChange(newFileState, patch);
}
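
Both applyGlobal and applyToFiles rely on immer's produceWithPatches, which returns the next state together with forward and inverse patches; the forward patch drives the database write and both are stored for undo/redo. A self-contained illustration with toy data (not the app's GPXFile map):

    import { enableMapSet, enablePatches, produceWithPatches, applyPatches } from 'immer';
    enableMapSet();
    enablePatches();

    const before = new Map([['gpx-0', { name: 'ride' }]]);
    const [after, patch, inversePatch] = produceWithPatches(before, (draft) => {
        draft.get('gpx-0')!.name = 'morning ride';
    });
    // patch ~ [{ op: 'replace', path: ['gpx-0', 'name'], value: 'morning ride' }]
    // path[0] is the file id, which is what getChangedFileIds() extracts below
    const restored = applyPatches(after, inversePatch); // equals `before` again
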
- function appendPatches(patch: Patch[], inversePatch: Patch[], global: boolean) {
- db.patches.where('id').above(patchIndex).delete();
- db.patches.add({
- patch,
- inversePatch
+ async function appendPatches(patch: Patch[], inversePatch: Patch[], global: boolean) {
+ if (get(patchIndex) !== undefined) {
+ db.patches.where(':id').above(get(patchIndex)).delete();
+ }
+ db.transaction('rw', db.patches, db.settings, async () => {
+ await db.patches.put({
+ patch,
+ inversePatch
+ }, get(patchIndex) + 1);
+ await db.settings.put(get(patchIndex) + 1, 'patchIndex');
+ });
- db.settings.put(get(patchIndex) + 1, 'patchIndex');
}
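
undo() and redo() themselves are collapsed in this view. As an illustration only (not necessarily how this commit implements them), the stored { patch, inversePatch } rows plus the patchIndex setting are enough to drive both directions:

    // sketch, assuming applyPatch() just below and the patchIndex store above
    async function undoSketch() {
        const index = get(patchIndex);
        if (index < 0) return;                      // nothing to undo
        const entry = await db.patches.get(index);  // { patch, inversePatch }
        if (entry) applyPatch(entry.inversePatch);
        await db.settings.put(index - 1, 'patchIndex');
    }
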
function applyPatch(patch: Patch[]) {
let newFileState = applyPatches(fileState, patch);
- let changedFiles = [];
+ return commitFileStateChange(newFileState, patch);
}
+ function getChangedFileIds(patch: Patch[]) {
+ let changedFileIds = [];
for (let p of patch) {
let fileId = p.path[0]?.toString();
if (fileId) {
- let newFile = newFileState.get(fileId);
- if (newFile) {
- changedFiles.push(newFile);
- }
+ changedFileIds.push(fileId);
}
}
- return updateFiles(changedFiles);
+ return changedFileIds;
}
- function getFileId() {
- for (let index = 0; ; index++) {
+ function getFileIds(n: number) {
+ let ids = [];
+ for (let index = 0; ids.length < n; index++) {
let id = `gpx-${index}`;
if (!get(fileObservers).has(id)) {
- return id;
+ ids.push(id);
}
}
+ return ids;
}
export function undo() {
@@ -141,17 +192,16 @@ export function redo() {
export const dbUtils = {
add: (file: GPXFile) => {
- file._data.id = getFileId();
- console.log(file._data.id);
- let result = applyGlobal((draft) => {
+ file._data.id = getFileIds(1)[0];
+ return applyGlobal((draft) => {
draft.set(file._data.id, file);
});
- console.log(result);
},
addMultiple: (files: GPXFile[]) => {
- applyGlobal((draft) => {
- files.forEach((file) => {
- file._data.id = getFileId();
+ return applyGlobal((draft) => {
+ let ids = getFileIds(files.length);
+ files.forEach((file, index) => {
+ file._data.id = ids[index];
draft.set(file._data.id, file);
});
});
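
A hypothetical call site for the two helpers above; the GPXFile instances are assumed to come from the app's GPX parsing code, which is not part of this diff:

    dbUtils.add(parsedFile);          // parsedFile: GPXFile (assumed)
    dbUtils.addMultiple(parsedFiles); // parsedFiles: GPXFile[] (assumed)
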
@@ -164,12 +214,13 @@ export const dbUtils = {
},
duplicateSelectedFiles: () => {
applyGlobal((draft) => {
- get(fileOrder).forEach((fileId) => {
+ let ids = getFileIds(get(fileOrder).length);
+ get(fileOrder).forEach((fileId, index) => {
if (get(selectedFiles).has(fileId)) {
let file = draft.get(fileId);
if (file) {
let clone = file.clone();
- clone._data.id = getFileId();
+ clone._data.id = ids[index];
draft.set(clone._data.id, clone);
}
}