Upgraded with LiveSync 0.11.0

vrtmrz 2022-06-14 07:27:10 +09:00
parent 7d9f977b58
commit d3254eb2c9
7 changed files with 349 additions and 107 deletions

.gitmodules vendored Normal file

@@ -0,0 +1,3 @@
+[submodule "src/lib"]
+    path = src/lib
+    url = https://github.com/vrtmrz/livesync-commonlib

package-lock.json generated

@@ -18,6 +18,7 @@
         "pouchdb-mapreduce": "^7.2.2",
         "pouchdb-node": "^7.2.2",
         "pouchdb-replication": "^7.2.2",
+        "transform-pouch": "^2.0.0",
         "xxhash-wasm": "^1.0.1"
       },
       "devDependencies": {
@@ -1251,6 +1252,11 @@
         "uuid": "8.1.0"
       }
     },
+    "node_modules/pouchdb-wrappers": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/pouchdb-wrappers/-/pouchdb-wrappers-5.0.0.tgz",
+      "integrity": "sha512-fXqsVn+rmlPtxaAIGaQP5TkiaT39OMwvMk+ScLLtHrmfXD2KBO6fe/qBl38N/rpTn0h/A058dPN4fLAHt550zA=="
+    },
    "node_modules/pouchdb/node_modules/readable-stream": {
      "version": "1.1.14",
      "integrity": "sha1-fPTFTvZI44EwhMY23SB54WbAgdk=",
@@ -1392,6 +1398,14 @@
      "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
      "integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o="
    },
+    "node_modules/transform-pouch": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/transform-pouch/-/transform-pouch-2.0.0.tgz",
+      "integrity": "sha512-nDZovo0U5o0UdMNL93fMQgGjrwH9h4F/a7qqRTnF6cVA+FfgyXiJPTrSuD+LmWSO7r2deZt0P0oeCD8hkgxl5g==",
+      "dependencies": {
+        "pouchdb-wrappers": "^5.0.0"
+      }
+    },
    "node_modules/ts-node": {
      "version": "10.5.0",
      "integrity": "sha512-6kEJKwVxAJ35W4akuiysfKwKmjkbYxwQMTBaAxo9KKAx/Yd26mPUyhGz3ji+EsJoAgrLqVsYHNuuYwQe22lbtw==",
@@ -2542,6 +2556,11 @@
        "uuid": "8.1.0"
      }
    },
+    "pouchdb-wrappers": {
+      "version": "5.0.0",
+      "resolved": "https://registry.npmjs.org/pouchdb-wrappers/-/pouchdb-wrappers-5.0.0.tgz",
+      "integrity": "sha512-fXqsVn+rmlPtxaAIGaQP5TkiaT39OMwvMk+ScLLtHrmfXD2KBO6fe/qBl38N/rpTn0h/A058dPN4fLAHt550zA=="
+    },
    "prr": {
      "version": "1.0.1",
      "integrity": "sha1-0/wRS6BplaRexok/SEzrHXj19HY="
@@ -2642,6 +2661,14 @@
      "resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
      "integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o="
    },
+    "transform-pouch": {
+      "version": "2.0.0",
+      "resolved": "https://registry.npmjs.org/transform-pouch/-/transform-pouch-2.0.0.tgz",
+      "integrity": "sha512-nDZovo0U5o0UdMNL93fMQgGjrwH9h4F/a7qqRTnF6cVA+FfgyXiJPTrSuD+LmWSO7r2deZt0P0oeCD8hkgxl5g==",
+      "requires": {
+        "pouchdb-wrappers": "^5.0.0"
+      }
+    },
    "ts-node": {
      "version": "10.5.0",
      "integrity": "sha512-6kEJKwVxAJ35W4akuiysfKwKmjkbYxwQMTBaAxo9KKAx/Yd26mPUyhGz3ji+EsJoAgrLqVsYHNuuYwQe22lbtw==",

package.json

@@ -20,6 +20,7 @@
         "pouchdb-mapreduce": "^7.2.2",
         "pouchdb-node": "^7.2.2",
         "pouchdb-replication": "^7.2.2",
+        "transform-pouch": "^2.0.0",
         "xxhash-wasm": "^1.0.1"
     },
     "devDependencies": {

README.md

@@ -7,7 +7,7 @@ Notice: **We're on the bleeding edge.** Please make sure to back your vault up!
 ## How to run
 ```sh
-git clone https://github.com/vrtmrz/filesystem-livesync
+git clone --recursive https://github.com/vrtmrz/filesystem-livesync
 cp dat/config.sample.json dat/config.json
 # Setting up configuration
 vi dat/config.json


@@ -10,9 +10,10 @@ import { Logger } from "./logger.js";
 //@ts-ignore
 import { PouchDB as PouchDB_src } from "./pouchdb.js";
-import { decrypt, encrypt } from "./e2ee.js";
 import { configFile, connectConfig, eachConf, Entry, EntryLeaf, LoadedEntry, LOG_LEVEL, MAX_DOC_SIZE, MAX_DOC_SIZE_BIN, NewEntry, PlainEntry, TransferEntry } from "./types.js";
 import { addKnownFile, addTouchedFile, calcDateDiff, DATEDIFF_EVEN, DATEDIFF_NEWER_A, DATEDIFF_OLDER_A, isKnownFile, isPlainText, isTouchedFile, path2unix } from "./util.js";
+import { enableEncryption, runWithLock, shouldSplitAsPlainText, splitPieces2 } from "./lib/src/utils.js";
+import { EntryDoc } from "./lib/src/types.js";
 const xxhash = require("xxhash-wasm");
@@ -80,17 +81,205 @@ function triggerProcessor(procs: string) {
         runEngine();
     }, 500);
 }
-let hashCache: {
-    [key: string]: string;
-} = {};
-let hashCacheRev: {
-    [key: string]: string;
-} = {};
+class LRUCache {
+    cache = new Map<string, string>([]);
+    revCache = new Map<string, string>([]);
+    maxCache = 100;
+    constructor() {}
+    get(key: string) {
+        // debugger
+        const v = this.cache.get(key);
+        if (v) {
+            // update the key to recently used.
+            this.cache.delete(key);
+            this.revCache.delete(v);
+            this.cache.set(key, v);
+            this.revCache.set(v, key);
+        }
+        return v;
+    }
+    revGet(value: string) {
+        // debugger
+        const key = this.revCache.get(value);
+        if (value) {
+            // update the key to recently used.
+            this.cache.delete(key);
+            this.revCache.delete(value);
+            this.cache.set(key, value);
+            this.revCache.set(value, key);
+        }
+        return key;
+    }
+    set(key: string, value: string) {
+        this.cache.set(key, value);
+        this.revCache.set(value, key);
+        if (this.cache.size > this.maxCache) {
+            for (const kv of this.cache) {
+                this.revCache.delete(kv[1]);
+                this.cache.delete(kv[0]);
+                if (this.cache.size <= this.maxCache) break;
+            }
+        }
+    }
+}
+const hashCaches = new LRUCache();
// // putDBEntry:COPIED FROM obsidian-livesync
// async function putDBEntry2(note: LoadedEntry, passphrase: string, database: PouchDB.Database<NewEntry | PlainEntry | Entry | EntryLeaf>) {
// let leftData = note.data;
// const savenNotes = [];
// let processed = 0;
// let made = 0;
// let skiped = 0;
// let pieceSize = MAX_DOC_SIZE_BIN;
// let plainSplit = false;
// let cacheUsed = 0;
// const userpasswordHash = h32Raw(new TextEncoder().encode(passphrase));
// if (isPlainText(note._id)) {
// pieceSize = MAX_DOC_SIZE;
// plainSplit = true;
// }
// const newLeafs: EntryLeaf[] = [];
// do {
// // To keep low bandwith and database size,
// // Dedup pieces on database.
// // from 0.1.10, for best performance. we use markdown delimiters
// // 1. \n[^\n]{longLineThreshold}[^\n]*\n -> long sentence shuld break.
// // 2. \n\n shold break
// // 3. \r\n\r\n should break
// // 4. \n# should break.
// let cPieceSize = pieceSize;
// if (plainSplit) {
// let minimumChunkSize = 20; //default
// if (minimumChunkSize < 10) minimumChunkSize = 10;
// let longLineThreshold = 250; //default
// if (longLineThreshold < 100) longLineThreshold = 100;
// cPieceSize = 0;
// // lookup for next splittion .
// // we're standing on "\n"
// do {
// const n1 = leftData.indexOf("\n", cPieceSize + 1);
// const n2 = leftData.indexOf("\n\n", cPieceSize + 1);
// const n3 = leftData.indexOf("\r\n\r\n", cPieceSize + 1);
// const n4 = leftData.indexOf("\n#", cPieceSize + 1);
// if (n1 == -1 && n2 == -1 && n3 == -1 && n4 == -1) {
// cPieceSize = MAX_DOC_SIZE;
// break;
// }
// if (n1 > longLineThreshold) {
// // long sentence is an established piece
// cPieceSize = n1;
// } else {
// // cPieceSize = Math.min.apply([n2, n3, n4].filter((e) => e > 1));
// // ^ heavy.
// if (n1 > 0 && cPieceSize < n1) cPieceSize = n1;
// if (n2 > 0 && cPieceSize < n2) cPieceSize = n2 + 1;
// if (n3 > 0 && cPieceSize < n3) cPieceSize = n3 + 3;
// // Choose shorter, empty line and \n#
// if (n4 > 0 && cPieceSize > n4) cPieceSize = n4 + 0;
// cPieceSize++;
// }
// } while (cPieceSize < minimumChunkSize);
// }
// // piece size determined.
// const piece = leftData.substring(0, cPieceSize);
// leftData = leftData.substring(cPieceSize);
// processed++;
// let leafid = "";
// // Get hash of piece.
// let hashedPiece = "";
// if (typeof hashCache[piece] !== "undefined") {
// hashedPiece = "";
// leafid = hashCache[piece];
// skiped++;
// cacheUsed++;
// } else {
// if (passphrase != "") {
// // When encryption has been enabled, make hash to be different between each passphrase to avoid inferring password.
// hashedPiece = "+" + (h32Raw(new TextEncoder().encode(piece)) ^ userpasswordHash).toString(16);
// } else {
// hashedPiece = h32(piece);
// }
// leafid = "h:" + hashedPiece;
// //have to make
// const savePiece = piece;
// const d: EntryLeaf = {
// _id: leafid,
// data: savePiece,
// type: "leaf",
// };
// newLeafs.push(d);
// hashCache[piece] = leafid;
// hashCacheRev[leafid] = piece;
// made++;
// }
// savenNotes.push(leafid);
// } while (leftData != "");
// let saved = true;
// if (newLeafs.length > 0) {
// try {
// const result = await database.bulkDocs(newLeafs);
// for (const item of result) {
// if ((item as any).ok) {
// Logger(`save ok:id:${item.id} rev:${item.rev}`, LOG_LEVEL.VERBOSE);
// } else {
// if ((item as any).status && (item as any).status == 409) {
// // conflicted, but it would be ok in childrens.
// } else {
// Logger(`save failed:id:${item.id} rev:${item.rev}`, LOG_LEVEL.NOTICE);
// Logger(item);
// // disposeHashCache();
// saved = false;
// }
// }
// }
// } catch (ex) {
// Logger("ERROR ON SAVING LEAVES:", LOG_LEVEL.NOTICE);
// Logger(ex, LOG_LEVEL.NOTICE);
// saved = false;
// }
// }
// if (saved) {
// Logger(`note content saven, pieces:${processed} new:${made}, skip:${skiped}, cache:${cacheUsed}`);
// const newDoc: PlainEntry | NewEntry = {
// NewNote: true,
// children: savenNotes,
// _id: note._id,
// ctime: note.ctime,
// mtime: note.mtime,
// size: note.size,
// type: plainSplit ? "plain" : "newnote",
// };
// // Here for upsert logic,
// try {
// const old = await database.get(newDoc._id);
// if (!old.type || old.type == "notes" || old.type == "newnote" || old.type == "plain") {
// // simple use rev for new doc
// newDoc._rev = old._rev;
// }
// } catch (ex: any) {
// if (ex.status && ex.status == 404) {
// // NO OP/
// } else {
// throw ex;
// }
// }
// const ret = await database.put(newDoc, { force: true });
// Logger(`note saved:${newDoc._id}:${ret.rev}`);
// return ret;
// } else {
// Logger(`note coud not saved:${note._id}`);
// }
// }
 // putDBEntry:COPIED FROM obsidian-livesync
 async function putDBEntry(note: LoadedEntry, passphrase: string, database: PouchDB.Database<NewEntry | PlainEntry | Entry | EntryLeaf>) {
-    let leftData = note.data;
+    // let leftData = note.data;
     const savenNotes = [];
     let processed = 0;
     let made = 0;
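The `LRUCache` introduced in the hunk above replaces the old unbounded `hashCache`/`hashCacheRev` objects with a bounded, two-way piece-to-chunk-id map. A minimal sketch of how it behaves, using only names from the diff (100 is the committed `maxCache` default):

```ts
// Sketch: exercising the LRUCache added by this commit.
const pieceCache = new LRUCache();
pieceCache.set("# Heading\ntext\n", "h:1a2b3c0"); // remember piece -> chunk id
pieceCache.get("# Heading\ntext\n");              // "h:1a2b3c0"; also marks the pair recently used
pieceCache.revGet("h:1a2b3c0");                   // "# Heading\ntext\n" (reverse lookup)
// Beyond maxCache (100) entries, set() evicts the least recently used pairs
// from both maps, so long sync sessions no longer grow the cache without bound.
```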
@@ -99,64 +288,40 @@ async function putDBEntry(note: LoadedEntry, passphrase: string, database: Pouch
     let plainSplit = false;
     let cacheUsed = 0;
     const userpasswordHash = h32Raw(new TextEncoder().encode(passphrase));
-    if (isPlainText(note._id)) {
+    if (shouldSplitAsPlainText(note._id)) {
         pieceSize = MAX_DOC_SIZE;
         plainSplit = true;
     }
     const newLeafs: EntryLeaf[] = [];
-    do {
-        // To keep low bandwith and database size,
-        // Dedup pieces on database.
-        // from 0.1.10, for best performance. we use markdown delimiters
-        // 1. \n[^\n]{longLineThreshold}[^\n]*\n -> long sentence shuld break.
-        // 2. \n\n shold break
-        // 3. \r\n\r\n should break
-        // 4. \n# should break.
-        let cPieceSize = pieceSize;
-        if (plainSplit) {
-            let minimumChunkSize = 20; //default
-            if (minimumChunkSize < 10) minimumChunkSize = 10;
-            let longLineThreshold = 250; //default
-            if (longLineThreshold < 100) longLineThreshold = 100;
-            cPieceSize = 0;
-            // lookup for next splittion .
-            // we're standing on "\n"
-            do {
-                const n1 = leftData.indexOf("\n", cPieceSize + 1);
-                const n2 = leftData.indexOf("\n\n", cPieceSize + 1);
-                const n3 = leftData.indexOf("\r\n\r\n", cPieceSize + 1);
-                const n4 = leftData.indexOf("\n#", cPieceSize + 1);
-                if (n1 == -1 && n2 == -1 && n3 == -1 && n4 == -1) {
-                    cPieceSize = MAX_DOC_SIZE;
-                    break;
-                }
-                if (n1 > longLineThreshold) { //benchmarhk
-                    // long sentence is an established piece
-                    cPieceSize = n1;
-                } else {
-                    // cPieceSize = Math.min.apply([n2, n3, n4].filter((e) => e > 1));
-                    // ^ heavy.
-                    if (n1 > 0 && cPieceSize < n1) cPieceSize = n1;
-                    if (n2 > 0 && cPieceSize < n2) cPieceSize = n2 + 1;
-                    if (n3 > 0 && cPieceSize < n3) cPieceSize = n3 + 3;
-                    // Choose shorter, empty line and \n#
-                    if (n4 > 0 && cPieceSize > n4) cPieceSize = n4 + 0;
-                    cPieceSize++;
-                }
-            } while (cPieceSize < minimumChunkSize);
-        }
-        // piece size determined.
-        const piece = leftData.substring(0, cPieceSize);
-        leftData = leftData.substring(cPieceSize);
+    // To keep low bandwith and database size,
+    // Dedup pieces on database.
+    // from 0.1.10, for best performance. we use markdown delimiters
+    // 1. \n[^\n]{longLineThreshold}[^\n]*\n -> long sentence shuld break.
+    // 2. \n\n shold break
+    // 3. \r\n\r\n should break
+    // 4. \n# should break.
+    let minimumChunkSize = 20; //default
+    if (minimumChunkSize < 10) minimumChunkSize = 10;
+    let longLineThreshold = 250; //default
+    if (longLineThreshold < 100) longLineThreshold = 100;
+    const pieces = splitPieces2(note.data, pieceSize, plainSplit, minimumChunkSize, longLineThreshold);
+    for (const piece of pieces()) {
         processed++;
         let leafid = "";
         // Get hash of piece.
         let hashedPiece = "";
-        if (typeof hashCache[piece] !== "undefined") {
+        let hashQ = 0; // if hash collided, **IF**, count it up.
+        let tryNextHash = false;
+        let needMake = true;
+        const cache = hashCaches.get(piece);
+        if (cache) {
             hashedPiece = "";
-            leafid = hashCache[piece];
+            leafid = cache;
+            needMake = false;
             skiped++;
             cacheUsed++;
         } else {
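`splitPieces2` now comes from the shared `lib/src/utils.js` (the livesync-commonlib submodule) and returns a generator factory, which is why the call site iterates `pieces()`. A simplified sketch of the splitting idea, assuming the signature shown in the hunk; this is an illustration, not the library's implementation (the real one also honors `longLineThreshold` for overlong lines):

```ts
// Sketch: a generator-factory splitter in the spirit of splitPieces2.
// Cuts at markdown-ish boundaries ("\n\n", "\n#") when plainSplit is set,
// otherwise at fixed pieceSize; pieces stay at least minimumChunkSize long.
function splitPiecesSketch(data: string, pieceSize: number, plainSplit: boolean, minimumChunkSize: number) {
    return function* () {
        let leftData = data;
        while (leftData != "") {
            let cut = pieceSize;
            if (plainSplit) {
                const n2 = leftData.indexOf("\n\n", minimumChunkSize);
                const n4 = leftData.indexOf("\n#", minimumChunkSize);
                const candidates = [n2, n4].filter((e) => e > 0).map((e) => e + 1);
                if (candidates.length > 0) cut = Math.min(...candidates);
            }
            yield leftData.substring(0, cut);
            leftData = leftData.substring(cut);
        }
    };
}
```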
@@ -167,42 +332,76 @@ async function putDBEntry(note: LoadedEntry, passphrase: string, database: Pouch
                 hashedPiece = h32(piece);
             }
             leafid = "h:" + hashedPiece;
+            do {
+                let nleafid = leafid;
+                try {
+                    nleafid = `${leafid}${hashQ}`;
+                    const pieceData = await database.get<EntryLeaf>(nleafid);
+                    if (pieceData.type == "leaf" && pieceData.data == piece) {
+                        leafid = nleafid;
+                        needMake = false;
+                        tryNextHash = false;
+                        hashCaches.set(piece, leafid);
+                    } else if (pieceData.type == "leaf") {
+                        Logger("hash:collision!!");
+                        hashQ++;
+                        tryNextHash = true;
+                    } else {
+                        leafid = nleafid;
+                        tryNextHash = false;
+                    }
+                } catch (ex) {
+                    if (ex.status && ex.status == 404) {
+                        //not found, we can use it.
+                        leafid = nleafid;
+                        needMake = true;
+                        tryNextHash = false;
+                    } else {
+                        needMake = false;
+                        tryNextHash = false;
+                        throw ex;
+                    }
+                }
+            } while (tryNextHash);
+            if (needMake) {
                 //have to make
-            const savePiece = passphrase != "" ? await encrypt(piece, passphrase) : piece;
+                const savePiece = piece;
                 const d: EntryLeaf = {
                     _id: leafid,
                     data: savePiece,
                     type: "leaf",
                 };
                 newLeafs.push(d);
-            hashCache[piece] = leafid;
-            hashCacheRev[leafid] = piece;
+                hashCaches.set(piece, leafid);
                 made++;
+            } else {
+                skiped++;
+            }
         }
         savenNotes.push(leafid);
-    } while (leftData != "");
+    }
     let saved = true;
     if (newLeafs.length > 0) {
         try {
             const result = await database.bulkDocs(newLeafs);
             for (const item of result) {
-                if ((item as any).ok) {
-                    Logger(`save ok:id:${item.id} rev:${item.rev}`, LOG_LEVEL.VERBOSE);
-                } else {
+                if (!(item as any).ok) {
                     if ((item as any).status && (item as any).status == 409) {
                         // conflicted, but it would be ok in childrens.
                     } else {
-                        Logger(`save failed:id:${item.id} rev:${item.rev}`, LOG_LEVEL.NOTICE);
+                        Logger(`Save failed:id:${item.id} rev:${item.rev}`, LOG_LEVEL.NOTICE);
                         Logger(item);
-                        // this.disposeHashCache();
                         saved = false;
                     }
                 }
             }
+            if (saved) {
+                Logger(`Chunk saved:${newLeafs.length} chunks`);
+            }
         } catch (ex) {
-            Logger("ERROR ON SAVING LEAVES:", LOG_LEVEL.NOTICE);
+            Logger("Chunk save failed:", LOG_LEVEL.NOTICE);
             Logger(ex, LOG_LEVEL.NOTICE);
             saved = false;
         }
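The new `do … while (tryNextHash)` loop above makes chunk ids collision-safe: an id is the xxhash-based `h:` value plus a numeric suffix, and it is only reused when the stored chunk body matches exactly. The same probing logic, restated against a plain `Map` instead of the PouchDB database (a hedged sketch, not the committed code):

```ts
// Sketch: suffix-probing for a free or matching chunk id.
function assignLeafId(existing: Map<string, string>, baseId: string, piece: string): string {
    for (let hashQ = 0; ; hashQ++) {
        const candidate = `${baseId}${hashQ}`; // "h:<hash>0", "h:<hash>1", ...
        const stored = existing.get(candidate);
        if (stored === undefined) return candidate; // free: create the chunk here
        if (stored === piece) return candidate;     // identical body: safe to dedup
        // different body under the same hash: a real collision, probe the next suffix
    }
}
```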
@@ -219,24 +418,27 @@ async function putDBEntry(note: LoadedEntry, passphrase: string, database: Pouch
             type: plainSplit ? "plain" : "newnote",
         };
         // Here for upsert logic,
-        try {
-            const old = await database.get(newDoc._id);
-            if (!old.type || old.type == "notes" || old.type == "newnote" || old.type == "plain") {
-                // simple use rev for new doc
-                newDoc._rev = old._rev;
-            }
-        } catch (ex: any) {
-            if (ex.status && ex.status == 404) {
-                // NO OP/
-            } else {
-                throw ex;
-            }
-        }
-        const ret = await database.put(newDoc, { force: true });
-        Logger(`note saved:${newDoc._id}:${ret.rev}`);
-        return ret;
+        return await runWithLock("file:" + newDoc._id, false, async () => {
+            try {
+                const old = await database.get(newDoc._id);
+                if (!old.type || old.type == "notes" || old.type == "newnote" || old.type == "plain") {
+                    // simple use rev for new doc
+                    newDoc._rev = old._rev;
+                }
+            } catch (ex) {
+                if (ex.status && ex.status == 404) {
+                    // NO OP/
+                } else {
+                    throw ex;
+                }
+            }
+            const r = await database.put(newDoc, { force: true });
+            Logger(`note saved:${newDoc._id}:${r.rev}`);
+            return r;
+        });
     } else {
         Logger(`note coud not saved:${note._id}`);
+        return false;
     }
 }
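`runWithLock` (imported from `lib/src/utils.js`) serializes the get-then-put upsert per document id, so two events for the same file cannot interleave between reading `_rev` and writing. Its real implementation lives in livesync-commonlib; a minimal sketch of the pattern it is used for, with an illustrative Map-based chain:

```ts
// Sketch: per-key serialization of async tasks.
const running = new Map<string, Promise<unknown>>();
function runSerialized<T>(key: string, proc: () => Promise<T>): Promise<T> {
    const prev = running.get(key) ?? Promise.resolve();
    const next = prev.then(proc, proc);            // run after the previous task settles
    running.set(key, next.catch(() => undefined)); // keep the chain alive on failure
    return next;
}
```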
@@ -252,6 +454,9 @@ async function eachProc(syncKey: string, config: eachConf) {
     const processor = config.local?.processor ?? "";
     const remote = new PouchDB(serverURI, { auth: serverAuth });
+    if (serverAuth.passphrase != "") {
+        enableEncryption(remote as PouchDB.Database<EntryDoc>, serverAuth.passphrase);
+    }
     async function sanityCheck() {
         let mr = await remote.info();
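`enableEncryption` comes from `lib/src/utils.js`; together with the `transform-pouch` plugin registered in the PouchDB build (below), it is what encrypts chunk bodies before they leave for the remote and decrypts them as they are read back, which is also why `exportDoc` can drop its own decryption further down. A hedged sketch of that transform-pouch pattern; `encryptBody`/`decryptBody` are hypothetical stand-ins, not the library's functions:

```ts
// Sketch: what an enableEncryption-style helper can do with transform-pouch.
declare function encryptBody(data: string, passphrase: string): Promise<string>;
declare function decryptBody(data: string, passphrase: string): Promise<string>;

function enableEncryptionSketch(db: PouchDB.Database, passphrase: string) {
    (db as any).transform({
        incoming: async (doc: any) => {
            if (doc.type == "leaf") doc.data = await encryptBody(doc.data, passphrase);
            return doc; // stored and replicated encrypted
        },
        outgoing: async (doc: any) => {
            if (doc.type == "leaf") doc.data = await decryptBody(doc.data, passphrase);
            return doc; // callers see plaintext
        },
    });
}
```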
@@ -501,7 +706,7 @@ async function eachProc(syncKey: string, config: eachConf) {
         addTouchedFile(pathSrc, mtime);
         await pushFile(pathSrc, stat);
-        // this.watchVaultChange(path, stat);
+        // watchVaultChange(path, stat);
     });
     log("Start Database watching");
     openConnection(conf, config.auto_reconnect ?? false);
@@ -563,21 +768,21 @@ async function exportDoc(sendDoc: TransferEntry, docName: string, passphrase: st
         delete (v as any)?._rev;
     }
-    let decrypted_children =
-        passphrase == ""
-            ? children
-            : (
-                await Promise.allSettled(
-                    children.map(async (e: any) => {
-                        e.data = await decrypt(e.data, passphrase);
-                        return e;
-                    })
-                )
-            ).map((e) => (e.status == "fulfilled" ? e.value : null));
+    // let decrypted_children =
+    //     passphrase == ""
+    //         ? children
+    //         : (
+    //             await Promise.allSettled(
+    //                 children.map(async (e: any) => {
+    //                     e.data = await decrypt(e.data, passphrase);
+    //                     return e;
+    //                 })
+    //             )
+    //         ).map((e) => (e.status == "fulfilled" ? e.value : null));
     const dirName = path.dirname(writePath);
     log(`doc:${docName}: Exporting to ${writePath}`);
     await fs.mkdir(dirName, { recursive: true });
-    const dt_plain = decrypted_children.map((e) => e.data).join("");
+    const dt_plain = children.map((e: any) => e.data).join("");
     const mtime = sendDoc.mtime ?? new Date().getTime();
     addTouchedFile(writePath, mtime);
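With the outgoing transform decrypting leaves at read time, the explicit `decrypt` pass is retired (commented out above) and export reduces to concatenating the fetched children:

```ts
// Sketch: reassembling a note body from its (already decrypted) chunks.
function assembleNote(children: { data: string }[]): string {
    return children.map((e) => e.data).join("");
}
```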
@@ -651,4 +856,4 @@ async function main() {
     }
 }
-main().then((_) => { });
+main().then((_) => {});

src/lib Submodule

@@ -0,0 +1 @@
+Subproject commit ae989051b5fd55db3e9a6b9facf45ee72c0d48c0

src/pouchdb.js

@@ -1,5 +1,10 @@
-const pouchdb_src = require("pouchdb-core").plugin(require("pouchdb-find")).plugin(require("pouchdb-adapter-leveldb")).plugin(require("pouchdb-adapter-http")).plugin(require("pouchdb-mapreduce")).plugin(require("pouchdb-replication"));
+const pouchdb_src = require("pouchdb-core")
+    .plugin(require("pouchdb-find"))
+    .plugin(require("pouchdb-adapter-leveldb"))
+    .plugin(require("pouchdb-adapter-http"))
+    .plugin(require("pouchdb-mapreduce"))
+    .plugin(require("pouchdb-replication"))
+    .plugin(require("transform-pouch"));
 const PouchDB = pouchdb_src;
 /**
  * @type {PouchDB.Static<>}
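`transform-pouch` sits last in the plugin chain, so every database created from this build exposes the `transform()` hook that the encryption wiring attaches to. A small usage sketch (identity transforms, illustrative database name):

```ts
// Sketch: transform() is available on instances built from the module above.
const db = new PouchDB("vault");
(db as any).transform({
    incoming: (doc: any) => doc, // runs before documents are written or replicated in
    outgoing: (doc: any) => doc, // runs as documents are read back out
});
```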