Updated to match 0.14.5

vorotamoroz 2022-09-05 17:25:31 +09:00
parent 5935ea73c5
commit 6c2eb73c29
3 changed files with 16 additions and 20 deletions

View File

@@ -8,7 +8,8 @@
 "passphrase": "passphrase_of_private_vault"
 },
 "path": "shared/",
-"initialScan": false
+"initialScan": false,
+"customChunkSize": 100
 },
 "local": {
 "path": "./vault",
@@ -18,4 +19,4 @@
 "auto_reconnect": true,
 "sync_on_connect": true
 }
 }
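
The only functional change in the sample config is the new customChunkSize key. It is optional in the updated config interface (last file of this commit) and main.ts reads it with a fallback of 0, so existing configs keep working. A minimal sketch of that defaulting; resolveCustomChunkSize is an illustrative name, not something this commit adds:

    // Illustrative helper, not part of this commit: read the optional setting the
    // same way eachProc does below (config.server.customChunkSize ?? 0).
    function resolveCustomChunkSize(server: { customChunkSize?: number }): number {
        // Absent or 0 keeps the base chunk size; putDBEntry clamps the multiplier to at least 1.
        return server.customChunkSize ?? 0;
    }

    resolveCustomChunkSize({});                       // 0   -> default behaviour
    resolveCustomChunkSize({ customChunkSize: 100 }); // 100 -> as in the sample above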

View File

@@ -84,13 +84,14 @@ function triggerProcessor(procs: string) {
 }
 const hashCaches = new LRUCache();
-async function putDBEntry(note: LoadedEntry, passphrase: string, saveAsBigChunk: boolean, database: PouchDB.Database<NewEntry | PlainEntry | EntryLeaf>) {
+async function putDBEntry(note: LoadedEntry, passphrase: string, saveAsBigChunk: boolean, customChunkSize: number, database: PouchDB.Database<NewEntry | PlainEntry | EntryLeaf>) {
 // let leftData = note.data;
 const savenNotes = [];
 let processed = 0;
 let made = 0;
 let skiped = 0;
-let pieceSize = MAX_DOC_SIZE_BIN;
+const maxChunkSize = MAX_DOC_SIZE_BIN * Math.max(customChunkSize, 1);
+let pieceSize = maxChunkSize;
 let plainSplit = false;
 let cacheUsed = 0;
 const userpasswordHash = h32Raw(new TextEncoder().encode(passphrase));
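
With the extra customChunkSize parameter, the cap on binary chunk size becomes a multiple of MAX_DOC_SIZE_BIN instead of the constant itself. A rough sketch of the arithmetic; the base value below is an assumption for illustration, the real MAX_DOC_SIZE_BIN is imported from the shared library:

    // Assumed base size for illustration only; the real MAX_DOC_SIZE_BIN comes from the lib.
    const MAX_DOC_SIZE_BIN = 100 * 1024;

    // Same expression as the hunk above: a multiplier of 0 (the default) is clamped to 1.
    function maxChunkSizeFor(customChunkSize: number): number {
        return MAX_DOC_SIZE_BIN * Math.max(customChunkSize, 1);
    }

    maxChunkSizeFor(0);   // 102400   -> behaves like the old fixed pieceSize
    maxChunkSizeFor(100); // 10240000 -> "customChunkSize": 100 allows chunks up to roughly 10 MB
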
@@ -99,22 +100,12 @@ async function putDBEntry(note: LoadedEntry, passphrase: string, saveAsBigChunk:
 plainSplit = true;
 }
+const minimumChunkSize = Math.min(Math.max(40, ~~(note.data.length / 100)), maxChunkSize);
+if (pieceSize < minimumChunkSize) pieceSize = minimumChunkSize;
 const newLeafs: EntryLeaf[] = [];
-// To keep low bandwith and database size,
-// Dedup pieces on database.
-// from 0.1.10, for best performance. we use markdown delimiters
-// 1. \n[^\n]{longLineThreshold}[^\n]*\n -> long sentence shuld break.
-// 2. \n\n shold break
-// 3. \r\n\r\n should break
-// 4. \n# should break.
-let minimumChunkSize = 20; //default
-if (minimumChunkSize < 10) minimumChunkSize = 10;
-let longLineThreshold = 250; //default
-if (longLineThreshold < 100) longLineThreshold = 100;
-//benchmarhk
-const pieces = splitPieces2(note.data, pieceSize, plainSplit, minimumChunkSize, longLineThreshold);
+const pieces = splitPieces2(note.data, pieceSize, plainSplit, minimumChunkSize, 0);
 for (const piece of pieces()) {
 processed++;
 let leafid = "";
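
The fixed defaults for minimumChunkSize (20) and longLineThreshold (250) are removed: the minimum chunk size is now derived from the note length, at least 40, roughly 1% of note.data.length (~~ truncates toward zero), and never more than maxChunkSize, while splitPieces2 now receives 0 as its long-line threshold. A small worked sketch with illustrative note lengths:

    // Sketch of the new minimum-chunk-size rule; maxChunkSize is the cap computed
    // earlier in putDBEntry (see the first hunk of this file).
    function minimumChunkSizeFor(noteLength: number, maxChunkSize: number): number {
        return Math.min(Math.max(40, ~~(noteLength / 100)), maxChunkSize);
    }

    minimumChunkSizeFor(1000, 102400);     // 40     -> small notes keep the 40-character floor
    minimumChunkSizeFor(50000, 102400);    // 500    -> about 1% of the note
    minimumChunkSizeFor(20000000, 102400); // 102400 -> capped at maxChunkSize
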
@@ -258,6 +249,7 @@ async function eachProc(syncKey: string, config: eachConf) {
 const exportPath = config.local?.path ?? "";
 const processor = config.local?.processor ?? "";
 const deleteMetadataOfDeletedFiles = config.deleteMetadataOfDeletedFiles ?? false;
+const customChunkSize = config.server.customChunkSize ?? 0;
 const remote = new PouchDB(serverURI, { auth: serverAuth });
 if (serverAuth.passphrase != "") {
@@ -334,7 +326,8 @@ async function eachProc(syncKey: string, config: eachConf) {
 fromDB: remote,
 fromPrefix: serverPath,
 passphrase: serverAuth.passphrase,
-deleteMetadataOfDeletedFiles: deleteMetadataOfDeletedFiles
+deleteMetadataOfDeletedFiles: deleteMetadataOfDeletedFiles,
+customChunkSize: customChunkSize
 };
 function storagePathToVaultPath(strStoragePath: string) {
@@ -384,7 +377,7 @@ async function eachProc(syncKey: string, config: eachConf) {
 data: content,
 type: datatype,
 };
-let ret = await putDBEntry(newNote, conf.passphrase, saveAsBigChunk, remote as PouchDB.Database<NewEntry | PlainEntry | EntryLeaf>);
+let ret = await putDBEntry(newNote, conf.passphrase, saveAsBigChunk, customChunkSize, remote as PouchDB.Database<NewEntry | PlainEntry | EntryLeaf>);
 if (ret) {
 addTouchedFile(pathSrc, 0);
 addKnownFile(conf.syncKey, ret.id, ret.rev);
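
Taken together, the eachProc hunks thread the setting from the per-sync server config into the connect settings and on to putDBEntry as its new fourth argument. A condensed, self-contained sketch of that flow; ServerConfig and buildConnectSettings are illustrative names, not identifiers from this commit:

    // Illustrative types; the real ones are config and connectConfig in types.ts.
    type ServerConfig = { customChunkSize?: number };
    interface ConnectSettings {
        passphrase: string;
        deleteMetadataOfDeletedFiles: boolean;
        customChunkSize: number; // required here, as in the updated connectConfig
    }

    function buildConnectSettings(server: ServerConfig, passphrase: string): ConnectSettings {
        return {
            passphrase,
            deleteMetadataOfDeletedFiles: false,
            // Older configs without the key fall back to 0; putDBEntry clamps that to a 1x multiplier.
            customChunkSize: server.customChunkSize ?? 0,
        };
    }

    const conf = buildConnectSettings({ customChunkSize: 100 }, "passphrase_of_private_vault");
    // conf.customChunkSize (100) is what gets forwarded as the fourth argument of putDBEntry.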

View File

@@ -15,6 +15,7 @@ export interface config {
 };
 path: string;
 initialScan: boolean;
+customChunkSize?: number;
 }
 export interface localConfig {
@@ -40,6 +41,7 @@ export interface connectConfig {
 fromPrefix: string;
 passphrase: string;
 deleteMetadataOfDeletedFiles: boolean;
+customChunkSize: number;
 }