Merge branch 'vrtmrz:master' into master

This commit is contained in:
Spencer Heywood 2022-04-15 10:59:13 -06:00 committed by GitHub
commit 9b9dca1c67
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
10 changed files with 692 additions and 137 deletions

View File

@ -7,13 +7,15 @@
"password": "password_of_private_vault",
"passphrase": "passphrase_of_private_vault"
},
"path": "shared/"
},
"local": {
"path": "./export",
"processor": "utils/build.sh",
"path": "shared/",
"initialScan": false
},
"auto_reconnect": true
"local": {
"path": "./vault",
"--processor": "utils/build.sh",
"initialScan": false
},
"auto_reconnect": true,
"sync_on_connect": true
}
}

21
license Normal file
View File

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2021 vorotamoroz
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

338
package-lock.json generated
View File

@ -14,6 +14,7 @@
"pouchdb-adapter-http": "^7.2.2",
"pouchdb-adapter-leveldb": "^7.2.2",
"pouchdb-core": "^7.2.2",
"pouchdb-find": "^7.3.0",
"pouchdb-mapreduce": "^7.2.2",
"pouchdb-node": "^7.2.2",
"pouchdb-replication": "^7.2.2",
@ -976,6 +977,163 @@
"node-fetch": "2.6.0"
}
},
"node_modules/pouchdb-find": {
"version": "7.3.0",
"resolved": "https://registry.npmjs.org/pouchdb-find/-/pouchdb-find-7.3.0.tgz",
"integrity": "sha512-EwhnfyxCAkKf8PG4tfndTTygEmtuz+o1LiZkxfPrflfXA3m1jo1ithib0hwBYtEwEYWuZxH6B8pRZutbLoQCGA==",
"dependencies": {
"pouchdb-abstract-mapreduce": "7.3.0",
"pouchdb-collate": "7.3.0",
"pouchdb-errors": "7.3.0",
"pouchdb-fetch": "7.3.0",
"pouchdb-md5": "7.3.0",
"pouchdb-selector-core": "7.3.0",
"pouchdb-utils": "7.3.0"
}
},
"node_modules/pouchdb-find/node_modules/buffer-from": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz",
"integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ=="
},
"node_modules/pouchdb-find/node_modules/fetch-cookie": {
"version": "0.11.0",
"resolved": "https://registry.npmjs.org/fetch-cookie/-/fetch-cookie-0.11.0.tgz",
"integrity": "sha512-BQm7iZLFhMWFy5CZ/162sAGjBfdNWb7a8LEqqnzsHFhxT/X/SVj/z2t2nu3aJvjlbQkrAlTUApplPRjWyH4mhA==",
"dependencies": {
"tough-cookie": "^2.3.3 || ^3.0.1 || ^4.0.0"
},
"engines": {
"node": ">=8"
}
},
"node_modules/pouchdb-find/node_modules/node-fetch": {
"version": "2.6.7",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz",
"integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==",
"dependencies": {
"whatwg-url": "^5.0.0"
},
"engines": {
"node": "4.x || >=6.0.0"
},
"peerDependencies": {
"encoding": "^0.1.0"
},
"peerDependenciesMeta": {
"encoding": {
"optional": true
}
}
},
"node_modules/pouchdb-find/node_modules/pouchdb-abstract-mapreduce": {
"version": "7.3.0",
"resolved": "https://registry.npmjs.org/pouchdb-abstract-mapreduce/-/pouchdb-abstract-mapreduce-7.3.0.tgz",
"integrity": "sha512-+2fVt3SDh7D776lIGbYZOsKX5js1aUyUw7iJaTGitxSdQ2ObWSTrr3SUrj5Qo1CkgPXwRM3Tdoq/53JYAa2qCA==",
"dependencies": {
"pouchdb-binary-utils": "7.3.0",
"pouchdb-collate": "7.3.0",
"pouchdb-collections": "7.3.0",
"pouchdb-errors": "7.3.0",
"pouchdb-fetch": "7.3.0",
"pouchdb-mapreduce-utils": "7.3.0",
"pouchdb-md5": "7.3.0",
"pouchdb-utils": "7.3.0"
}
},
"node_modules/pouchdb-find/node_modules/pouchdb-binary-utils": {
"version": "7.3.0",
"resolved": "https://registry.npmjs.org/pouchdb-binary-utils/-/pouchdb-binary-utils-7.3.0.tgz",
"integrity": "sha512-xvBH/XGHGcou2vkEzszJxkCc7YElfRUrkLUg51Jbdmh1mogLDUO0bU3Tj6TOIIJfRkQrU/HV+dDkMAhsil0amQ==",
"dependencies": {
"buffer-from": "1.1.2"
}
},
"node_modules/pouchdb-find/node_modules/pouchdb-collate": {
"version": "7.3.0",
"resolved": "https://registry.npmjs.org/pouchdb-collate/-/pouchdb-collate-7.3.0.tgz",
"integrity": "sha512-ys7rXKtEr6cfghgUjknwFJiOkITebV6JmeTybJKCzMV0r2luXu0OoPQsKVpE/wbM/3F5LxfpbFKGFpPcfGMvTA=="
},
"node_modules/pouchdb-find/node_modules/pouchdb-collections": {
"version": "7.3.0",
"resolved": "https://registry.npmjs.org/pouchdb-collections/-/pouchdb-collections-7.3.0.tgz",
"integrity": "sha512-Xr54m2+fErShXn+qAT4xwqJ+8NwddNPeTMJT4z4k1sZsrwfHmZsWbsKAyGPMF04eQaaU+7DDRMciu2VzaBUXyg=="
},
"node_modules/pouchdb-find/node_modules/pouchdb-errors": {
"version": "7.3.0",
"resolved": "https://registry.npmjs.org/pouchdb-errors/-/pouchdb-errors-7.3.0.tgz",
"integrity": "sha512-dTBbIC1BbCy6J9W/Csg5xROgb3wJN3HpbgAJHHSEtAkb8oA45KZmU3ZwEpNhf0AfPuQm4XgW1936PvlDlGgJiw==",
"dependencies": {
"inherits": "2.0.4"
}
},
"node_modules/pouchdb-find/node_modules/pouchdb-fetch": {
"version": "7.3.0",
"resolved": "https://registry.npmjs.org/pouchdb-fetch/-/pouchdb-fetch-7.3.0.tgz",
"integrity": "sha512-8/lcg8iMDG+GVs1dHNXA4ktJSEpH71dHU3xesMJ25tNQOqfAaaWrkfz9j71ZYDDkveLYE6UjUzl/sDacu2hSjw==",
"dependencies": {
"abort-controller": "3.0.0",
"fetch-cookie": "0.11.0",
"node-fetch": "2.6.7"
}
},
"node_modules/pouchdb-find/node_modules/pouchdb-mapreduce-utils": {
"version": "7.3.0",
"resolved": "https://registry.npmjs.org/pouchdb-mapreduce-utils/-/pouchdb-mapreduce-utils-7.3.0.tgz",
"integrity": "sha512-KDVSd+H2r+XWTrQfKWV71SknDDYRjYXoeWs0ZQl3xITHCcTl+fIgqyagg/XN+Zy/U9LeLPGMe2JdgPx9H8lJgw==",
"dependencies": {
"argsarray": "0.0.1",
"inherits": "2.0.4",
"pouchdb-collections": "7.3.0",
"pouchdb-utils": "7.3.0"
}
},
"node_modules/pouchdb-find/node_modules/pouchdb-md5": {
"version": "7.3.0",
"resolved": "https://registry.npmjs.org/pouchdb-md5/-/pouchdb-md5-7.3.0.tgz",
"integrity": "sha512-wL04QgoKyd/L/TV5gxgcvlEyCJiZoXCOEFJklTzkdza/kBQNJGPH7i0ZhKa7Sb+AvZYoWZHddf1Zgv7rBScHkA==",
"dependencies": {
"pouchdb-binary-utils": "7.3.0",
"spark-md5": "3.0.2"
}
},
"node_modules/pouchdb-find/node_modules/pouchdb-selector-core": {
"version": "7.3.0",
"resolved": "https://registry.npmjs.org/pouchdb-selector-core/-/pouchdb-selector-core-7.3.0.tgz",
"integrity": "sha512-sK/cCrIGeL9ImcMhKGcwa54+bzX7Wv4hhVV+oUW3T1Nasaoxh+Muem1GuA+x1+SbTCE8y37rUg8i6DIOhX51ew==",
"dependencies": {
"pouchdb-collate": "7.3.0",
"pouchdb-utils": "7.3.0"
}
},
"node_modules/pouchdb-find/node_modules/pouchdb-utils": {
"version": "7.3.0",
"resolved": "https://registry.npmjs.org/pouchdb-utils/-/pouchdb-utils-7.3.0.tgz",
"integrity": "sha512-HH+5IXXWn/ZgVCSnrlydBMYn6MabT7RS7SNoo9w8qVH9efpZSp3eLchw6yMQNLw8LQefWmbbskiHV9VgJmSVWQ==",
"dependencies": {
"argsarray": "0.0.1",
"clone-buffer": "1.0.0",
"immediate": "3.3.0",
"inherits": "2.0.4",
"pouchdb-collections": "7.3.0",
"pouchdb-errors": "7.3.0",
"pouchdb-md5": "7.3.0",
"uuid": "8.3.2"
}
},
"node_modules/pouchdb-find/node_modules/spark-md5": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/spark-md5/-/spark-md5-3.0.2.tgz",
"integrity": "sha512-wcFzz9cDfbuqe0FZzfi2or1sgyIrsDwmPwfZC4hiNidPdPINjeUwNfv5kldczoEAcjl9Y1L3SM7Uz2PUEQzxQw=="
},
"node_modules/pouchdb-find/node_modules/uuid": {
"version": "8.3.2",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
"integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==",
"bin": {
"uuid": "dist/bin/uuid"
}
},
"node_modules/pouchdb-generate-replication-id": {
"version": "7.2.2",
"integrity": "sha512-kBr9jTM3/qEQQDhraXdIhhy+OSi18X6pMJnWCSaT43194XuWZltnjH1Hty0aJ0U9s1UanyxqZwrb7wJT6QUpzg==",
@ -1229,6 +1387,11 @@
"node": ">=6"
}
},
"node_modules/tr46": {
"version": "0.0.3",
"resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
"integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o="
},
"node_modules/ts-node": {
"version": "10.5.0",
"integrity": "sha512-6kEJKwVxAJ35W4akuiysfKwKmjkbYxwQMTBaAxo9KKAx/Yd26mPUyhGz3ji+EsJoAgrLqVsYHNuuYwQe22lbtw==",
@ -1309,6 +1472,20 @@
"version": "1.0.3",
"integrity": "sha1-O+FF5YJxxzylUnndhR8SpoIRSws="
},
"node_modules/webidl-conversions": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
"integrity": "sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE="
},
"node_modules/whatwg-url": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz",
"integrity": "sha1-lmRU6HZUYuN2RNNib2dCzotwll0=",
"dependencies": {
"tr46": "~0.0.3",
"webidl-conversions": "^3.0.0"
}
},
"node_modules/write-stream": {
"version": "0.4.3",
"integrity": "sha1-g8yMA0fQr2BXqThitOOuAd5cgcE=",
@ -2104,6 +2281,148 @@
"node-fetch": "2.6.0"
}
},
"pouchdb-find": {
"version": "7.3.0",
"resolved": "https://registry.npmjs.org/pouchdb-find/-/pouchdb-find-7.3.0.tgz",
"integrity": "sha512-EwhnfyxCAkKf8PG4tfndTTygEmtuz+o1LiZkxfPrflfXA3m1jo1ithib0hwBYtEwEYWuZxH6B8pRZutbLoQCGA==",
"requires": {
"pouchdb-abstract-mapreduce": "7.3.0",
"pouchdb-collate": "7.3.0",
"pouchdb-errors": "7.3.0",
"pouchdb-fetch": "7.3.0",
"pouchdb-md5": "7.3.0",
"pouchdb-selector-core": "7.3.0",
"pouchdb-utils": "7.3.0"
},
"dependencies": {
"buffer-from": {
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/buffer-from/-/buffer-from-1.1.2.tgz",
"integrity": "sha512-E+XQCRwSbaaiChtv6k6Dwgc+bx+Bs6vuKJHHl5kox/BaKbhiXzqQOwK4cO22yElGp2OCmjwVhT3HmxgyPGnJfQ=="
},
"fetch-cookie": {
"version": "0.11.0",
"resolved": "https://registry.npmjs.org/fetch-cookie/-/fetch-cookie-0.11.0.tgz",
"integrity": "sha512-BQm7iZLFhMWFy5CZ/162sAGjBfdNWb7a8LEqqnzsHFhxT/X/SVj/z2t2nu3aJvjlbQkrAlTUApplPRjWyH4mhA==",
"requires": {
"tough-cookie": "^2.3.3 || ^3.0.1 || ^4.0.0"
}
},
"node-fetch": {
"version": "2.6.7",
"resolved": "https://registry.npmjs.org/node-fetch/-/node-fetch-2.6.7.tgz",
"integrity": "sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ==",
"requires": {
"whatwg-url": "^5.0.0"
}
},
"pouchdb-abstract-mapreduce": {
"version": "7.3.0",
"resolved": "https://registry.npmjs.org/pouchdb-abstract-mapreduce/-/pouchdb-abstract-mapreduce-7.3.0.tgz",
"integrity": "sha512-+2fVt3SDh7D776lIGbYZOsKX5js1aUyUw7iJaTGitxSdQ2ObWSTrr3SUrj5Qo1CkgPXwRM3Tdoq/53JYAa2qCA==",
"requires": {
"pouchdb-binary-utils": "7.3.0",
"pouchdb-collate": "7.3.0",
"pouchdb-collections": "7.3.0",
"pouchdb-errors": "7.3.0",
"pouchdb-fetch": "7.3.0",
"pouchdb-mapreduce-utils": "7.3.0",
"pouchdb-md5": "7.3.0",
"pouchdb-utils": "7.3.0"
}
},
"pouchdb-binary-utils": {
"version": "7.3.0",
"resolved": "https://registry.npmjs.org/pouchdb-binary-utils/-/pouchdb-binary-utils-7.3.0.tgz",
"integrity": "sha512-xvBH/XGHGcou2vkEzszJxkCc7YElfRUrkLUg51Jbdmh1mogLDUO0bU3Tj6TOIIJfRkQrU/HV+dDkMAhsil0amQ==",
"requires": {
"buffer-from": "1.1.2"
}
},
"pouchdb-collate": {
"version": "7.3.0",
"resolved": "https://registry.npmjs.org/pouchdb-collate/-/pouchdb-collate-7.3.0.tgz",
"integrity": "sha512-ys7rXKtEr6cfghgUjknwFJiOkITebV6JmeTybJKCzMV0r2luXu0OoPQsKVpE/wbM/3F5LxfpbFKGFpPcfGMvTA=="
},
"pouchdb-collections": {
"version": "7.3.0",
"resolved": "https://registry.npmjs.org/pouchdb-collections/-/pouchdb-collections-7.3.0.tgz",
"integrity": "sha512-Xr54m2+fErShXn+qAT4xwqJ+8NwddNPeTMJT4z4k1sZsrwfHmZsWbsKAyGPMF04eQaaU+7DDRMciu2VzaBUXyg=="
},
"pouchdb-errors": {
"version": "7.3.0",
"resolved": "https://registry.npmjs.org/pouchdb-errors/-/pouchdb-errors-7.3.0.tgz",
"integrity": "sha512-dTBbIC1BbCy6J9W/Csg5xROgb3wJN3HpbgAJHHSEtAkb8oA45KZmU3ZwEpNhf0AfPuQm4XgW1936PvlDlGgJiw==",
"requires": {
"inherits": "2.0.4"
}
},
"pouchdb-fetch": {
"version": "7.3.0",
"resolved": "https://registry.npmjs.org/pouchdb-fetch/-/pouchdb-fetch-7.3.0.tgz",
"integrity": "sha512-8/lcg8iMDG+GVs1dHNXA4ktJSEpH71dHU3xesMJ25tNQOqfAaaWrkfz9j71ZYDDkveLYE6UjUzl/sDacu2hSjw==",
"requires": {
"abort-controller": "3.0.0",
"fetch-cookie": "0.11.0",
"node-fetch": "2.6.7"
}
},
"pouchdb-mapreduce-utils": {
"version": "7.3.0",
"resolved": "https://registry.npmjs.org/pouchdb-mapreduce-utils/-/pouchdb-mapreduce-utils-7.3.0.tgz",
"integrity": "sha512-KDVSd+H2r+XWTrQfKWV71SknDDYRjYXoeWs0ZQl3xITHCcTl+fIgqyagg/XN+Zy/U9LeLPGMe2JdgPx9H8lJgw==",
"requires": {
"argsarray": "0.0.1",
"inherits": "2.0.4",
"pouchdb-collections": "7.3.0",
"pouchdb-utils": "7.3.0"
}
},
"pouchdb-md5": {
"version": "7.3.0",
"resolved": "https://registry.npmjs.org/pouchdb-md5/-/pouchdb-md5-7.3.0.tgz",
"integrity": "sha512-wL04QgoKyd/L/TV5gxgcvlEyCJiZoXCOEFJklTzkdza/kBQNJGPH7i0ZhKa7Sb+AvZYoWZHddf1Zgv7rBScHkA==",
"requires": {
"pouchdb-binary-utils": "7.3.0",
"spark-md5": "3.0.2"
}
},
"pouchdb-selector-core": {
"version": "7.3.0",
"resolved": "https://registry.npmjs.org/pouchdb-selector-core/-/pouchdb-selector-core-7.3.0.tgz",
"integrity": "sha512-sK/cCrIGeL9ImcMhKGcwa54+bzX7Wv4hhVV+oUW3T1Nasaoxh+Muem1GuA+x1+SbTCE8y37rUg8i6DIOhX51ew==",
"requires": {
"pouchdb-collate": "7.3.0",
"pouchdb-utils": "7.3.0"
}
},
"pouchdb-utils": {
"version": "7.3.0",
"resolved": "https://registry.npmjs.org/pouchdb-utils/-/pouchdb-utils-7.3.0.tgz",
"integrity": "sha512-HH+5IXXWn/ZgVCSnrlydBMYn6MabT7RS7SNoo9w8qVH9efpZSp3eLchw6yMQNLw8LQefWmbbskiHV9VgJmSVWQ==",
"requires": {
"argsarray": "0.0.1",
"clone-buffer": "1.0.0",
"immediate": "3.3.0",
"inherits": "2.0.4",
"pouchdb-collections": "7.3.0",
"pouchdb-errors": "7.3.0",
"pouchdb-md5": "7.3.0",
"uuid": "8.3.2"
}
},
"spark-md5": {
"version": "3.0.2",
"resolved": "https://registry.npmjs.org/spark-md5/-/spark-md5-3.0.2.tgz",
"integrity": "sha512-wcFzz9cDfbuqe0FZzfi2or1sgyIrsDwmPwfZC4hiNidPdPINjeUwNfv5kldczoEAcjl9Y1L3SM7Uz2PUEQzxQw=="
},
"uuid": {
"version": "8.3.2",
"resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz",
"integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg=="
}
}
},
"pouchdb-generate-replication-id": {
"version": "7.2.2",
"integrity": "sha512-kBr9jTM3/qEQQDhraXdIhhy+OSi18X6pMJnWCSaT43194XuWZltnjH1Hty0aJ0U9s1UanyxqZwrb7wJT6QUpzg==",
@ -2318,6 +2637,11 @@
"universalify": "^0.1.2"
}
},
"tr46": {
"version": "0.0.3",
"resolved": "https://registry.npmjs.org/tr46/-/tr46-0.0.3.tgz",
"integrity": "sha1-gYT9NH2snNwYWZLzpmIuFLnZq2o="
},
"ts-node": {
"version": "10.5.0",
"integrity": "sha512-6kEJKwVxAJ35W4akuiysfKwKmjkbYxwQMTBaAxo9KKAx/Yd26mPUyhGz3ji+EsJoAgrLqVsYHNuuYwQe22lbtw==",
@ -2364,6 +2688,20 @@
"version": "1.0.3",
"integrity": "sha1-O+FF5YJxxzylUnndhR8SpoIRSws="
},
"webidl-conversions": {
"version": "3.0.1",
"resolved": "https://registry.npmjs.org/webidl-conversions/-/webidl-conversions-3.0.1.tgz",
"integrity": "sha1-JFNCdeKnvGvnvIZhHMFq4KVlSHE="
},
"whatwg-url": {
"version": "5.0.0",
"resolved": "https://registry.npmjs.org/whatwg-url/-/whatwg-url-5.0.0.tgz",
"integrity": "sha1-lmRU6HZUYuN2RNNib2dCzotwll0=",
"requires": {
"tr46": "~0.0.3",
"webidl-conversions": "^3.0.0"
}
},
"write-stream": {
"version": "0.4.3",
"integrity": "sha1-g8yMA0fQr2BXqThitOOuAd5cgcE=",

View File

@ -16,6 +16,7 @@
"pouchdb-adapter-http": "^7.2.2",
"pouchdb-adapter-leveldb": "^7.2.2",
"pouchdb-core": "^7.2.2",
"pouchdb-find": "^7.3.0",
"pouchdb-mapreduce": "^7.2.2",
"pouchdb-node": "^7.2.2",
"pouchdb-replication": "^7.2.2",

46
readme.md Normal file
View File

@ -0,0 +1,46 @@
# FileSystem-LiveSync
The synchronization daemon between filesystem and CouchDB compatible with [Self-hosted LiveSync](https://github.com/vrtmrz/obsidian-livesync).
Notice: **We're on the bleeding edge.** Please make sure to back your vault up!
## How to run
```sh
git clone https://github.com/vrtmrz/filesystem-livesync
cp dat/config.sample.json dat/config.json
# Setting up configuration
vi dat/config.json
npm i -D
npm run dev
```
## Configuration
The configuration file consists of the following structure.
```jsonc
{
// "config_1" is just the name for identifying the connection.
"config_1": {
"server": {
"uri": "http://localhost:5984/private1_vault",
"auth": {
"username": "username_of_private_vault",
"password": "password_of_private_vault",
"passphrase": "passphrase_of_private_vault"
},
"path": "shared/", // All documents under this path will synchronized.
"initialScan": false // If you enable this, all server files will be synchronized to local storage once when daemon has been started.
},
"local": {
"path": "./vault",
"processor": "utils/build.sh", // If you want to run some program after synchronization has been stablized, you can set this.
"initialScan": false // If you enable this, all files on the local storage will be synchronized to server once when daemon has been started.
},
"auto_reconnect": true,
"sync_on_connect": true // This means both server.initialScan + local.initialScan.
}
}
```

View File

@ -1,6 +1,17 @@
import { Logger } from "./logger";
import { LOG_LEVEL } from "./types";
import { webcrypto as crypto } from "crypto";
import { webcrypto as crypto_ } from "crypto";
let webcrypto: Crypto;
if (typeof window !== "undefined" && window.crypto) {
webcrypto = window.crypto;
} else {
const crypto = crypto_;
//@ts-ignore
webcrypto = crypto;
}
console.dir(webcrypto);
export type encodedData = [encryptedData: string, iv: string, salt: string];
export type KeyBuffer = {
@ -30,10 +41,10 @@ export async function getKeyForEncrypt(passphrase: string): Promise<[CryptoKey,
recycleCount = KEY_RECYCLE_COUNT;
}
const xpassphrase = new TextEncoder().encode(passphrase);
const digest = await crypto.subtle.digest({ name: "SHA-256" }, xpassphrase);
const keyMaterial = await crypto.subtle.importKey("raw", digest, { name: "PBKDF2" }, false, ["deriveKey"]);
const salt = crypto.getRandomValues(new Uint8Array(16));
const key = await crypto.subtle.deriveKey(
const digest = await webcrypto.subtle.digest({ name: "SHA-256" }, xpassphrase);
const keyMaterial = await webcrypto.subtle.importKey("raw", digest, { name: "PBKDF2" }, false, ["deriveKey"]);
const salt = webcrypto.getRandomValues(new Uint8Array(16));
const key = await webcrypto.subtle.deriveKey(
{
name: "PBKDF2",
salt,
@ -63,9 +74,9 @@ export async function getKeyForDecryption(passphrase: string, salt: Uint8Array):
return [f.key, f.salt];
}
const xpassphrase = new TextEncoder().encode(passphrase);
const digest = await crypto.subtle.digest({ name: "SHA-256" }, xpassphrase);
const keyMaterial = await crypto.subtle.importKey("raw", digest, { name: "PBKDF2" }, false, ["deriveKey"]);
const key = await crypto.subtle.deriveKey(
const digest = await webcrypto.subtle.digest({ name: "SHA-256" }, xpassphrase);
const keyMaterial = await webcrypto.subtle.importKey("raw", digest, { name: "PBKDF2" }, false, ["deriveKey"]);
const key = await webcrypto.subtle.deriveKey(
{
name: "PBKDF2",
salt,
@ -93,7 +104,7 @@ function getSemiStaticField(reset?: boolean) {
if (semiStaticFieldBuffer != null && !reset) {
return semiStaticFieldBuffer;
}
semiStaticFieldBuffer = crypto.getRandomValues(new Uint8Array(12));
semiStaticFieldBuffer = webcrypto.getRandomValues(new Uint8Array(12));
return semiStaticFieldBuffer;
}
@ -132,7 +143,7 @@ export async function encrypt(input: string, passphrase: string) {
const iv = Uint8Array.from([...fixedPart, ...new Uint8Array(invocationPart.buffer)]);
const plainStringified: string = JSON.stringify(input);
const plainStringBuffer: Uint8Array = new TextEncoder().encode(plainStringified);
const encryptedDataArrayBuffer = await crypto.subtle.encrypt({ name: "AES-GCM", iv }, key, plainStringBuffer);
const encryptedDataArrayBuffer = await webcrypto.subtle.encrypt({ name: "AES-GCM", iv }, key, plainStringBuffer);
const encryptedData = btoa(Array.from(new Uint8Array(encryptedDataArrayBuffer), (char) => String.fromCharCode(char)).join(""));
@ -150,7 +161,7 @@ export async function decrypt(encryptedResult: string, passphrase: string): Prom
// decode base 64, it should increase speed and i should with in MAX_DOC_SIZE_BIN, so it won't OOM.
const encryptedDataBin = atob(encryptedData);
const encryptedDataArrayBuffer = Uint8Array.from(encryptedDataBin.split(""), (char) => char.charCodeAt(0));
const plainStringBuffer: ArrayBuffer = await crypto.subtle.decrypt({ name: "AES-GCM", iv }, key, encryptedDataArrayBuffer);
const plainStringBuffer: ArrayBuffer = await webcrypto.subtle.decrypt({ name: "AES-GCM", iv }, key, encryptedDataArrayBuffer);
const plainStringified = new TextDecoder().decode(plainStringBuffer);
const plain = JSON.parse(plainStringified);
return plain;

View File

@ -1,16 +1,18 @@
import { decrypt, encrypt } from "./e2ee.js";
import chokidar from "chokidar";
//@ts-ignore
import { PouchDB as PouchDB_src } from "./pouchdb.js";
import * as fs from "fs/promises";
import * as path from "path";
import * as util from "util";
import { exec } from "child_process";
import { Stats } from "fs";
import { Logger } from "./logger.js";
import { configFile, connectConfig, eachConf, Entry, EntryLeaf, LoadedEntry, LOG_LEVEL, MAX_DOC_SIZE, MAX_DOC_SIZE_BIN, NewEntry, PlainEntry } from "./types.js";
import { Stats } from "fs";
import { addTouchedFile, isKnownFile, isPlainText, isTouchedFile, path2unix } from "./util.js";
//@ts-ignore
import { PouchDB as PouchDB_src } from "./pouchdb.js";
import { decrypt, encrypt } from "./e2ee.js";
import { configFile, connectConfig, eachConf, Entry, EntryLeaf, LoadedEntry, LOG_LEVEL, MAX_DOC_SIZE, MAX_DOC_SIZE_BIN, NewEntry, PlainEntry, TransferEntry } from "./types.js";
import { addKnownFile, addTouchedFile, calcDateDiff, DATEDIFF_EVEN, DATEDIFF_NEWER_A, DATEDIFF_OLDER_A, isKnownFile, isPlainText, isTouchedFile, path2unix } from "./util.js";
const xxhash = require("xxhash-wasm");
@ -79,25 +81,6 @@ function triggerProcessor(procs: string) {
}, 500);
}
async function main() {
log("LiveSync-classroom starting up.");
let xx = await xxhash();
h32Raw = xx.h32Raw;
h32 = xx.h32ToString;
let config: configFile = JSON.parse((await fs.readFile("./dat/config.json")) + "");
try {
syncStat = JSON.parse((await fs.readFile(statFile)) + "");
} catch (ex) {
log("could not read pervious sync status, initialized.");
syncStat = {};
}
for (const conf of Object.entries(config)) {
setTimeout(() => eachProc(conf[0], conf[1]), 100);
}
}
let hashCache: {
[key: string]: string;
} = {};
@ -171,11 +154,9 @@ async function putDBEntry(note: LoadedEntry, passphrase: string, database: Pouch
let leafid = "";
// Get hash of piece.
let hashedPiece = "";
let needMake = true;
if (typeof hashCache[piece] !== "undefined") {
hashedPiece = "";
leafid = hashCache[piece];
needMake = false;
skiped++;
cacheUsed++;
} else {
@ -251,12 +232,15 @@ async function putDBEntry(note: LoadedEntry, passphrase: string, database: Pouch
throw ex;
}
}
const r = await database.put(newDoc, { force: true });
Logger(`note saved:${newDoc._id}:${r.rev}`);
const ret = await database.put(newDoc, { force: true });
Logger(`note saved:${newDoc._id}:${ret.rev}`);
return ret;
} else {
Logger(`note coud not saved:${note._id}`);
}
}
// Run synchronization for each config
async function eachProc(syncKey: string, config: eachConf) {
log(`${syncKey} started`);
@ -286,7 +270,6 @@ async function eachProc(syncKey: string, config: eachConf) {
log(ex);
process.exit(-1);
}
log("Start Database watching");
function openConnection(e: connectConfig, auto_reconnect: boolean) {
Logger(`Connecting ${e.syncKey} with auto_reconnect:${auto_reconnect}`);
@ -301,7 +284,7 @@ async function eachProc(syncKey: string, config: eachConf) {
},
})
.on("change", async function (change) {
if (change.doc?._id.startsWith(e.fromPrefix) && isVaildDoc(change.doc._id)) {
if (change.doc?._id.indexOf(":") == -1 && change.doc?._id.startsWith(e.fromPrefix) && isVaildDoc(change.doc._id)) {
let x = await transferDoc(e.syncKey, e.fromDB, change.doc, e.fromPrefix, e.passphrase, exportPath);
if (x) {
syncStat[syncKey] = change.seq + "";
@ -333,25 +316,38 @@ async function eachProc(syncKey: string, config: eachConf) {
}
log("start vault watching");
const watcher = chokidar.watch(config.local.path, { ignoreInitial: !config.local.initialScan });
const vaultPath = path.posix.normalize(config.local.path);
const db_add = async (pathSrc: string, stat: Stats) => {
const id = serverPath + path2unix(path.relative(path.resolve(vaultPath), path.resolve(pathSrc)));
const storagePathRoot = path.resolve(config.local.path);
let conf: connectConfig = {
syncKey: syncKey,
fromDB: remote,
fromPrefix: serverPath,
passphrase: serverAuth.passphrase,
};
function storagePathToVaultPath(strStoragePath: string) {
const rel = path.relative(storagePathRoot, strStoragePath);
return path2unix(rel);
}
function vaultPathToStroageABSPath(strVaultPath: string) {
const filePath = path.resolve(path.join(storagePathRoot, strVaultPath));
return filePath;
}
const pushFile = async (pathSrc: string, stat: Stats) => {
const id = serverPath + storagePathToVaultPath(pathSrc);
const docId = id.startsWith("_") ? "/" + id : id;
try {
let doc = (await remote.get(docId)) as NewEntry;
if (doc.mtime) {
const mtime_srv = ~~(doc.mtime / 1000);
const mtime_loc = ~~(stat.mtime.getTime() / 1000);
if (mtime_loc == mtime_srv) {
log(`Should be not modified on ${pathSrc}`);
if (calcDateDiff(doc.mtime, stat.mtime) == DATEDIFF_EVEN) {
return;
}
}
} catch (ex: any) {
if (ex.status && ex.status == 404) {
// NO OP.
log(`${id} -> maybe new`);
} else {
throw ex;
}
@ -360,8 +356,6 @@ async function eachProc(syncKey: string, config: eachConf) {
let datatype: "newnote" | "plain" = "newnote";
const d = await fs.readFile(pathSrc);
if (!isPlainText(pathSrc)) {
// const contentBin = await this.app.vault.readBinary(file);
// content = await arrayBufferToBase64(contentBin);
content = d.toString("base64");
datatype = "newnote";
} else {
@ -378,15 +372,20 @@ async function eachProc(syncKey: string, config: eachConf) {
data: content,
// type: "plain",
};
await putDBEntry(newNote, conf.passphrase, remote as PouchDB.Database<NewEntry | PlainEntry | Entry | EntryLeaf>);
let ret = await putDBEntry(newNote, conf.passphrase, remote as PouchDB.Database<NewEntry | PlainEntry | Entry | EntryLeaf>);
if (ret) {
addTouchedFile(pathSrc, 0);
addKnownFile(conf.syncKey, ret.id, ret.rev);
}
};
const db_delete = async (pathSrc: string) => {
const id = serverPath + path2unix(path.relative(path.resolve(vaultPath), path.resolve(pathSrc)));
const unlinkFile = async (pathSrc: string) => {
const id = serverPath + storagePathToVaultPath(pathSrc);
const docId = id.startsWith("_") ? "/" + id : id;
try {
let oldNote: any = await remote.get(docId);
oldNote._deleted = true;
await remote.put(oldNote);
let ret = await remote.put(oldNote);
addKnownFile(conf.syncKey, ret.id, ret.rev);
addTouchedFile(pathSrc, 0);
} catch (ex: any) {
if (ex.status && ex.status == 404) {
@ -396,44 +395,115 @@ async function eachProc(syncKey: string, config: eachConf) {
}
}
};
// check the document is under the [vault]/[configured_dir]..
function isTargetFile(pathSrc: string): boolean {
if (pathSrc.startsWith(config.server.path)) {
return true;
} else {
return false;
}
}
async function pullFile(id: string, localPath: string) {
let fromDoc = await remote.get(id);
const docName = fromDoc._id.substring(config.server.path.length);
let sendDoc: PouchDB.Core.ExistingDocument<PouchDB.Core.ChangesMeta> & { children?: string[]; type?: string; mtime?: number } = { ...fromDoc, _id: docName.startsWith("_") ? "/" + docName : docName };
if (await exportDoc(sendDoc, docName, serverAuth.passphrase, remote, exportPath)) {
log(`Pull:${localPath}`);
} else {
log(`Failed:${localPath}`);
}
}
if (config.sync_on_connect || config.server.initialScan) {
const dbfiles = await remote.find({ limit: 999999999, selector: { $or: [{ type: "plain" }, { type: "newnote" }] }, fields: ["_id", "mtime"] });
log(`Waiting for initial sync(Database to storage)`);
if (dbfiles.docs) {
for (const doc of dbfiles.docs) {
if (doc._id.indexOf(":") !== -1) continue;
const fn = doc._id.startsWith("/") ? doc._id.substring(1) : doc._id;
if (!isTargetFile(fn)) {
continue;
}
const localPath = fn.substring(config.server.path.length);
const storageNewFilePath = vaultPathToStroageABSPath(localPath);
// log(`Checking initial file:${localPath}`);
// log(`--> file:${storageNewFilePath}`);
const mtime: number = (doc as any).mtime;
try {
const stat = await fs.stat(storageNewFilePath);
const diff = calcDateDiff(stat.mtime, mtime);
if (diff == DATEDIFF_NEWER_A) {
log(`--> ${localPath}`);
await pushFile(storageNewFilePath, stat);
// return;
} else if (diff == DATEDIFF_OLDER_A) {
log(`<-- ${localPath}`);
await pullFile(doc._id, localPath);
} else {
log(`=== ${localPath}`);
}
} catch (ex: any) {
if (ex.code == "ENOENT") {
log(`<<- ${localPath}`);
await pullFile(doc._id, localPath);
// return;
continue;
}
log(`Error on checking file:${localPath}`);
log(`Error:${ex}`);
}
}
log(`Done!`);
}
}
const watcher = chokidar.watch(config.local.path, {
ignoreInitial: !config.local.initialScan && !config.sync_on_connect,
awaitWriteFinish: {
stabilityThreshold: 500,
},
});
watcher.on("change", async (pathSrc: string, stat: Stats) => {
const filePath = pathSrc;
log(`Detected:change:${filePath}`);
const mtime = ~~(stat.mtime.getTime() / 1000);
const mtime = stat.mtime.getTime();
if (isTouchedFile(filePath, mtime)) {
log("Self-detect");
// log(`Self-detected::${filePath}`);
return;
}
log(`Detected:change:${filePath}`);
addTouchedFile(pathSrc, mtime);
await db_add(pathSrc, stat);
await pushFile(pathSrc, stat);
});
watcher.on("unlink", async (pathSrc: string, stat: Stats) => {
const filePath = pathSrc;
log(`Detected:delete:${filePath}`);
if (isTouchedFile(filePath, 0)) {
log("self-detect");
// log(`Self-detected::${filePath}`);
return;
}
await db_delete(pathSrc);
log(`Detected:delete:${filePath}`);
await unlinkFile(pathSrc);
});
watcher.on("add", async (pathSrc: string, stat: Stats) => {
const filePath = pathSrc;
log(`Detected:created:${filePath}`);
const mtime = ~~(stat.mtime.getTime() / 1000);
const mtime = stat.mtime.getTime();
if (isTouchedFile(filePath, mtime)) {
log("Self-detect");
// log(`Self-detected::${filePath}`);
return;
}
log(`Detected:created:${filePath}`);
addTouchedFile(pathSrc, mtime);
await db_add(pathSrc, stat);
await pushFile(pathSrc, stat);
// this.watchVaultChange(path, stat);
});
let conf: connectConfig = {
syncKey: syncKey,
fromDB: remote,
fromPrefix: serverPath,
passphrase: serverAuth.passphrase,
};
log("Start Database watching");
openConnection(conf, config.auto_reconnect ?? false);
}
@ -448,6 +518,82 @@ function isVaildDoc(id: string): boolean {
return true;
}
async function exportDoc(sendDoc: TransferEntry, docName: string, passphrase: string, db: PouchDB.Database, exportPath: string) {
const writePath = path.join(exportPath, docName);
if (sendDoc._deleted) {
log(`doc:${docName}: Deleted, so delete from ${writePath}`);
try {
addTouchedFile(writePath, 0);
await fs.unlink(writePath);
} catch (ex: any) {
if (ex.code == "ENOENT") {
//NO OP
} else {
throw ex;
}
}
return true;
}
if (!sendDoc.children) {
log(`doc:${docName}: Warning! document doesn't have chunks, skipped`);
return false;
}
try {
const stat_init = await fs.stat(writePath);
const mtime = sendDoc.mtime ?? new Date().getTime();
const diff = calcDateDiff(mtime, stat_init.mtime);
if (diff == DATEDIFF_EVEN) {
log(`doc:${docName}: Up to date`);
return true;
}
} catch (ex) {
// WRAP IT
log(ex);
}
let cx = sendDoc.children;
let children = await getChildren(cx, db);
if (children.includes(undefined)) {
log(`doc:${docName}: Warning! there's missing chunks, skipped`);
return false;
}
children = children.filter((e) => !!e);
for (const v of children) {
delete (v as any)?._rev;
}
let decrypted_children =
passphrase == ""
? children
: (
await Promise.allSettled(
children.map(async (e: any) => {
e.data = await decrypt(e.data, passphrase);
return e;
})
)
).map((e) => (e.status == "fulfilled" ? e.value : null));
const dirName = path.dirname(writePath);
log(`doc:${docName}: Exporting to ${writePath}`);
await fs.mkdir(dirName, { recursive: true });
const dt_plain = decrypted_children.map((e) => e.data).join("");
const mtime = sendDoc.mtime ?? new Date().getTime();
addTouchedFile(writePath, mtime);
const tmtime = ~~(mtime / 1000);
if (sendDoc.type == "plain") {
await fs.writeFile(writePath, dt_plain);
await fs.utimes(writePath, tmtime, tmtime);
} else {
const dt_bin = Buffer.from(dt_plain, "base64");
await fs.writeFile(writePath, dt_bin, { encoding: "binary" });
await fs.utimes(writePath, tmtime, tmtime);
}
log(`doc:${docName}: Exported`);
return true;
}
async function transferDoc(syncKey: string, fromDB: PouchDB.Database, fromDoc: PouchDB.Core.ExistingDocument<PouchDB.Core.ChangesMeta>, fromPrefix: string, passphrase: string, exportPath: string): Promise<boolean> {
const docKey = `${syncKey}: ${fromDoc._id} (${fromDoc._rev})`;
while (running[syncKey]) {
@ -464,7 +610,6 @@ async function transferDoc(syncKey: string, fromDB: PouchDB.Database, fromDoc: P
const docName = fromDoc._id.substring(fromPrefix.length);
let sendDoc: PouchDB.Core.ExistingDocument<PouchDB.Core.ChangesMeta> & { children?: string[]; type?: string; mtime?: number } = { ...fromDoc, _id: docName.startsWith("_") ? "/" + docName : docName };
let retry = false;
const userpasswordHash = h32Raw(new TextEncoder().encode(passphrase));
do {
if (retry) {
continue_count--;
@ -474,60 +619,7 @@ async function transferDoc(syncKey: string, fromDB: PouchDB.Database, fromDoc: P
}
await delay(1500);
}
if (sendDoc._deleted && exportPath != "") {
const writePath = path.join(exportPath, docName);
log(`doc:${docKey}: Deleted, so delete from ${writePath}`);
addTouchedFile(writePath, 0);
await fs.unlink(writePath);
}
retry = false;
if (!sendDoc.children) {
log(`doc:${docKey}: Warning! document doesn't have chunks, skipped`);
return false;
}
let cx = sendDoc.children;
let children = await getChildren(cx, fromDB);
if (children.includes(undefined)) {
log(`doc:${docKey}: Warning! there's missing chunks, skipped`);
return false;
} else {
children = children.filter((e) => !!e);
for (const v of children) {
delete (v as any)?._rev;
}
let decrypted_children =
passphrase == ""
? children
: (
await Promise.allSettled(
children.map(async (e: any) => {
e.data = await decrypt(e.data, passphrase);
return e;
})
)
).map((e) => (e.status == "fulfilled" ? e.value : null));
// If exporting is enabled, write contents to the real file.
if (exportPath != "" && !sendDoc._deleted) {
const writePath = path.join(exportPath, docName);
const dirName = path.dirname(writePath);
log(`doc:${docKey}: Exporting to ${writePath}`);
await fs.mkdir(dirName, { recursive: true });
const dt_plain = decrypted_children.map((e) => e.data).join("");
const mtime = sendDoc.mtime ?? new Date().getTime();
const tmtime = ~~(mtime / 1000);
addTouchedFile(writePath, tmtime);
if (sendDoc.type == "plain") {
await fs.writeFile(writePath, dt_plain);
await fs.utimes(writePath, tmtime, tmtime);
} else {
const dt_bin = Buffer.from(dt_plain, "base64");
await fs.writeFile(writePath, dt_bin, { encoding: "binary" });
await fs.utimes(writePath, tmtime, tmtime);
}
}
}
retry = !(await exportDoc(sendDoc, docName, passphrase, fromDB, exportPath));
} while (retry);
} catch (ex) {
log("Exception on transfer doc");
@ -539,4 +631,24 @@ async function transferDoc(syncKey: string, fromDB: PouchDB.Database, fromDoc: P
return false;
}
// Entry point: initialise the xxhash functions, load configuration and the
// previous sync status, then spawn one sync process per configured pair.
async function main() {
    log("FileSystem-Livesync starting up.");
    const xx = await xxhash();
    h32Raw = xx.h32Raw;
    h32 = xx.h32ToString;
    // readFile returns a Buffer; decode it explicitly before parsing.
    const config: configFile = JSON.parse((await fs.readFile("./dat/config.json")).toString());
    try {
        syncStat = JSON.parse((await fs.readFile(statFile)).toString());
    } catch (ex) {
        // First run (or unreadable status file): start from a clean slate.
        log("could not read previous sync status, initialized.");
        syncStat = {};
    }
    // Run each processes
    for (const [syncKey, conf] of Object.entries(config)) {
        setTimeout(() => eachProc(syncKey, conf), 100);
    }
}
// Log instead of dying with an unhandled rejection if startup fails.
main().catch((ex) => log(ex));

View File

@ -1,5 +1,5 @@
const pouchdb_src = require("pouchdb-core").plugin(require("pouchdb-adapter-leveldb")).plugin(require("pouchdb-adapter-http")).plugin(require("pouchdb-mapreduce")).plugin(require("pouchdb-replication"));
const pouchdb_src = require("pouchdb-core").plugin(require("pouchdb-find")).plugin(require("pouchdb-adapter-leveldb")).plugin(require("pouchdb-adapter-http")).plugin(require("pouchdb-mapreduce")).plugin(require("pouchdb-replication"));
const PouchDB = pouchdb_src;
/**
* @type {PouchDB.Static<>}

View File

@ -14,6 +14,7 @@ export interface config {
passphrase: string;
};
path: string;
initialScan: boolean;
}
export interface localConfig {
path: string;
@ -25,6 +26,7 @@ export interface eachConf {
server: config;
local: localConfig;
auto_reconnect?: boolean;
sync_on_connect: boolean;
}
export interface configFile {
@ -91,3 +93,9 @@ export type LoadedEntry = Entry & {
children: string[];
datatype: "plain" | "newnote";
};
// A document as it travels between databases: the replicated PouchDB record
// plus the optional fields used when exporting the document to disk.
export type TransferEntry = PouchDB.Core.ExistingDocument<PouchDB.Core.ChangesMeta> & {
    children?: string[]; // ids of the chunk documents holding the file body
    type?: string; // "plain" for text content; otherwise data is base64-encoded binary
    mtime?: number; // modification time in epoch milliseconds (divided by 1000 for utimes)
};

View File

@ -25,13 +25,29 @@ export function isKnownFile(syncKey: string, id: string, rev: string) {
return known_files.indexOf(`${syncKey}-${id}-${rev}`) !== -1;
}
// Remember that we just wrote this file ourselves so the filesystem watcher
// can ignore the resulting change event (see isTouchedFile). Timestamps are
// bucketed into 5-second windows to tolerate mtime jitter.
// NOTE: the previous revision left both the old (`~~(mtime / 10)`) and new
// (`rmtime`) key lines in place, redeclaring `key`; only the 5-second-bucket
// form (matching isTouchedFile) is kept here.
export function addTouchedFile(pathSrc: string, mtime: number) {
    const rmtime = ~~(mtime / 5000);
    const targetFile = path.resolve(pathSrc);
    const key = `${targetFile}-${rmtime}`;
    touchedFile.push(key);
    // Keep only the 50 most recent entries.
    touchedFile = touchedFile.slice(-50);
}
// True when addTouchedFile recorded this path within the same 5-second
// bucket — i.e. the change was made by this process, not by the user.
// NOTE: the previous revision left both the old (`~~(mtime / 10)`) and new
// (`rmtime`) key lines in place, redeclaring `key`; only the 5-second-bucket
// form (matching addTouchedFile) is kept here.
export function isTouchedFile(pathSrc: string, mtime: number) {
    const rmtime = ~~(mtime / 5000);
    const targetFile = path.resolve(pathSrc);
    const key = `${targetFile}-${rmtime}`;
    return touchedFile.indexOf(key) !== -1;
}
// Comparison results for calcDateDiff. Each direction has two aliases:
// e.g. "A is newer" and "B is older" describe the same outcome.
export const DATEDIFF_NEWER_A = 1;
export const DATEDIFF_OLDER_B = 1;
export const DATEDIFF_EVEN = 0;
export const DATEDIFF_OLDER_A = -1;
export const DATEDIFF_NEWER_B = -1;
export type DATEDIFF = 1 | 0 | -1;
/**
 * Compare two timestamps after truncating them to the given resolution.
 * @param a first timestamp (epoch milliseconds or Date)
 * @param b second timestamp (epoch milliseconds or Date)
 * @param resolution bucket size in milliseconds (default: one second)
 * @returns DATEDIFF_EVEN when equal at that resolution, otherwise the sign
 *          of (a - b): positive when a is newer, negative when b is newer.
 */
export function calcDateDiff(a: number | Date, b: number | Date, resolution = 1000): DATEDIFF {
    const toBucket = (t: number | Date) => ~~((t instanceof Date ? t.getTime() : t) / resolution);
    return Math.sign(toBucket(a) - toBucket(b)) as DATEDIFF;
}