Mirror of https://github.com/HeyPuter/puter.git, synced 2025-02-02 14:18:43 +08:00

feat: hash-based distributed cache inval

parent eb7494ce55
commit d386096467
@@ -23,11 +23,13 @@ const { NodeUIDSelector } = require("../node/selectors");
 const { UploadProgressTracker } = require("../storage/UploadProgressTracker");
 const FSNodeContext = require("../FSNodeContext");
 const APIError = require("../../api/APIError");
-const { progress_stream, stuck_detector_stream } = require("../../util/streamutil");
+const { progress_stream, stuck_detector_stream, hashing_stream } = require("../../util/streamutil");
 const { OperationFrame } = require("../../services/OperationTraceService");
 const { Actor } = require("../../services/auth/Actor");
 const { DB_WRITE } = require("../../services/database/consts");
 
+const crypto = require('crypto');
+
 const STUCK_STATUS_TIMEOUT = 10 * 1000;
 const STUCK_ALARM_TIMEOUT = 20 * 1000;
 
@@ -98,6 +100,25 @@ class LLWriteBase extends LLFilesystemOperation {
             file = { ...file, stream, };
         }
 
+        let hashPromise;
+        if ( file.buffer ) {
+            const hash = crypto.createHash('sha256');
+            hash.update(file.buffer);
+            hashPromise = Promise.resolve(hash.digest('hex'));
+        } else {
+            const hs = hashing_stream(file.stream);
+            file.stream = hs.stream;
+            hashPromise = hs.hashPromise;
+        }
+
+        hashPromise.then(hash => {
+            const svc_event = Context.get('services').get('event');
+            console.log('\x1B[36;1m[fs.write]', uuid, hash);
+            svc_event.emit('outer.fs.write-hash', {
+                hash, uuid,
+            });
+        });
+
         const state_upload = storage.create_upload();
 
         try {
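The write path above computes a SHA-256 of the incoming content without reading it twice: an in-memory buffer is hashed directly, while a streaming upload is routed through the hashing transform so bytes are hashed as they pass to storage. A standalone sketch of the same decision (the function name is illustrative; `hashing_stream` mirrors the helper added to util/streamutil.js further down):

// Sketch: obtain a content hash for a payload that may be a Buffer or a stream.
const crypto = require('crypto');
const { hashing_stream } = require('../../util/streamutil'); // path as used in the diff

const hash_of_upload = (file) => {
    if ( file.buffer ) {
        // Already buffered: hash synchronously and resolve immediately.
        const hash = crypto.createHash('sha256');
        hash.update(file.buffer);
        return { stream: file.stream, hashPromise: Promise.resolve(hash.digest('hex')) };
    }
    // Streaming: tee through the hashing Transform; the caller must use the
    // returned stream in place of the original one.
    const hs = hashing_stream(file.stream);
    return { stream: hs.stream, hashPromise: hs.hashPromise };
};

The resolved hash, together with the file's UID, is what feeds the outer.fs.write-hash event emitted above.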
@@ -95,7 +95,7 @@ class BroadcastService extends BaseService {
                 JSON.stringify(data)
             );
         }
 
-
+        meta.from_outside = true;
         const context = Context.get(undefined, { allow_fallback: true });
         context.arun(async () => {
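Messages arriving from peers are tagged with meta.from_outside before being re-emitted locally, so the local 'outer.*' relay can skip events that already came over the wire and avoid a broadcast loop once outer.fs.write-hash starts flowing between nodes. Context.get(undefined, { allow_fallback: true }) presumably supplies a fallback context for the re-emit, since the socket callback runs outside any request context. A minimal sketch of the loop guard (Node's EventEmitter stands in for the event service; all names here are illustrative, not from the codebase):

const { EventEmitter } = require('events');
const bus = new EventEmitter();
const peers = []; // peer connections would live here

// Outgoing relay: only forward events that originated on this node.
bus.on('outer.fs.write-hash', (data, meta = {}) => {
    if ( meta.from_outside ) return;
    for ( const peer of peers ) peer.send({ key: 'outer.fs.write-hash', data, meta });
});

// Incoming side: mark the message before re-emitting it locally.
const on_peer_message = ({ key, data, meta }) => {
    meta.from_outside = true;
    bus.emit(key, data, meta);
};

on_peer_message({ key: 'outer.fs.write-hash', data: { uuid: 'u1', hash: 'h1' }, meta: {} });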
@@ -23,6 +23,8 @@ const { pausing_tee } = require("../../util/streamutil");
 const putility = require("@heyputer/putility");
 const { EWMA } = require("../../util/opmath");
 
+const crypto = require('crypto');
+
 /**
  * FileCacheService
  *
@@ -62,13 +64,14 @@ class FileCacheService extends AdvancedBase {
 
         this.log = services.get('log-service').create(this.constructor.name);
         this.errors = services.get('error-service').create(this.log);
+        this.services = services;
 
         this.disk_limit = my_config.disk_limit;
         this.disk_max_size = my_config.disk_max_size;
         this.precache_size = my_config.precache_size;
         this.path = my_config.path;
 
-        this.ttl = my_config.ttl || (5 * 1000);
+        this.ttl = my_config.ttl || (60 * 1000);
 
         this.precache = new Map();
         this.uid_to_tracker = new Map();
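The constructor also raises the default cache TTL from 5 seconds to 60 seconds, presumably because stale entries can now be evicted by the write-hash event instead of relying on a short timer. A configuration sketch using the keys read above (values are made up, and the exact meaning of each limit is assumed from its name):

// Illustrative configuration for FileCacheService (values are not from the repo).
const my_config = {
    disk_limit: 5 * 1024 * 1024 * 1024, // assumed: total bytes allowed on disk
    disk_max_size: 16 * 1024 * 1024,    // assumed: largest single file to cache
    precache_size: 256 * 1024 * 1024,   // assumed: bytes of RAM for the precache
    path: './cache',                    // where cached files are written
    ttl: 60 * 1000,                     // ms; matches the new default above
};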
@@ -132,6 +135,17 @@ class FileCacheService extends AdvancedBase {
         const { fs } = this.modules;
         // Ensure storage path exists
         await fs.promises.mkdir(this.path, { recursive: true });
+
+        // Distributed cache invalidation
+        const svc_event = this.services.get('event');
+        svc_event.on('outer.fs.write-hash', async (_, { uuid, hash }) => {
+            const tracker = this.uid_to_tracker.get(uuid);
+            if ( ! tracker ) return;
+
+            if ( tracker.hash !== hash ) {
+                await this.invalidate(uuid);
+            }
+        });
     }
 
     _get_path (uid) {
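The handler registered above is the consuming end of the write-hash event: if this node has the file cached and the incoming hash differs from the hash recorded when the file was cached, the entry is evicted. A self-contained model of that rule (plain EventEmitter and Map as stand-ins; names illustrative):

// Model of the invalidation rule with a plain EventEmitter as the event bus.
const { EventEmitter } = require('events');
const bus = new EventEmitter();

const uid_to_tracker = new Map();
uid_to_tracker.set('file-uuid-1', { hash: 'hash-of-old-content' });

bus.on('outer.fs.write-hash', ({ uuid, hash }) => {
    const tracker = uid_to_tracker.get(uuid);
    if ( ! tracker ) return;                 // not cached on this node
    if ( tracker.hash !== hash ) {
        uid_to_tracker.delete(uuid);         // stand-in for invalidate(uuid)
        console.log('evicted', uuid);
    }
});

// A write on another node produced different content for the same file:
bus.emit('outer.fs.write-hash', { uuid: 'file-uuid-1', hash: 'hash-of-new-content' });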
@@ -262,13 +276,16 @@ class FileCacheService extends AdvancedBase {
 
         (async () => {
             let offset = 0;
+            const hash = crypto.createHash('sha256');
             for await (const chunk of store_stream) {
                 chunk.copy(data, offset);
+                hash.update(chunk);
                 offset += chunk.length;
             }
 
             await this._precache_make_room(size);
             this.precache.set(key, data);
+            tracker.hash = hash.digest('hex');
             tracker.phase = FileTracker.PHASE_PRECACHE;
             tracker.p_ready.resolve();
         })()
@@ -288,8 +305,11 @@ class FileCacheService extends AdvancedBase {
     * the precache and disk storage, ensuring that any references to this file are cleaned up.
     * If the file is not found in the cache, the method does nothing.
     */
-    async invalidate (fsNode) {
-        const key = await fsNode.get('uid');
+    async invalidate (fsNode_or_uid) {
+        const key = (typeof fsNode_or_uid === 'string')
+            ? fsNode_or_uid
+            : await fsNode_or_uid.get('uid');
+
         if ( ! this.uid_to_tracker.has(key) ) return;
         const tracker = this.uid_to_tracker.get(key);
         if ( tracker.phase === FileTracker.PHASE_PRECACHE ) {
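invalidate() now accepts either an FSNode-like object or a bare UID string, so the write-hash handler can call it directly with the UUID it receives from the event. Usage sketch (the service lookup name and variables are placeholders, not from the diff):

// Both call styles are supported after this change.
const svc_fileCache = services.get('file-cache'); // assumed registration name
await svc_fileCache.invalidate(fsNode);           // original form: FSNode-like object
await svc_fileCache.invalidate('file-uuid-1');    // new form: UID string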
@@ -18,6 +18,7 @@
 */
 const { PassThrough, Readable, Transform } = require('stream');
 const { TeePromise } = require('@heyputer/putility').libs.promise;
+const crypto = require('crypto');
 
 class StreamBuffer extends TeePromise {
     constructor () {
@@ -475,6 +476,31 @@ const buffer_to_stream = (buffer) => {
     return stream;
 };
 
+const hashing_stream = (source) => {
+    const hash = crypto.createHash('sha256');
+    const stream = new Transform({
+        transform(chunk, encoding, callback) {
+            hash.update(chunk);
+            this.push(chunk);
+            callback();
+        }
+    });
+
+    source.pipe(stream);
+
+    const hashPromise = new Promise((resolve, reject) => {
+        source.on('end', () => {
+            resolve(hash.digest('hex'));
+        });
+        source.on('error', reject);
+    });
+
+    return {
+        stream,
+        hashPromise,
+    };
+};
+
 module.exports = {
     StreamBuffer,
     stream_to_the_void,
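hashing_stream tees a readable source through a Transform that updates a SHA-256 digest as chunks pass by, and resolves the digest once the source ends. Usage sketch (file paths and the require path are illustrative):

// Hash an upload while streaming it to its destination.
const fs = require('fs');
const { hashing_stream } = require('./streamutil'); // assumed relative path

const source = fs.createReadStream('./upload.bin');
const { stream, hashPromise } = hashing_stream(source);

stream.pipe(fs.createWriteStream('./stored.bin'));
hashPromise.then(hex => console.log('sha256:', hex));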
@@ -488,4 +514,5 @@ module.exports = {
     chunk_stream,
     stream_to_buffer,
     buffer_to_stream,
+    hashing_stream,
 };