More fine tuning of cache storage-related code
gorhill committed Feb 28, 2024
1 parent a9211cf commit 79ea85d
Showing 4 changed files with 138 additions and 80 deletions.
9 changes: 9 additions & 0 deletions platform/chromium/webext.js
@@ -156,6 +156,15 @@ if ( chrome.storage.sync instanceof Object ) {
     };
 }
 
+// https://developer.mozilla.org/en-US/docs/Mozilla/Add-ons/WebExtensions/API/storage/session
+webext.storage.session = {
+    clear: ( ) => Promise.resolve(),
+    get: ( ) => Promise.resolve(),
+    getBytesInUse: ( ) => Promise.resolve(),
+    remove: ( ) => Promise.resolve(),
+    set: ( ) => Promise.resolve(),
+};
+
 // https://bugs.chromium.org/p/chromium/issues/detail?id=608854
 if ( chrome.tabs.removeCSS instanceof Function ) {
     webext.tabs.removeCSS = promisifyNoFail(chrome.tabs, 'removeCSS');
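Note: the webext.storage.session stub added above makes every session-storage call resolve to undefined, so callers can use the API unconditionally on platforms where browser.storage.session is unavailable. A minimal sketch — not part of this commit — of how the native API could be preferred when the browser provides it, reusing the file's existing promisifyNoFail() helper:

// Illustrative sketch only: assumes chrome.storage.session exists on this platform.
if ( chrome.storage.session instanceof Object ) {
    webext.storage.session = {
        clear: promisifyNoFail(chrome.storage.session, 'clear'),
        get: promisifyNoFail(chrome.storage.session, 'get'),
        getBytesInUse: promisifyNoFail(chrome.storage.session, 'getBytesInUse'),
        remove: promisifyNoFail(chrome.storage.session, 'remove'),
        set: promisifyNoFail(chrome.storage.session, 'set'),
    };
}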
2 changes: 1 addition & 1 deletion src/js/assets.js
@@ -838,7 +838,7 @@ async function assetCacheSetDetails(assetKey, details) {
         }
     }
     if ( modified ) {
-        saveAssetCacheRegistry();
+        saveAssetCacheRegistry(3);
     }
 }
 
118 changes: 81 additions & 37 deletions src/js/cachestorage.js
@@ -58,17 +58,19 @@ const shouldCache = bin => {
     return out;
 };
 
-const missingKeys = (wanted, inbin, outbin) => {
-    inbin = inbin || {};
-    const found = Object.keys(inbin);
-    Object.assign(outbin, inbin);
-    if ( found.length === wanted.length ) { return; }
-    const missing = [];
-    for ( const key of wanted ) {
-        if ( outbin.hasOwnProperty(key) ) { continue; }
-        missing.push(key);
-    }
-    return missing;
+const exGet = (api, wanted, outbin) => {
+    return api.get(wanted).then(inbin => {
+        inbin = inbin || {};
+        const found = Object.keys(inbin);
+        Object.assign(outbin, inbin);
+        if ( found.length === wanted.length ) { return; }
+        const missing = [];
+        for ( const key of wanted ) {
+            if ( outbin.hasOwnProperty(key) ) { continue; }
+            missing.push(key);
+        }
+        return missing;
+    });
 };
 
 /*******************************************************************************
@@ -81,15 +83,15 @@ const missingKeys = (wanted, inbin, outbin) => {
 
 const cacheStorage = (( ) => {
 
-    const compress = async (key, data) => {
+    const compress = async (bin, key, data) => {
         const µbhs = µb.hiddenSettings;
         const isLarge = typeof data === 'string' &&
             data.length >= µbhs.cacheStorageCompressionThreshold;
         const after = await scuo.serializeAsync(data, {
             compress: isLarge && µbhs.cacheStorageCompression,
-            multithreaded: isLarge && µbhs.cacheStorageMultithread || 2,
+            multithreaded: µbhs.cacheStorageMultithread,
         });
-        return { key, data: after };
+        bin[key] = after;
     };
 
     const decompress = async (bin, key) => {
@@ -98,27 +100,24 @@ const cacheStorage = (( ) => {
         const µbhs = µb.hiddenSettings;
         const isLarge = data.length >= µbhs.cacheStorageCompressionThreshold;
         bin[key] = await scuo.deserializeAsync(data, {
-            multithreaded: isLarge && µbhs.cacheStorageMultithread || 2,
+            multithreaded: isLarge && µbhs.cacheStorageMultithread || 1,
         });
     };
 
     return {
         get(argbin) {
             const outbin = {};
-            const wanted0 = keysFromGetArg(argbin);
-            return cacheAPI.get(wanted0).then(bin => {
-                const wanted1 = missingKeys(wanted0, bin, outbin);
-                if ( wanted1 === undefined ) { return; }
-                return extensionStorage.get(wanted1).then(bin => {
-                    const wanted2 = missingKeys(wanted1, bin, outbin);
-                    if ( wanted2 === undefined ) { return; }
-                    if ( argbin instanceof Object === false ) { return; }
-                    if ( Array.isArray(argbin) ) { return; }
-                    for ( const key of wanted2 ) {
-                        if ( argbin.hasOwnProperty(key) === false ) { continue; }
-                        outbin[key] = argbin[key];
-                    }
-                });
+            return exGet(cacheAPI, keysFromGetArg(argbin), outbin).then(wanted => {
+                if ( wanted === undefined ) { return; }
+                return exGet(extensionStorage, wanted, outbin);
+            }).then(wanted => {
+                if ( wanted === undefined ) { return; }
+                if ( argbin instanceof Object === false ) { return; }
+                if ( Array.isArray(argbin) ) { return; }
+                for ( const key of wanted ) {
+                    if ( argbin.hasOwnProperty(key) === false ) { continue; }
+                    outbin[key] = argbin[key];
+                }
             }).then(( ) => {
                 const promises = [];
                 for ( const key of Object.keys(outbin) ) {
@@ -147,17 +146,14 @@ const cacheStorage = (( ) => {
         async set(keyvalStore) {
             const keys = Object.keys(keyvalStore);
             if ( keys.length === 0 ) { return; }
+            const bin = {};
             const promises = [];
             for ( const key of keys ) {
-                promises.push(compress(key, keyvalStore[key]));
+                promises.push(compress(bin, key, keyvalStore[key]));
             }
-            const results = await Promise.all(promises);
-            const serializedStore = {};
-            for ( const { key, data } of results ) {
-                serializedStore[key] = data;
-            }
-            cacheAPI.set(shouldCache(serializedStore));
-            return extensionStorage.set(serializedStore).catch(reason => {
+            await Promise.all(promises);
+            cacheAPI.set(shouldCache(bin));
+            return extensionStorage.set(bin).catch(reason => {
                 ubolog(reason);
             });
         },
@@ -361,6 +357,54 @@ const cacheAPI = (( ) => {
     };
 })();
 
+/*******************************************************************************
+ *
+ * In-memory storage
+ *
+ * */
+
+const memoryStorage = (( ) => {
+
+    const sessionStorage = webext.storage.session;
+
+    return {
+        get(...args) {
+            return sessionStorage.get(...args).catch(reason => {
+                ubolog(reason);
+            });
+        },
+
+        async keys(regex) {
+            const results = await sessionStorage.get(null).catch(( ) => {});
+            const keys = new Set(results[0]);
+            const bin = results[1] || {};
+            for ( const key of Object.keys(bin) ) {
+                if ( regex && regex.test(key) === false ) { continue; }
+                keys.add(key);
+            }
+            return keys;
+        },
+
+        async set(...args) {
+            return sessionStorage.set(...args).catch(reason => {
+                ubolog(reason);
+            });
+        },
+
+        remove(...args) {
+            return sessionStorage.remove(...args).catch(reason => {
+                ubolog(reason);
+            });
+        },
+
+        clear(...args) {
+            return sessionStorage.clear(...args).catch(reason => {
+                ubolog(reason);
+            });
+        },
+    };
+})();
+
 /*******************************************************************************
 *
 * IndexedDB
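Note: with the reworked get() above, lookups now chain through exGet(): keys found by cacheAPI win, keys still missing are fetched from extensionStorage, and anything left unresolved falls back to the defaults supplied in the argument object. A minimal usage sketch — not part of this commit, key names are hypothetical:

// Illustrative sketch only.
cacheStorage.get({ compiledMagic: 0, selfieMagic: 0 }).then(bin => {
    // Each property comes from the cache API, extension storage, or the
    // default above — in that order of precedence.
    console.log(bin.compiledMagic, bin.selfieMagic);
});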
89 changes: 47 additions & 42 deletions src/js/scuo-serializer.js
@@ -1148,10 +1148,11 @@ const THREAD_DESERIALIZE = 4;
 
 class MainThread {
     constructor() {
+        this.name = 'main';
         this.jobs = [];
         this.workload = 0;
         this.timer = undefined;
-        this.busy = false;
+        this.busy = 2;
     }
 
     process() {
@@ -1164,7 +1165,9 @@ class MainThread {
         job.resolve(result);
         this.processAsync();
         if ( this.jobs.length === 0 ) {
-            this.busy = false;
+            this.busy = 2;
+        } else if ( this.busy > 2 ) {
+            this.busy -= 1;
         }
     }
 
@@ -1174,11 +1177,12 @@ class MainThread {
         this.timer = globalThis.requestIdleCallback(deadline => {
             this.timer = undefined;
             globalThis.queueMicrotask(( ) => {
-                this.timer = undefined;
                 this.process();
             });
-            this.busy = deadline.timeRemaining() === 0;
-        }, { timeout: 7 });
+            if ( deadline.timeRemaining() === 0 ) {
+                this.busy += 1;
+            }
+        }, { timeout: 5 });
     }
 
     serialize(data, options) {
@@ -1199,16 +1203,17 @@ class MainThread {
     }
 
     get queueSize() {
-        return this.jobs.length + 1;
+        return this.jobs.length;
     }
 
     get workSize() {
-        return this.busy ? Number.MAX_SAFE_INTEGER : this.workload * 2;
+        return this.workload * this.busy;
     }
 }
 
 class Thread {
     constructor(gcer) {
+        this.name = 'worker';
         this.jobs = new Map();
         this.jobIdGenerator = 1;
         this.workload = 0;
@@ -1263,7 +1268,10 @@ class Thread {
     }
 
     onmessage(ev) {
-        const job = ev.data;
+        this.ondone(ev.data);
+    }
+
+    ondone(job) {
         const resolve = this.jobs.get(job.id);
         if ( resolve === undefined ) { return; }
         this.jobs.delete(job.id);
@@ -1274,37 +1282,35 @@ class Thread {
     }
 
     async serialize(data, options) {
-        this.workerAccessTime = Date.now();
-        const worker = await this.workerPromise;
-        if ( worker === null ) {
-            const result = serialize(data, options);
-            this.countdownWorker();
-            return result;
-        }
+        const id = this.jobIdGenerator++;
         return new Promise(resolve => {
-            const id = this.jobIdGenerator++;
             this.workload += 1;
-            const job = { what: THREAD_SERIALIZE, id, data, options, size: 1 };
-            this.jobs.set(job.id, resolve);
-            worker.postMessage(job);
+            this.jobs.set(id, resolve);
+            return this.workerPromise.then(worker => {
+                this.workerAccessTime = Date.now();
+                if ( worker === null ) {
+                    this.ondone({ id, result: serialize(data, options), size: 1 });
+                } else {
+                    worker.postMessage({ what: THREAD_SERIALIZE, id, data, options, size: 1 });
+                }
+            });
         });
     }
 
     async deserialize(data, options) {
-        this.workerAccessTime = Date.now();
-        const worker = await this.workerPromise;
-        if ( worker === null ) {
-            const result = deserialize(data, options);
-            this.countdownWorker();
-            return result;
-        }
+        const id = this.jobIdGenerator++;
        return new Promise(resolve => {
-            const id = this.jobIdGenerator++;
             const size = data.length;
             this.workload += size;
-            const job = { what: THREAD_DESERIALIZE, id, data, options, size };
-            this.jobs.set(job.id, resolve);
-            worker.postMessage(job);
+            this.jobs.set(id, resolve);
+            return this.workerPromise.then(worker => {
+                this.workerAccessTime = Date.now();
+                if ( worker === null ) {
+                    this.ondone({ id, result: deserialize(data, options), size });
+                } else {
+                    worker.postMessage({ what: THREAD_DESERIALIZE, id, data, options, size });
+                }
+            });
         });
     }
 
@@ -1323,12 +1329,11 @@ const threads = {
         const poolSize = this.pool.length;
         if ( poolSize !== 0 && poolSize >= maxPoolSize ) {
             if ( poolSize === 1 ) { return this.pool[0]; }
-            return this.pool.reduce((best, candidate) => {
-                if ( candidate.queueSize === 0 ) { return candidate; }
-                if ( best.queueSize === 0 ) { return best; }
-                return candidate.workSize < best.workSize
-                    ? candidate
-                    : best;
+            return this.pool.reduce((a, b) => {
+                //console.log(`${a.name}: q=${a.queueSize} w=${a.workSize} ${b.name}: q=${b.queueSize} w=${b.workSize}`);
+                if ( b.queueSize === 0 ) { return b; }
+                if ( a.queueSize === 0 ) { return a; }
+                return b.workSize < a.workSize ? b : a;
             });
         }
         const thread = new Thread(thread => {
@@ -1346,9 +1351,9 @@ export async function serializeAsync(data, options = {}) {
     if ( maxThreadCount === 0 ) {
         return serialize(data, options);
     }
-    const result = await threads
-        .thread(maxThreadCount)
-        .serialize(data, options);
+    const thread = threads.thread(maxThreadCount);
+    //console.log(`serializeAsync: thread=${thread.name} workload=${thread.workSize}`);
+    const result = await thread.serialize(data, options);
     if ( result !== undefined ) { return result; }
     return serialize(data, options);
 }
@@ -1359,9 +1364,9 @@ export async function deserializeAsync(data, options = {}) {
     if ( maxThreadCount === 0 ) {
         return deserialize(data, options);
     }
-    const result = await threads
-        .thread(maxThreadCount)
-        .deserialize(data, options);
+    const thread = threads.thread(maxThreadCount);
+    //console.log(`deserializeAsync: thread=${thread.name} data=${data.length} workload=${thread.workSize}`);
+    const result = await thread.deserialize(data, options);
     if ( result !== undefined ) { return result; }
     return deserialize(data, options);
 }
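Note: after this change a job is queued immediately and the worker is awaited inside the promise, while threads.thread() favors an idle thread and otherwise keeps the one with the smallest workSize (workload × busy for the main thread). A minimal round-trip sketch — not part of this commit, assuming the module is imported as scuo:

// Illustrative sketch only.
const someValue = { updated: Date.now(), entries: [ 1, 2, 3 ] };
const serialized = await scuo.serializeAsync(someValue, {
    compress: true,     // compress the serialized output
    multithreaded: 2,   // allow up to 2 worker threads
});
const roundTripped = await scuo.deserializeAsync(serialized, { multithreaded: 2 });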
