Merge pull request #42 from iantsch/fetch-canonical-parallel

Fetch canonical compressed in parallel
This commit is contained in:
Mario Zechner 2023-05-30 13:57:14 +02:00 committed by GitHub
commit 74ce151c7d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 24 additions and 6 deletions

2
.gitignore vendored
View File

@@ -12,4 +12,4 @@ billa-2020.csv
report.html
spar-2020.csv
tmp-dir/
site/latest-canonical-compressed.json
site/latest-canonical*.json

View File

@@ -3,6 +3,8 @@ const stores = require("./stores");
const STORE_KEYS = Object.keys(stores);
exports.STORE_KEYS = STORE_KEYS;
function currentDate() {
const currentDate = new Date();
const year = currentDate.getFullYear();

View File

@@ -5,7 +5,10 @@ function copyItemsToSite(dataDir) {
fs.copyFileSync(`${dataDir}/latest-canonical.json`, `site/latest-canonical.json`);
const items = JSON.parse(fs.readFileSync(`${dataDir}/latest-canonical.json`));
const compressedItems = analysis.compress(items);
fs.writeFileSync(`site/latest-canonical-compressed.json`, JSON.stringify(compressedItems));
for (const store of analysis.STORE_KEYS) {
const storeItems = items.filter(item => item.store === store);
fs.writeFileSync(`site/latest-canonical.${store}.compressed.json`, JSON.stringify(analysis.compress(storeItems)));
}
}
(async () => {

View File

@@ -153,10 +153,23 @@ function decompress(compressedItems) {
async function loadItems() {
now = performance.now();
const response = await fetch("latest-canonical-compressed.json");
const compressedItems = await response.json();
const items = decompress(compressedItems);
console.log("Loading compressed items took " + (performance.now() - now) / 1000 + " secs");
const compressedItemsPerStore = [];
for (const store of STORE_KEYS) {
compressedItemsPerStore.push(new Promise(async (resolve) => {
const now = performance.now();
try {
const response = await fetch(`latest-canonical.${store}.compressed.json`);
const json = await response.json();
console.log(`Loading compressed items for ${store} took ${((performance.now() - now) / 1000)} secs`);
resolve(decompress(json));
} catch {
console.log(`Error while loading compressed items for ${store}. It took ${((performance.now() - now) / 1000)} secs, continueing...`);
resolve([]);
}
}));
}
const items = [].concat(...await Promise.all(compressedItemsPerStore));
console.log("Loading compressed items in parallel took " + (performance.now() - now) / 1000 + " secs");
now = performance.now();
for (const item of items) {