mirror of
https://github.com/badlogic/heissepreise.git
synced 2024-07-01 21:05:49 +02:00
Fix scheduling of the data fetch: run it daily at 7am instead of every 24h after the server launches.
This commit is contained in:
parent
8c202de9a6
commit
1fec42bfe1
84
index.js
84
index.js
|
@ -2,44 +2,66 @@ const fs = require("fs");
|
||||||
const analysis = require("./analysis");
|
const analysis = require("./analysis");
|
||||||
|
|
||||||
/**
 * Publishes the canonical price data into the static `site/` directory.
 *
 * Copies the full canonical dump verbatim, then writes one compressed
 * JSON file per store so clients can fetch only the store they need.
 *
 * @param {string} dataDir directory holding `latest-canonical.json`
 */
function copyItemsToSite(dataDir) {
    const canonicalPath = `${dataDir}/latest-canonical.json`;

    // Full dump is served as-is.
    fs.copyFileSync(canonicalPath, `site/latest-canonical.json`);

    // Per-store files are filtered and compressed before being written.
    const allItems = JSON.parse(fs.readFileSync(canonicalPath));
    for (const store of analysis.STORE_KEYS) {
        const itemsForStore = allItems.filter((item) => item.store === store);
        const payload = JSON.stringify(analysis.compress(itemsForStore));
        fs.writeFileSync(`site/latest-canonical.${store}.compressed.json`, payload);
    }
}
||||||
|
|
||||||
|
/**
 * Runs `func` once per day at the given local wall-clock time.
 *
 * Computes the delay until the next occurrence of hour:minute:second,
 * sleeps via setTimeout, runs `func`, and then re-schedules itself.
 *
 * @param {number} hour   local hour of day (0-23)
 * @param {number} minute minute (0-59)
 * @param {number} second second (0-59)
 * @param {function(): (void|Promise<void>)} func callback to invoke daily
 */
function scheduleFunction(hour, minute, second, func) {
    const now = new Date();
    const delay = millisUntil(now, hour, minute, second);

    console.log("Scheduling next function call: " + new Date(now.getTime() + delay).toString());

    setTimeout(async () => {
        try {
            await func();
        } catch (err) {
            // A failing run must not kill the daily schedule; log and carry on.
            console.error("Scheduled function failed:", err);
        } finally {
            // Always re-arm, even if `func` threw (the original version lost
            // the schedule forever on the first rejection).
            scheduleFunction(hour, minute, second, func);
        }
    }, delay);
}

/**
 * Milliseconds from `now` until the next occurrence of hour:minute:second
 * in local time. Pure helper, extracted for testability.
 *
 * @param {Date} now    reference instant
 * @param {number} hour   target local hour (0-23)
 * @param {number} minute target minute (0-59)
 * @param {number} second target second (0-59)
 * @returns {number} non-negative delay in milliseconds
 */
function millisUntil(now, hour, minute, second) {
    const next = new Date(now);
    // Single setHours call also zeroes the milliseconds, which the naive
    // setHours/setMinutes/setSeconds sequence left stale (up to ~1s drift).
    next.setHours(hour, minute, second, 0);
    if (now > next) {
        // Target time already passed today; fire tomorrow.
        next.setDate(next.getDate() + 1);
    }
    return next.getTime() - now.getTime();
}
|
|
||||||
|
|
||||||
/**
 * Server entry point: prepares the data directory, publishes the item data,
 * schedules a daily 07:00 refresh, and starts the Express static server.
 */
(async () => {
    const dataDir = "data";

    if (!fs.existsSync(dataDir)) {
        fs.mkdirSync(dataDir);
    }

    if (fs.existsSync(`${dataDir}/latest-canonical.json`)) {
        // Cached data exists: serve it immediately and refresh in the background.
        copyItemsToSite(dataDir);
        analysis
            .updateData(dataDir, (_newItems) => {
                copyItemsToSite(dataDir);
            })
            // The background refresh was a floating promise before; a rejection
            // would surface as an unhandled rejection. Log it instead.
            .catch((err) => console.error("Background data update failed:", err));
    } else {
        // First launch: block until we have data to serve.
        await analysis.updateData(dataDir);
        copyItemsToSite(dataDir);
    }

    // Refresh the data daily at 07:00 local time.
    scheduleFunction(7, 0, 0, async () => {
        // Fixed: was `items = await ...`, which created an unused implicit global.
        await analysis.updateData(dataDir);
        copyItemsToSite(dataDir);
    });

    const express = require("express");
    const compression = require("compression");
    const app = express();
    // Optional CLI arg overrides the port; `??` keeps an explicit 0 intact.
    const port = process?.argv?.[2] ?? 3000;

    app.use(compression());
    app.use(express.static("site"));

    app.listen(port, () => {
        console.log(`Example app listening on port ${port}`);
    });
})();
|
Loading…
Reference in New Issue
Block a user