diff --git a/scripts/commands/api/load.sh b/scripts/commands/api/load.sh
index 133d2eac0..b26a0c3c2 100755
--- a/scripts/commands/api/load.sh
+++ b/scripts/commands/api/load.sh
@@ -3,10 +3,8 @@
 mkdir -p scripts/data
 curl -L -o scripts/data/blocklist.json https://iptv-org.github.io/api/blocklist.json
 curl -L -o scripts/data/categories.json https://iptv-org.github.io/api/categories.json
-curl -L -o scripts/data/channels.json https://iptv-org.github.io/api/channels.json
-curl -L -o scripts/data/streams.json https://iptv-org.github.io/api/streams.json
+curl -L -o scripts/data/channels.json https://iptv-org.github.io/api/channels.json
 curl -L -o scripts/data/countries.json https://iptv-org.github.io/api/countries.json
-curl -L -o scripts/data/guides.json https://iptv-org.github.io/api/guides.json
 curl -L -o scripts/data/languages.json https://iptv-org.github.io/api/languages.json
 curl -L -o scripts/data/regions.json https://iptv-org.github.io/api/regions.json
 curl -L -o scripts/data/subdivisions.json https://iptv-org.github.io/api/subdivisions.json
\ No newline at end of file
diff --git a/scripts/commands/cluster/load.js b/scripts/commands/cluster/load.js
deleted file mode 100644
index 7dd1ecc8e..000000000
--- a/scripts/commands/cluster/load.js
+++ /dev/null
@@ -1,65 +0,0 @@
-const { db, logger, timer, checker, store, file, parser } = require('../../core')
-const { program } = require('commander')
-
-const options = program
-  .requiredOption('-c, --cluster-id <cluster-id>', 'The ID of cluster to load', parser.parseNumber)
-  .option('-t, --timeout <timeout>', 'Set timeout for each request', parser.parseNumber, 60000)
-  .option('-d, --delay <delay>', 'Set delay for each request', parser.parseNumber, 0)
-  .option('--debug', 'Enable debug mode', false)
-  .parse(process.argv)
-  .opts()
-
-const config = {
-  timeout: options.timeout,
-  delay: options.delay,
-  debug: options.debug
-}
-
-const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs/cluster/load'
-
-async function main() {
-  logger.info('starting...')
-  logger.info(`timeout: ${options.timeout}ms`)
-  logger.info(`delay: ${options.delay}ms`)
-  timer.start()
-
-  const clusterLog = `${LOGS_DIR}/cluster_${options.clusterId}.log`
-  logger.info(`loading cluster: ${options.clusterId}`)
-  logger.info(`creating '${clusterLog}'...`)
-  await file.create(clusterLog)
-  await db.streams.load()
-  const items = await db.streams.find({ cluster_id: options.clusterId })
-  const total = items.length
-  logger.info(`found ${total} links`)
-
-  logger.info('checking...')
-  const results = {}
-  for (const [i, item] of items.entries()) {
-    const message = `[${i + 1}/${total}] ${item.filepath}: ${item.url}`
-    const request = {
-      _id: item._id,
-      url: item.url,
-      http: {
-        referrer: item.http_referrer,
-        'user-agent': item.user_agent
-      }
-    }
-    const result = await checker.check(request, config)
-    if (!result.error) {
-      logger.info(message)
-    } else {
-      logger.info(`${message} (${result.error.message})`)
-    }
-    const output = {
-      _id: result._id,
-      error: result.error,
-      streams: result.streams,
-      requests: result.requests
-    }
-    await file.append(clusterLog, JSON.stringify(output) + '\n')
-  }
-
-  logger.info(`done in ${timer.format('HH[h] mm[m] ss[s]')}`)
-}
-
-main()
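Note: cluster/load.js wrote the per-cluster NDJSON logs that database/update.js (removed below) read back, and per the api/load.sh change above, streams.json and guides.json are no longer fetched either. Each line of a cluster log was one JSON object of the shape { _id, error, streams, requests }, serialized by the file.append call in the removed loop. For anyone with old logs to migrate, a minimal reader sketch — readClusterLog is a hypothetical stand-in for the repo's internal parser.parseLogs:

    const fs = require('fs')
    const readline = require('readline')

    async function readClusterLog(filepath) {
      const results = []
      const rl = readline.createInterface({
        input: fs.createReadStream(filepath),
        crlfDelay: Infinity
      })
      for await (const line of rl) {
        if (!line.trim()) continue
        // each line is a single JSON object: { _id, error, streams, requests }
        results.push(JSON.parse(line))
      }
      return results
    }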
diff --git a/scripts/commands/database/clear.js b/scripts/commands/database/clear.js
deleted file mode 100644
index c1658ac48..000000000
--- a/scripts/commands/database/clear.js
+++ /dev/null
@@ -1,47 +0,0 @@
-const { logger, parser, db, date } = require('../../core')
-const { program } = require('commander')
-
-const options = program
-  .option(
-    '-t, --threshold <threshold>',
-    'Number of days after which the stream should be deleted',
-    parser.parseNumber,
-    7
-  )
-  .option('--input-dir <input-dir>', 'Set path to input directory', 'streams')
-  .parse(process.argv)
-  .opts()
-
-async function main() {
-  await db.streams.load()
-
-  const streams = await db.streams.all()
-
-  let buffer = {}
-  let removed = 0
-  logger.info('searching...')
-  for (const stream of streams) {
-    if (
-      stream.status === 'error' &&
-      date.utc().diff(stream.updated_at, 'day') >= options.threshold
-    ) {
-      logger.info(`${stream.url} (offline)`)
-      removed += await db.streams.remove({ url: stream.url }, { multi: true })
-    }
-
-    const key = stream.url.toLowerCase()
-    if (buffer[key]) {
-      logger.info(`${stream.url} (duplicate)`)
-      await db.streams.remove({ _id: stream._id })
-      removed++
-    } else {
-      buffer[key] = true
-    }
-  }
-
-  await db.streams.compact()
-
-  logger.info(`removed ${removed} streams`)
-}
-
-main()
diff --git a/scripts/commands/database/create.js b/scripts/commands/database/create.js
index a4de95ee7..ef1edcd9c 100644
--- a/scripts/commands/database/create.js
+++ b/scripts/commands/database/create.js
@@ -1,34 +1,25 @@
-const { db, file, parser, store, logger, id, api } = require('../../core')
+const { db, file, parser, store, logger, api } = require('../../core')
 const { program } = require('commander')
 const _ = require('lodash')
 
 const options = program
-  .option(
-    '--max-clusters <max-clusters>',
-    'Set maximum number of clusters',
-    parser.parseNumber,
-    256
-  )
   .option('--input-dir <input-dir>', 'Set path to input directory', 'streams')
   .parse(process.argv)
   .opts()
 
 async function main() {
   logger.info('starting...')
-  logger.info(`number of clusters: ${options.maxClusters}`)
 
   await saveToDatabase(await findStreams())
-
-  logger.info('done')
 }
 
 main()
 
 async function findStreams() {
-  logger.info(`looking for streams...`)
-
+  logger.info(`loading channels...`)
   await api.channels.load()
-  await api.streams.load()
+
+  logger.info(`looking for streams...`)
   await db.streams.load()
 
   const streams = []
@@ -40,7 +31,6 @@ async function findStreams() {
     const stream = store.create()
 
     const channel = await api.channels.find({ id: item.tvg.id })
-    const cached = (await api.streams.find({ url: item.url })) || {}
 
     stream.set('channel', { channel: channel ? channel.id : null })
     stream.set('title', { title: item.name })
@@ -48,14 +38,6 @@
     stream.set('url', { url: item.url })
     stream.set('http_referrer', { http_referrer: item.http.referrer })
     stream.set('user_agent', { user_agent: item.http['user-agent'] })
-    stream.set('status', { status: cached.status || 'online' })
-    stream.set('width', { width: cached.width || 0 })
-    stream.set('height', { height: cached.height || 0 })
-    stream.set('bitrate', { bitrate: cached.bitrate || 0 })
-    stream.set('frame_rate', { frame_rate: cached.frame_rate || 0 })
-    stream.set('added_at', { added_at: cached.added_at })
-    stream.set('updated_at', { updated_at: cached.updated_at })
-    stream.set('checked_at', { checked_at: cached.checked_at })
 
     streams.push(stream)
   }
@@ -69,20 +51,7 @@ async function saveToDatabase(streams = []) {
   logger.info('saving to the database...')
 
   await db.streams.reset()
-  const chunks = split(_.shuffle(streams), options.maxClusters)
-  for (const [i, chunk] of chunks.entries()) {
-    for (const stream of chunk) {
-      stream.set('cluster_id', { cluster_id: i + 1 })
-
-      await db.streams.insert(stream.data())
-    }
+  for (const stream of streams) {
+    await db.streams.insert(stream.data())
   }
 }
-
-function split(arr, n) {
-  let result = []
-  for (let i = n; i > 0; i--) {
-    result.push(arr.splice(0, Math.ceil(arr.length / i)))
-  }
-  return result
-}
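With clustering gone from database/create.js, the deleted split() helper deserves a brief epitaph: it divided the shuffled stream list into n nearly equal chunks (mutating its input via splice), which is how cluster_id values 1..n were assigned. For reference, its behavior on a concrete input:

    // the helper removed above, unchanged; note it consumes `arr` via splice()
    function split(arr, n) {
      let result = []
      for (let i = n; i > 0; i--) {
        // take ceil(remaining / chunks-left) items, so chunk sizes differ by at most one
        result.push(arr.splice(0, Math.ceil(arr.length / i)))
      }
      return result
    }

    split([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 3) // -> [[1, 2, 3, 4], [5, 6, 7], [8, 9, 10]]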
diff --git a/scripts/commands/database/export.js b/scripts/commands/database/export.js
index 9ce347f7b..b4ef737b1 100644
--- a/scripts/commands/database/export.js
+++ b/scripts/commands/database/export.js
@@ -1,15 +1,12 @@
-const { logger, db, api, file } = require('../../core')
+const { logger, db, file } = require('../../core')
 const _ = require('lodash')
-const dayjs = require('dayjs')
-const utc = require('dayjs/plugin/utc')
-dayjs.extend(utc)
 
 const PUBLIC_DIR = process.env.PUBLIC_DIR || '.api'
 
 async function main() {
-  await api.streams.load()
+  logger.info(`loading streams...`)
   await db.streams.load()
-  const now = dayjs.utc().format()
+
   let streams = await db.streams.find({})
   streams = _.sortBy(streams, 'channel')
   streams = streams.map(stream => {
@@ -17,36 +14,14 @@
       channel: stream.channel,
       url: stream.url,
       http_referrer: stream.http_referrer,
-      user_agent: stream.user_agent,
-      status: stream.status,
-      width: stream.width,
-      height: stream.height,
-      bitrate: stream.bitrate,
-      frame_rate: stream.frame_rate
+      user_agent: stream.user_agent
     }
 
-    let addedAt = now
-    let updatedAt = now
-    let found = api.streams.find({ url: stream.url })
-    if (found) {
-      data = JSON.parse(JSON.stringify(data))
-      normalized = _.omit(found, ['added_at', 'updated_at', 'checked_at'])
-      if (_.isEqual(data, normalized)) {
-        addedAt = found.added_at || now
-        updatedAt = found.updated_at || now
-      } else {
-        addedAt = found.added_at || now
-        updatedAt = now
-      }
-    }
-
-    data.added_at = addedAt
-    data.updated_at = updatedAt
-    data.checked_at = now
-
     return data
   })
 
+  logger.info(`found ${streams.length} streams`)
+
+  logger.info('saving to .api/streams.json...')
   await file.create(`${PUBLIC_DIR}/streams.json`, JSON.stringify(streams))
 }
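After this change each record in streams.json carries only the four remaining fields. An illustrative entry (the values here are invented):

    // shape of one exported entry after this change
    const entry = {
      channel: 'BBCOne.uk',
      url: 'https://example.com/stream.m3u8',
      http_referrer: null,
      user_agent: null
    }

One nit: the new 'saving to .api/streams.json...' log line hardcodes the directory, while the write itself respects PUBLIC_DIR.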
diff --git a/scripts/commands/database/matrix.js b/scripts/commands/database/matrix.js
deleted file mode 100644
index 576a583d3..000000000
--- a/scripts/commands/database/matrix.js
+++ /dev/null
@@ -1,16 +0,0 @@
-const { logger, db } = require('../../core')
-
-async function main() {
-  await db.streams.load()
-  const docs = await db.streams.find({}).sort({ cluster_id: 1 })
-  const cluster_id = docs.reduce((acc, curr) => {
-    if (!acc.includes(curr.cluster_id)) acc.push(curr.cluster_id)
-    return acc
-  }, [])
-
-  const matrix = { cluster_id }
-  const output = `MATRIX=${JSON.stringify(matrix)}`
-  logger.info(output)
-}
-
-main()
diff --git a/scripts/commands/database/update.js b/scripts/commands/database/update.js
deleted file mode 100644
index bf49eba1b..000000000
--- a/scripts/commands/database/update.js
+++ /dev/null
@@ -1,161 +0,0 @@
-const { db, store, parser, file, logger } = require('../../core')
-const _ = require('lodash')
-const dayjs = require('dayjs')
-const utc = require('dayjs/plugin/utc')
-dayjs.extend(utc)
-
-const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs/cluster/load'
-
-async function main() {
-  const streams = await loadStreams()
-  const results = await loadResults()
-  const origins = await loadOrigins(results)
-
-  await updateStreams(streams, results, origins)
-}
-
-main()
-
-async function updateStreams(items = [], results = {}, origins = {}) {
-  logger.info('updating streams...')
-
-  let updated = 0
-  const now = dayjs.utc().format()
-  for (const item of items) {
-    const stream = store.create(item)
-    const result = results[item._id]
-    if (result) {
-      const status = parseStatus(result.error)
-      if (status) {
-        stream.set('status', { status })
-      }
-
-      if (result.streams.length) {
-        const { width, height, bitrate, frame_rate } = parseMediaInfo(result.streams)
-        stream.set('width', { width })
-        stream.set('height', { height })
-        stream.set('bitrate', { bitrate })
-        stream.set('frame_rate', { frame_rate })
-      }
-
-      if (result.requests.length) {
-        const origin = findOrigin(result.requests, origins)
-        if (origin) {
-          stream.set('url', { url: origin })
-        }
-      }
-    }
-
-    if (stream.changed) {
-      stream.set('updated_at', { updated_at: now })
-      await db.streams.update({ _id: stream.get('_id') }, stream.data())
-      updated++
-    }
-  }
-
-  db.streams.compact()
-
-  logger.info(`updated ${updated} streams`)
-  logger.info('done')
-}
-
-async function loadStreams() {
-  logger.info('loading streams...')
-
-  await db.streams.load()
-  const streams = await db.streams.find({})
-
-  logger.info(`found ${streams.length} streams`)
-
-  return streams
-}
-
-async function loadResults() {
-  logger.info('loading check results...')
-
-  const results = {}
-  const files = await file.list(`${LOGS_DIR}/cluster_*.log`)
-  for (const filepath of files) {
-    const parsed = await parser.parseLogs(filepath)
-    for (const item of parsed) {
-      results[item._id] = item
-    }
-  }
-
-  logger.info(`found ${Object.values(results).length} results`)
-
-  return results
-}
-
-async function loadOrigins(results = {}) {
-  logger.info('loading origins...')
-
-  const origins = {}
-  for (const { error, requests } of Object.values(results)) {
-    if (error || !Array.isArray(requests) || !requests.length) continue
-
-    let origin = requests.shift()
-    origin = new URL(origin.url)
-    for (const request of requests) {
-      const curr = new URL(request.url)
-      const key = curr.href.replace(/(^\w+:|^)/, '')
-      if (!origins[key] && curr.host === origin.host) {
-        origins[key] = origin.href
-      }
-    }
-  }
-
-  logger.info(`found ${_.uniq(Object.values(origins)).length} origins`)
-
-  return origins
-}
-
-function findOrigin(requests = [], origins = {}) {
-  if (origins && Array.isArray(requests)) {
-    requests = requests.map(r => r.url.replace(/(^\w+:|^)/, ''))
-    for (const url of requests) {
-      if (origins[url]) {
-        return origins[url]
-      }
-    }
-  }
-
-  return null
-}
-
-function parseMediaInfo(streams) {
-  streams = streams.filter(s => s.codec_type === 'video')
-  streams = streams.map(s => {
-    s.bitrate = s.tags && s.tags.variant_bitrate ? parseInt(s.tags.variant_bitrate) : 0
-    s.frame_rate = parseFrameRate(s.avg_frame_rate)
-
-    return s
-  })
-  streams = _.orderBy(streams, ['height', 'bitrate'], ['desc', 'desc'])
-
-  return _.head(streams) || {}
-}
-
-function parseFrameRate(frame_rate = '0/0') {
-  const parts = frame_rate.split('/')
-  const number = parseInt(parts[0]) / parseInt(parts[1])
-
-  return number > 0 ? Math.round(number * 100) / 100 : 0
-}
-
-function parseStatus(error) {
-  if (!error) return 'online'
-
-  switch (error.code) {
-    case 'FFMPEG_UNDEFINED':
-      return null
-    case 'HTTP_REQUEST_TIMEOUT':
-      return 'timeout'
-    case 'HTTP_FORBIDDEN':
-    case 'HTTP_UNAUTHORIZED':
-    case 'HTTP_UNAVAILABLE_FOR_LEGAL_REASONS':
-      return 'blocked'
-    default:
-      return 'error'
-  }
-}
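The helpers deleted with database/update.js defined the old status model, so their behavior is worth recording for anyone migrating scripts that read the dropped fields. Example inputs and outputs, assuming the functions exactly as defined above:

    parseFrameRate('30000/1001') // -> 29.97 (NTSC rate, rounded to two decimals)
    parseFrameRate('25/1')       // -> 25
    parseFrameRate('0/0')        // -> 0 (0/0 is NaN; the `number > 0` guard catches it)

    parseStatus(null)                             // -> 'online'
    parseStatus({ code: 'HTTP_REQUEST_TIMEOUT' }) // -> 'timeout'
    parseStatus({ code: 'HTTP_FORBIDDEN' })       // -> 'blocked'
    parseStatus({ code: 'FFMPEG_UNDEFINED' })     // -> null (status left untouched)
    parseStatus({ code: 'ANYTHING_ELSE' })        // -> 'error'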
diff --git a/scripts/commands/playlist/generate.js b/scripts/commands/playlist/generate.js
index 4e26c1bb3..f4f5c880c 100644
--- a/scripts/commands/playlist/generate.js
+++ b/scripts/commands/playlist/generate.js
@@ -32,13 +32,7 @@ main()
 async function loadStreams() {
   await db.streams.load()
   let streams = await db.streams.find({})
-  streams = _.filter(streams, stream => stream.status !== 'error')
-  const levels = { online: 1, blocked: 2, timeout: 3, error: 4, default: 5 }
-  streams = orderBy(
-    streams,
-    ['channel', s => levels[s.status] || levels['default'], 'height', 'frame_rate', 'url'],
-    ['asc', 'asc', 'desc', 'desc', 'asc']
-  )
+  streams = orderBy(streams, ['channel', 'url'], ['asc', 'asc'])
   streams = _.uniqBy(streams, stream => stream.channel || _.uniqueId())
 
   await api.channels.load()
@@ -53,10 +47,6 @@ async function loadStreams() {
   let languages = await api.languages.all()
   languages = _.keyBy(languages, 'code')
 
-  await api.guides.load()
-  let guides = await api.guides.all()
-  guides = _.groupBy(guides, 'channel')
-
   streams = streams.map(stream => {
     const channel = channels[stream.channel] || null
     const filename = file.getFilename(stream.filepath)
@@ -64,14 +54,12 @@ async function loadStreams() {
     const defaultBroadcastArea = code ? [`c/${code.toUpperCase()}`] : []
 
     if (channel) {
-      stream.guides = Array.isArray(guides[channel.id]) ? guides[channel.id] : []
       stream.categories = channel.categories.map(id => categories[id]).filter(i => i)
       stream.languages = channel.languages.map(id => languages[id]).filter(i => i)
       stream.broadcast_area = channel.broadcast_area
       stream.is_nsfw = channel.is_nsfw
       stream.logo = channel.logo
     } else {
-      stream.guides = []
       stream.categories = []
       stream.languages = []
       stream.broadcast_area = defaultBroadcastArea
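Worth spelling out what the simplified loadStreams() now selects: with status, height, and frame_rate gone, the kept stream for each channel is simply the first one in (channel, url) natural order, and _.uniqueId() gives every channel-less stream its own dedupe key so none of them collide. A self-contained sketch of that selection (the sample data is invented):

    const { orderBy } = require('natural-orderby')
    const _ = require('lodash')

    const streams = [
      { channel: 'BBCOne.uk', url: 'https://example.com/b.m3u8' },
      { channel: 'BBCOne.uk', url: 'https://example.com/a.m3u8' },
      { channel: null, url: 'https://example.com/c.m3u8' },
      { channel: null, url: 'https://example.com/d.m3u8' }
    ]

    let result = orderBy(streams, ['channel', 'url'], ['asc', 'asc'])
    // uniqBy keeps the first occurrence per key; channel-less streams each get
    // a fresh _.uniqueId() key, so they all survive the dedupe
    result = _.uniqBy(result, s => s.channel || _.uniqueId())
    // -> a.m3u8 is kept for BBCOne.uk; c.m3u8 and d.m3u8 are both kept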
diff --git a/scripts/commands/playlist/update.js b/scripts/commands/playlist/update.js
deleted file mode 100644
index f60411e30..000000000
--- a/scripts/commands/playlist/update.js
+++ /dev/null
@@ -1,30 +0,0 @@
-const { create: createPlaylist } = require('../../core/playlist')
-const { db, logger, file } = require('../../core')
-const { orderBy } = require('natural-orderby')
-const _ = require('lodash')
-
-async function main() {
-  await db.streams.load()
-  let streams = await db.streams.find({})
-  const levels = { online: 1, blocked: 2, timeout: 3, error: 4, default: 5 }
-  streams = orderBy(
-    streams,
-    [
-      'channel',
-      s => (s.channel ? '' : s.title),
-      s => levels[s.status] || levels['default'],
-      'height',
-      'frame_rate',
-      'url'
-    ],
-    ['asc', 'asc', 'asc', 'desc', 'desc', 'asc']
-  )
-
-  const files = _.groupBy(streams, 'filepath')
-  for (const filepath in files) {
-    const playlist = createPlaylist(files[filepath], { public: false })
-    await file.create(filepath, playlist.toString())
-  }
-}
-
-main()
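A last note on playlist/update.js: it rewrote the source playlists with the "best" stream first, using the same levels ranking that generate.js just dropped; with the status fields gone from the database there is nothing meaningful left to rank, so the script goes too. The ranking it used, restated from the code above:

    // lower rank sorts first; unknown or missing statuses fall through to `default`
    const levels = { online: 1, blocked: 2, timeout: 3, error: 4, default: 5 }
    const rank = s => levels[s.status] || levels['default']

    rank({ status: 'online' })  // -> 1
    rank({ status: 'blocked' }) // -> 2
    rank({ status: 'weird' })   // -> 5
    rank({})                    // -> 5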