epg/scripts/commands/cluster/load.js

const _ = require('lodash')
const { EPGGrabber, Channel } = require('epg-grabber')
const { program } = require('commander')
const { db, logger, timer, file, parser } = require('../../core')
const dayjs = require('dayjs')
const utc = require('dayjs/plugin/utc')
dayjs.extend(utc)
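
// Parse CLI options: the cluster to load, plus optional request delay, timeout and debug mode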
const options = program
  .requiredOption('-c, --cluster-id <cluster-id>', 'The ID of the cluster to load', parser.parseNumber)
  .option('--delay <delay>', 'Delay between requests (in milliseconds)', parser.parseNumber)
  .option(
    '-t, --timeout <timeout>',
    'Set a timeout for each request (in milliseconds)',
    parser.parseNumber
  )
  .option('--debug', 'Enable debug mode', false)
  .parse(process.argv)
  .opts()

const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs'
const CLUSTER_PATH = `${LOGS_DIR}/cluster/load/cluster_${options.clusterId}.log`

async function main() {
  logger.info('Starting...')
  timer.start()
  logger.info(`Loading cluster: ${options.clusterId}`)

  logger.info(`Creating '${CLUSTER_PATH}'...`)
  await file.create(CLUSTER_PATH)
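
  // Load the queue and pick the items assigned to this cluster, ordered by channel ID and date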
  await db.queue.load()
  let items = await db.queue.find({ cluster_id: options.clusterId })
  items = _.orderBy(items, [i => i.channel.id.toLowerCase(), 'date'])
  const total = items.length

  logger.info('Loading...')
  const results = {}
  let i = 1
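
  // Build the grabber config from the first item's site config, merged with the CLI options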
  let config = require(file.resolve(items[0].configPath))
  config = _.merge(config, {
    debug: options.debug,
    delay: options.delay,
    request: {
      timeout: options.timeout
    }
  })
  const grabber = new EPGGrabber(config)
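
  // Grab each queue item, logging progress per channel and date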
  for (const item of items) {
    const channel = new Channel(item.channel)
    await grabber.grab(channel, item.date, async (data, err) => {
      logger.info(
        `[${i}/${total}] ${channel.site} (${channel.lang}) - ${channel.id} - ${dayjs
          .utc(data.date)
          .format('MMM D, YYYY')} (${data.programs.length} programs)`
      )

      if (err) logger.error(err.message)
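
      // Record the outcome for this queue item as one JSON line in the cluster log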
      const result = {
        _qid: item._id,
        programs: data.programs,
        error: err ? err.message : null
      }

      await file.append(CLUSTER_PATH, JSON.stringify(result) + '\n')

      if (i < total) i++
    })
  }

  db.queue.compact()

  logger.info(`Done in ${timer.format('HH[h] mm[m] ss[s]')}`)
}
main()
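
// Example invocation (assuming the script is run directly with Node from the repo root;
// the exact entry point may differ depending on the repo's npm scripts):
//   node scripts/commands/cluster/load.js --cluster-id=1 --timeout=30000 --delay=1000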