Update scripts

This commit is contained in:
freearhey
2026-05-04 18:01:51 +03:00
parent 3f6b8267cc
commit e8e47b511a
8 changed files with 353 additions and 178 deletions

View File

@@ -1,43 +0,0 @@
import { ChannelGuideObject } from '../../types/channel'
import { SITES_DIR, API_DIR } from '../../constants'
import { Logger, Collection } from '@freearhey/core'
import epgGrabber, { EPGGrabber } from 'epg-grabber'
import { Storage } from '@freearhey/storage-js'
import { Channel } from '../../models'
import path from 'path'
/**
 * Builds api/guides.json: collects every channel declared in the
 * sites/**&#47;*.channels.xml files and saves their guide objects.
 */
async function main() {
  const logger = new Logger()
  // FIX: log message read 'staring...'
  logger.start('starting...')

  logger.info('loading channels...')
  const sitesStorage = new Storage(SITES_DIR)
  const files: string[] = await sitesStorage.list('**/*.channels.xml')
  const channels = new Collection<Channel>()
  for (const filepath of files) {
    const xml = await sitesStorage.load(filepath)
    const parsedChannels = EPGGrabber.parseChannelsXML(xml)
    const channelsFromXML = new Collection(parsedChannels).map(
      (channel: epgGrabber.Channel) => new Channel(channel.toObject())
    )
    channelsFromXML.forEach((channel: Channel) => {
      channels.add(channel)
    })
  }
  logger.info(`found ${channels.count()} channel(s)`)

  const output = channels.map<ChannelGuideObject>((channel: Channel) => channel.getGuideObject())

  const apiStorage = new Storage(API_DIR)
  const outputFilename = 'guides.json'
  // FIX: reuse outputFilename (a hard-coded 'guides.json' was passed to save,
  // so the saved path and the logged path could drift apart).
  await apiStorage.save(outputFilename, output.toJSON())
  logger.info(`saved to "${path.join(API_DIR, outputFilename)}"`)
}

main()

View File

@@ -0,0 +1,88 @@
import { SITES_DIR, API_DIR, DATA_DIR } from '../../constants'
import { Logger, Collection } from '@freearhey/core'
import epgGrabber, { EPGGrabber } from 'epg-grabber'
import { Storage } from '@freearhey/storage-js'
import { Channel, Worker, WorkerData } from '../../models'
import path from 'path'
import { ApiGuide } from '../../models/apiGuide'
// Builds the lookup key used to match a channel parsed from the sites
// directory with the guide sources reported by a worker for the same entry.
// (Previously this key construction was duplicated verbatim in two places.)
function channelKey(xmltvId: string, site: string, siteId: string, lang?: string): string {
  const [channelId, feedId] = xmltvId.split('@')
  return [channelId, feedId, site, siteId, lang].join('_')
}

/**
 * Builds api/guides.json: joins channels from sites/**&#47;*.channels.xml
 * with the guide sources recorded in data/workers.json.
 */
async function main() {
  const logger = new Logger()
  // FIX: log message read 'staring...'
  logger.start('starting...')

  logger.info('loading channels...')
  const channels = await loadChannels()
  logger.info(`found ${channels.count()} channel(s)`)

  logger.info('loading workers.json...')
  const workers = await loadWorkers()
  // channelKey(...) -> guide sources of the worker serving that channel entry.
  // FIX: the accumulator was an untyped `{}`, which rejects string indexing
  // under `strict`; type it explicitly instead.
  const guidesMap = workers.reduce(
    (acc: Record<string, ReturnType<Worker['getGuideSources']>>, data: WorkerData) => {
      const worker = new Worker(data)
      if (!worker.channels) return acc
      worker.channels.forEach(channel => {
        acc[channelKey(channel.xmltv_id, channel.site, channel.site_id, channel.lang)] =
          worker.getGuideSources()
      })
      return acc
    },
    {}
  )

  logger.info('preparing output...')
  const output = channels.map((channel: Channel) => {
    const [channelId, feedId] = channel.xmltv_id.split('@')
    return new ApiGuide({
      channel: channelId,
      feed: feedId,
      site: channel.site,
      site_id: channel.site_id,
      site_name: channel.name,
      lang: channel.lang,
      sources:
        guidesMap[channelKey(channel.xmltv_id, channel.site, channel.site_id, channel.lang)] || []
    })
  })

  logger.info('saving output...')
  const outputFilename = 'guides.json'
  await saveOutput(outputFilename, output.toJSON())
  logger.info(`saved to "${path.join(API_DIR, outputFilename)}"`)
}

main()
// Persists the generated JSON payload into the public API directory.
async function saveOutput(filename: string, json: string) {
  await new Storage(API_DIR).save(filename, json)
}
// Reads data/workers.json; yields an empty list when the file is missing
// or does not contain an array.
async function loadWorkers() {
  const parsed = await new Storage(DATA_DIR).json('workers.json')
  if (Array.isArray(parsed)) return parsed
  return []
}
// Collects every channel declared across the sites/**/*.channels.xml files.
async function loadChannels() {
  const storage = new Storage(SITES_DIR)
  const channelFiles: string[] = await storage.list('**/*.channels.xml')
  const allChannels = new Collection<Channel>()
  for (const file of channelFiles) {
    const xml = await storage.load(file)
    new Collection(EPGGrabber.parseChannelsXML(xml))
      .map((parsed: epgGrabber.Channel) => new Channel(parsed.toObject()))
      .forEach((channel: Channel) => allChannels.add(channel))
  }
  return allChannels
}

View File

@@ -1,118 +1,26 @@
import { HTMLTableRow, HTMLTableDataItem, HTMLTableColumn } from '../../types/htmlTable' import { HTMLTableRow, HTMLTableDataItem, HTMLTableColumn } from '../../types/htmlTable'
import epgGrabber, { EPGGrabber } from 'epg-grabber' import { Worker, WorkerData, WorkerGuideSource } from '../../models'
import AxiosMockAdapter from 'axios-mock-adapter' import { DATA_DIR, ROOT_DIR } from '../../constants'
import { Storage } from '@freearhey/storage-js' import { Storage } from '@freearhey/storage-js'
import { Channel, Worker } from '../../models'
import { Collection } from '@freearhey/core' import { Collection } from '@freearhey/core'
import { ROOT_DIR } from '../../constants'
import { Logger } from '@freearhey/core' import { Logger } from '@freearhey/core'
import { HTMLTable } from '../../core' import { HTMLTable } from '../../core'
import epgParser from 'epg-parser'
import axios from 'axios'
async function main() { async function main() {
const logger = new Logger({ level: process.env.NODE_ENV === 'test' ? -999 : 3 }) const logger = new Logger({ level: process.env.NODE_ENV === 'test' ? -999 : 3 })
const rootStorage = new Storage(ROOT_DIR) const dataStorage = new Storage(DATA_DIR)
const workers = new Map<string, Worker>()
logger.info('loading workers.txt...') logger.info('loading workers.json...')
const workersTxt = await rootStorage.load('workers.txt') const workers = await dataStorage.json('workers.json')
workersTxt.split('\r\n').forEach((host: string) => { if (!Array.isArray(workers)) return
if (!host) return
const worker = new Worker({ host })
workers.set(host, worker)
})
for (const worker of workers.values()) {
logger.info(`processing "${worker.host}"...`)
const client = axios.create({
baseURL: worker.getBaseUrl(),
timeout: 60000
})
if (process.env.NODE_ENV === 'test') {
const mock = new AxiosMockAdapter(client)
if (worker.host === 'example.com') {
mock.onGet('worker.json').reply(404)
} else {
const testStorage = new Storage('tests/__data__/input/guides_update')
mock.onGet('worker.json').reply(200, await testStorage.load('worker.json'))
mock.onGet('channels.xml').reply(200, await testStorage.load('channels.xml'))
mock.onGet('guide.xml').reply(200, await testStorage.load('guide.xml'))
}
}
const workerJson = await client
.get('worker.json')
.then(res => res.data)
.catch(err => {
worker.status = err.status
logger.error(err.message)
})
if (!workerJson) {
worker.status = 'MISSING_WORKER_CONFIG'
logger.error('Unable to load "workers.json"')
continue
}
worker.channelsPath = workerJson.channels
worker.guideXmlPath =
typeof workerJson.guide === 'string' ? workerJson.guide : workerJson?.guide?.xml
worker.guideGzipPath = workerJson?.guide?.gzip
worker.guideJsonPath = workerJson?.guide?.json
if (!worker.channelsPath) {
worker.status = 'MISSING_CHANNELS_PATH'
logger.error('The "channels" property is missing from the workers config')
continue
}
if (!worker.guideXmlPath) {
worker.status = 'MISSING_GUIDE_XML_PATH'
logger.error('The "guide" property is missing from the workers config')
continue
}
const channelsXml = await client
.get(worker.channelsPath)
.then(res => res.data)
.catch(err => {
worker.status = err.status
logger.error(err.message)
})
if (!channelsXml) continue
const parsedChannels = EPGGrabber.parseChannelsXML(channelsXml)
worker.channels = new Collection(parsedChannels).map(
(channel: epgGrabber.Channel) => new Channel(channel.toObject())
)
const guideXml = await client
.get(worker.guideXmlPath)
.then(res => res.data)
.catch(err => {
worker.status = err.status
logger.error(err.message)
})
if (!guideXml) continue
const parsedGuide = epgParser.parse(guideXml)
worker.lastUpdated = parsedGuide.date
worker.status = 'OK'
}
logger.info('creating guides table...') logger.info('creating guides table...')
const rows = new Collection<HTMLTableRow>() const rows = new Collection<HTMLTableRow>()
workers.forEach((worker: Worker) => { workers.forEach((data: WorkerData) => {
const links = worker.getLinks() const worker = new Worker(data)
const sources = worker.getGuideSources()
rows.add( rows.add(
new Collection<HTMLTableDataItem>([ new Collection<HTMLTableDataItem>([
{ value: worker.host }, { value: worker.host },
@@ -120,8 +28,10 @@ async function main() {
{ value: worker.getChannelsCount().toString(), align: 'right' }, { value: worker.getChannelsCount().toString(), align: 'right' },
{ value: worker.getLastUpdated(), align: 'left' }, { value: worker.getLastUpdated(), align: 'left' },
{ {
value: links.length value: sources.length
? links.map(link => `<a href="${link.url}">${link.label}</a>`).join(' | ') ? sources
.map((source: WorkerGuideSource) => `<a href="${source.url}">${source.format}</a>`)
.join(' | ')
: '-' : '-'
} }
]) ])
@@ -129,6 +39,7 @@ async function main() {
}) })
logger.info('updating guides.md...') logger.info('updating guides.md...')
const rootStorage = new Storage(ROOT_DIR)
const table = new HTMLTable( const table = new HTMLTable(
rows, rows,
new Collection<HTMLTableColumn>([ new Collection<HTMLTableColumn>([

View File

@@ -0,0 +1,122 @@
import epgGrabber, { EPGGrabber } from 'epg-grabber'
import { DATA_DIR, ROOT_DIR } from '../../constants'
import AxiosMockAdapter from 'axios-mock-adapter'
import { Storage } from '@freearhey/storage-js'
import { Channel, Worker } from '../../models'
import { Collection } from '@freearhey/core'
import { Logger } from '@freearhey/core'
import epgParser from 'epg-parser'
import axios from 'axios'
import path from 'path'
/**
 * Probes every host listed in workers.txt, collects each worker's config,
 * channel list and guide metadata, and saves the result to data/workers.json.
 */
async function main() {
  const logger = new Logger({ level: process.env.NODE_ENV === 'test' ? -999 : 3 })
  const rootStorage = new Storage(ROOT_DIR)
  const workers = new Map<string, Worker>()

  logger.info('loading workers.txt...')
  const workersTxt = await rootStorage.load('workers.txt')
  // FIX: split on CRLF or LF so the list also parses on checkouts
  // that normalize line endings (previously only '\r\n' was handled).
  const hosts = workersTxt.split(/\r?\n/)
  hosts.forEach((host: string) => {
    if (!host) return
    const worker = new Worker({ host })
    workers.set(host, worker)
  })

  for (const worker of workers.values()) {
    logger.info(`processing "${worker.host}"...`)

    const client = axios.create({
      baseURL: worker.getBaseUrl(),
      timeout: 60000
    })

    // Under test, stub every HTTP call with fixture data.
    if (process.env.NODE_ENV === 'test') {
      const mock = new AxiosMockAdapter(client)
      if (worker.host === 'example.com') {
        mock.onGet('worker.json').reply(404)
      } else {
        const testStorage = new Storage('tests/__data__/input/workers_load')
        mock.onGet('worker.json').reply(200, await testStorage.load('worker.json'))
        mock.onGet('channels.xml').reply(200, await testStorage.load('channels.xml'))
        mock.onGet('guide.xml').reply(200, await testStorage.load('guide.xml'))
      }
    }

    const workerConfig = await client
      .get('worker.json')
      .then(res => res.data)
      .catch(err => {
        worker.setStatus(err.status)
        logger.error(err.message)
      })
    if (!workerConfig) {
      worker.setStatus('MISSING_WORKER_CONFIG')
      // FIX: message said "workers.json"; the fetched file is "worker.json".
      logger.error('Unable to load "worker.json"')
      continue
    }

    worker
      .setChannelsPath(workerConfig.channels)
      .setGuideXmlPath(
        typeof workerConfig.guide === 'string' ? workerConfig.guide : workerConfig?.guide?.xml
      )
      .setGuideGzipPath(workerConfig?.guide?.gzip)
      .setGuideJsonPath(workerConfig?.guide?.json)

    if (!worker.channelsPath) {
      worker.setStatus('MISSING_CHANNELS_PATH')
      logger.error('The "channels" property is missing from the workers config')
      continue
    }
    if (!worker.guideXmlPath) {
      worker.setStatus('MISSING_GUIDE_XML_PATH')
      logger.error('The "guide" property is missing from the workers config')
      continue
    }

    const channelsXml = await client
      .get(worker.channelsPath)
      .then(res => res.data)
      .catch(err => {
        // FIX: was a direct `worker.status = err.status` assignment;
        // use the setter for consistency with every other status update.
        worker.setStatus(err.status)
        logger.error(err.message)
      })
    if (!channelsXml) continue

    const parsedChannels = EPGGrabber.parseChannelsXML(channelsXml)
    worker.channels = new Collection(parsedChannels).map(
      (channel: epgGrabber.Channel) => new Channel(channel.toObject())
    )

    const guideXml = await client
      .get(worker.guideXmlPath)
      .then(res => res.data)
      .catch(err => {
        worker.setStatus(err.status)
        logger.error(err.message)
      })
    if (!guideXml) continue

    const parsedGuide = epgParser.parse(guideXml)
    worker.lastUpdated = parsedGuide.date
    worker.setStatus('OK')
  }

  const output = [...workers.values()]
  const dataStorage = new Storage(DATA_DIR)
  const outputFilename = 'workers.json'
  // Worker.toJSON() drives serialization here.
  await dataStorage.save(outputFilename, JSON.stringify(output))
  logger.info(`saved to "${path.join(DATA_DIR, outputFilename)}"`)
}

main()

View File

@@ -0,0 +1,49 @@
import { WorkerGuideSource } from './worker'
// Plain-data shape of one guides.json entry (constructor input and
// toJSON() output of ApiGuide).
export interface ApiGuideData {
  // channel/feed are the two parts of an xmltv_id ("<channel>@<feed>");
  // null when the corresponding part is absent
  channel: string | null
  feed: string | null
  site: string
  site_id: string
  site_name: string
  lang: string | null
  // guide sources reported by workers for this entry; optional on input
  sources?: WorkerGuideSource[]
}
/**
 * One serializable entry of the public guides.json API file.
 */
export class ApiGuide {
  channel: string | null
  feed: string | null
  site: string
  site_id: string
  site_name: string
  lang: string
  sources: WorkerGuideSource[] = []

  constructor(data: ApiGuideData) {
    this.channel = data.channel || null
    this.feed = data.feed || null
    this.site = data.site
    this.site_id = data.site_id
    this.site_name = data.site_name
    // Fall back to English when the source channel has no language set.
    this.lang = data.lang || 'en'
    this.sources = data.sources || []
  }

  // FIX: parameter was typed with the undeclared name `ApiGuideSource`;
  // sources are WorkerGuideSource objects (see Worker.getGuideSources()).
  addSource(source: WorkerGuideSource): this {
    this.sources.push(source)
    return this
  }

  /** Plain-data projection used by JSON.stringify. */
  toJSON(): ApiGuideData {
    return {
      channel: this.channel,
      feed: this.feed,
      site: this.site,
      site_id: this.site_id,
      site_name: this.site_name,
      lang: this.lang,
      sources: this.sources
    }
  }
}

View File

@@ -1,22 +1,8 @@
import { ChannelGuideObject } from '../types/channel'
import * as epgGrabber from 'epg-grabber' import * as epgGrabber from 'epg-grabber'
import { SITES_DIR } from '../constants' import { SITES_DIR } from '../constants'
import path from 'node:path' import path from 'node:path'
export class Channel extends epgGrabber.Channel { export class Channel extends epgGrabber.Channel {
getGuideObject(): ChannelGuideObject {
const [channelId, feedId] = this.xmltv_id.split('@')
return {
channel: channelId || null,
feed: feedId || null,
site: this.site,
site_id: this.site_id,
site_name: this.name,
lang: this.lang || 'en'
}
}
getConfigPath(): string { getConfigPath(): string {
return path.resolve(SITES_DIR, `${this.site}/${this.site}.config.js`) return path.resolve(SITES_DIR, `${this.site}/${this.site}.config.js`)
} }

View File

@@ -1,5 +1,6 @@
import relativeTime from 'dayjs/plugin/relativeTime' import relativeTime from 'dayjs/plugin/relativeTime'
import { Collection } from '@freearhey/core' import { Collection } from '@freearhey/core'
import * as epgGrabber from 'epg-grabber'
import { Channel } from './channel' import { Channel } from './channel'
import utc from 'dayjs/plugin/utc' import utc from 'dayjs/plugin/utc'
import dayjs from 'dayjs' import dayjs from 'dayjs'
@@ -7,22 +8,79 @@ import dayjs from 'dayjs'
dayjs.extend(relativeTime) dayjs.extend(relativeTime)
dayjs.extend(utc) dayjs.extend(utc)
export interface WorkerGuideSource {
host: string
format: string
url: string
}
export interface WorkerData { export interface WorkerData {
host: string host: string
channelsPath?: string
channels?: epgGrabber.Types.ChannelData[]
guideXmlPath?: string
guideGzipPath?: string
guideJsonPath?: string
status?: string
lastUpdated?: string
} }
export class Worker { export class Worker {
host: string host: string
channelsPath?: string channelsPath?: string
channels?: Collection<Channel>
guideXmlPath?: string guideXmlPath?: string
guideGzipPath?: string guideGzipPath?: string
guideJsonPath?: string guideJsonPath?: string
channels?: Collection<Channel>
status?: string status?: string
lastUpdated?: string lastUpdated?: string
constructor(data: WorkerData) { constructor(data: WorkerData) {
this.host = data.host this.host = data.host
this.update(data)
}
update(data: Partial<WorkerData>): this {
if (data.host) this.host = data.host
if (data.channelsPath) this.channelsPath = data.channelsPath
if (data.guideXmlPath) this.guideXmlPath = data.guideXmlPath
if (data.guideGzipPath) this.guideGzipPath = data.guideGzipPath
if (data.guideJsonPath) this.guideJsonPath = data.guideJsonPath
if (data.status) this.status = data.status
if (data.lastUpdated) this.lastUpdated = data.lastUpdated
if (data.channels) {
const channelInstances = data.channels.map(c => new Channel(c))
this.channels = new Collection(channelInstances)
}
return this
}
setChannelsPath(path: string): this {
this.channelsPath = path
return this
}
setGuideXmlPath(path: string): this {
this.guideXmlPath = path
return this
}
setGuideGzipPath(path: string): this {
this.guideGzipPath = path
return this
}
setGuideJsonPath(path: string): this {
this.guideJsonPath = path
return this
}
setStatus(status: string): this {
this.status = status
return this
} }
getBaseUrl(): string { getBaseUrl(): string {
@@ -83,30 +141,42 @@ export class Worker {
getLastUpdated(): string { getLastUpdated(): string {
if (!this.lastUpdated) return '-' if (!this.lastUpdated) return '-'
let now = dayjs() const now = dayjs.utc(process.env.CURR_DATE || new Date().toISOString())
if (process.env.NODE_ENV === 'test') now = dayjs.utc('2026-02-13')
return dayjs.utc(this.lastUpdated).from(now) return dayjs.utc(this.lastUpdated).from(now)
} }
getLinks(): { url: string; label: string }[] { getGuideSources(): WorkerGuideSource[] {
const links = [] const sources = []
if (this.guideXmlPath) { if (this.guideXmlPath) {
const url = new URL(this.guideXmlPath, this.getBaseUrl()) const url = new URL(this.guideXmlPath, this.getBaseUrl())
links.push({ url: url.href, label: 'XML' }) sources.push({ host: this.host, url: url.href, format: 'XML' })
} }
if (this.guideGzipPath) { if (this.guideGzipPath) {
const url = new URL(this.guideGzipPath, this.getBaseUrl()) const url = new URL(this.guideGzipPath, this.getBaseUrl())
links.push({ url: url.href, label: 'GZIP' }) sources.push({ host: this.host, url: url.href, format: 'GZIP' })
} }
if (this.guideJsonPath) { if (this.guideJsonPath) {
const url = new URL(this.guideJsonPath, this.getBaseUrl()) const url = new URL(this.guideJsonPath, this.getBaseUrl())
links.push({ url: url.href, label: 'JSON' }) sources.push({ host: this.host, url: url.href, format: 'JSON' })
} }
return links return sources
}
toJSON(): WorkerData {
return {
host: this.host,
channelsPath: this.channelsPath,
channels: this.channels ? this.channels.map(c => c.toObject()).all() : [],
guideXmlPath: this.guideXmlPath,
guideGzipPath: this.guideGzipPath,
guideJsonPath: this.guideJsonPath,
status: this.status,
lastUpdated: this.lastUpdated
}
} }
} }

View File

@@ -1,8 +0,0 @@
// Shape of a single entry in the generated guides.json API file.
export interface ChannelGuideObject {
  // channel/feed are the two parts of an xmltv_id ("<channel>@<feed>");
  // null when the corresponding part is absent
  channel: string | null
  feed: string | null
  site: string
  site_id: string
  site_name: string
  lang: string
}