Mirror of https://github.com/iptv-org/epg, synced 2025-12-17 02:47:02 -05:00
Update scripts
@@ -1,17 +1,9 @@
import { Logger, Storage, Collection } from '@freearhey/core'
import { ChannelsParser } from '../../core'
import path from 'path'
import { Logger, Collection, Storage } from '@freearhey/core'
import { SITES_DIR, API_DIR } from '../../constants'
import { GuideChannel } from '../../models'
import { ChannelsParser } from '../../core'
import epgGrabber from 'epg-grabber'

type OutputItem = {
  channel: string | null
  feed: string | null
  site: string
  site_id: string
  site_name: string
  lang: string
}
import path from 'path'

async function main() {
  const logger = new Logger()
@@ -20,31 +12,24 @@ async function main() {

  logger.info('loading channels...')
  const sitesStorage = new Storage(SITES_DIR)
  const parser = new ChannelsParser({ storage: sitesStorage })
  const parser = new ChannelsParser({
    storage: sitesStorage
  })

  let files: string[] = []
  files = await sitesStorage.list('**/*.channels.xml')
  const files: string[] = await sitesStorage.list('**/*.channels.xml')

  let parsedChannels = new Collection()
  const channels = new Collection()
  for (const filepath of files) {
    parsedChannels = parsedChannels.concat(await parser.parse(filepath))
    const channelList = await parser.parse(filepath)

    channelList.channels.forEach((data: epgGrabber.Channel) => {
      channels.add(new GuideChannel(data))
    })
  }

  logger.info(` found ${parsedChannels.count()} channel(s)`)
  logger.info(`found ${channels.count()} channel(s)`)

  const output = parsedChannels.map((channel: epgGrabber.Channel): OutputItem => {
    const xmltv_id = channel.xmltv_id || ''
    const [channelId, feedId] = xmltv_id.split('@')

    return {
      channel: channelId || null,
      feed: feedId || null,
      site: channel.site || '',
      site_id: channel.site_id || '',
      site_name: channel.name,
      lang: channel.lang || ''
    }
  })
  const output = channels.map((channel: GuideChannel) => channel.toJSON())

  const apiStorage = new Storage(API_DIR)
  const outputFilename = 'guides.json'
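For context, here is a minimal sketch of what the new GuideChannel model presumably returns from toJSON(), reconstructed from the inline OutputItem mapping that this hunk drops. Only the field names and the xmltv_id split come from the diff; the class body itself is an assumption, not the actual model in ../../models.

import epgGrabber from 'epg-grabber'

// Hypothetical sketch: toJSON() assumed to reproduce the removed OutputItem shape.
class GuideChannel {
  constructor(private data: epgGrabber.Channel) {}

  toJSON() {
    // xmltv_id is stored as "<channel id>" or "<channel id>@<feed id>"
    const xmltv_id = this.data.xmltv_id || ''
    const [channelId, feedId] = xmltv_id.split('@')

    return {
      channel: channelId || null,
      feed: feedId || null,
      site: this.data.site || '',
      site_id: this.data.site_id || '',
      site_name: this.data.name,
      lang: this.data.lang || ''
    }
  }
}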
@@ -17,7 +17,8 @@ async function main() {
    loader.download('feeds.json'),
    loader.download('timezones.json'),
    loader.download('guides.json'),
    loader.download('streams.json')
    loader.download('streams.json'),
    loader.download('logos.json')
  ])
}
@@ -1,17 +1,17 @@
import { Storage, Collection, Logger, Dictionary } from '@freearhey/core'
import type { DataProcessorData } from '../../types/dataProcessor'
import type { DataLoaderData } from '../../types/dataLoader'
import { ChannelSearchableData } from '../../types/channel'
import { Channel, ChannelList, Feed } from '../../models'
import { DataProcessor, DataLoader } from '../../core'
import { select, input } from '@inquirer/prompts'
import { ChannelsParser, XML } from '../../core'
import { Channel, Feed } from '../../models'
import { ChannelsParser } from '../../core'
import { DATA_DIR } from '../../constants'
import nodeCleanup from 'node-cleanup'
import sjs from '@freearhey/search-js'
import epgGrabber from 'epg-grabber'
import { Command } from 'commander'
import readline from 'readline'
import sjs from '@freearhey/search-js'
import { DataProcessor, DataLoader } from '../../core'
import type { DataLoaderData } from '../../types/dataLoader'
import type { DataProcessorData } from '../../types/dataProcessor'
import epgGrabber from 'epg-grabber'
import { ChannelSearchableData } from '../../types/channel'

type ChoiceValue = { type: string; value?: Feed | Channel }
type Choice = { name: string; short?: string; value: ChoiceValue; default?: boolean }
@@ -34,11 +34,11 @@ program.argument('<filepath>', 'Path to *.channels.xml file to edit').parse(proc
const filepath = program.args[0]
const logger = new Logger()
const storage = new Storage()
let parsedChannels = new Collection()
let channelList = new ChannelList({ channels: [] })

main(filepath)
nodeCleanup(() => {
  save(filepath)
  save(filepath, channelList)
})

export default async function main(filepath: string) {
@@ -51,18 +51,18 @@ export default async function main(filepath: string) {
  const dataStorage = new Storage(DATA_DIR)
  const loader = new DataLoader({ storage: dataStorage })
  const data: DataLoaderData = await loader.load()
  const { feedsGroupedByChannelId, channels, channelsKeyById }: DataProcessorData =
  const { channels, channelsKeyById, feedsGroupedByChannelId }: DataProcessorData =
    processor.process(data)

  logger.info('loading channels...')
  const parser = new ChannelsParser({ storage })
  parsedChannels = await parser.parse(filepath)
  const parsedChannelsWithoutId = parsedChannels.filter(
  channelList = await parser.parse(filepath)
  const parsedChannelsWithoutId = channelList.channels.filter(
    (channel: epgGrabber.Channel) => !channel.xmltv_id
  )

  logger.info(
    `found ${parsedChannels.count()} channels (including ${parsedChannelsWithoutId.count()} without ID)`
    `found ${channelList.channels.count()} channels (including ${parsedChannelsWithoutId.count()} without ID)`
  )

  logger.info('creating search index...')
@@ -73,10 +73,10 @@ export default async function main(filepath: string) {

  logger.info('starting...\n')

  for (const parsedChannel of parsedChannelsWithoutId.all()) {
  for (const channel of parsedChannelsWithoutId.all()) {
    try {
      parsedChannel.xmltv_id = await selectChannel(
        parsedChannel,
      channel.xmltv_id = await selectChannel(
        channel,
        searchIndex,
        feedsGroupedByChannelId,
        channelsKeyById
@@ -124,8 +124,8 @@ async function selectChannel(
    case 'channel': {
      const selectedChannel = selected.value
      if (!selectedChannel) return ''
      const selectedFeedId = await selectFeed(selectedChannel.id, feedsGroupedByChannelId)
      if (selectedFeedId === '-') return selectedChannel.id
      const selectedFeedId = await selectFeed(selectedChannel.id || '', feedsGroupedByChannelId)
      if (selectedFeedId === '-') return selectedChannel.id || ''
      return [selectedChannel.id, selectedFeedId].join('@')
    }
  }
@@ -153,7 +153,7 @@ async function selectFeed(channelId: string, feedsGroupedByChannelId: Dictionary
    case 'feed':
      const selectedFeed = selected.value
      if (!selectedFeed) return ''
      return selectedFeed.id
      return selectedFeed.id || ''
  }

  return ''
@@ -205,10 +205,9 @@ function getFeedChoises(feeds: Collection): Choice[] {
  return choises
}

function save(filepath: string) {
function save(filepath: string, channelList: ChannelList) {
  if (!storage.existsSync(filepath)) return
  const xml = new XML(parsedChannels)
  storage.saveSync(filepath, xml.toString())
  storage.saveSync(filepath, channelList.toString())
  logger.info(`\nFile '${filepath}' successfully saved`)
}
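Two xmltv_id formats flow out of selectChannel: a bare channel id, or "<channel id>@<feed id>" when a feed was picked. This is the same composite value that the guides.json script earlier splits on '@'. A small illustration of the round trip; the helper and the example id below are hypothetical and exist only to restate the join('@') / split('@') pair used in the diff.

// Hypothetical helper illustrating the xmltv_id format used across these scripts.
function composeXmltvId(channelId: string, feedId?: string): string {
  return feedId ? [channelId, feedId].join('@') : channelId
}

// Round trip with an illustrative value:
const xmltv_id = composeXmltvId('France3.fr', 'SD') // 'France3.fr@SD'
const [channelId, feedId] = xmltv_id.split('@')     // ['France3.fr', 'SD']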
@@ -1,8 +1,9 @@
import { Logger, File, Collection, Storage } from '@freearhey/core'
import { ChannelsParser, XML } from '../../core'
import { Channel } from 'epg-grabber'
import { Command } from 'commander'
import { Logger, File, Storage } from '@freearhey/core'
import { ChannelsParser } from '../../core'
import { ChannelList } from '../../models'
import { pathToFileURL } from 'node:url'
import epgGrabber from 'epg-grabber'
import { Command } from 'commander'

const program = new Command()
program
@@ -21,17 +22,25 @@ type ParseOptions = {
const options: ParseOptions = program.opts()

async function main() {
function isPromise(promise: object[] | Promise<object[]>) {
  return (
    !!promise &&
    typeof promise === 'object' &&
    typeof (promise as Promise<object[]>).then === 'function'
  )
}

  const storage = new Storage()
  const parser = new ChannelsParser({ storage })
  const logger = new Logger()
  const parser = new ChannelsParser({ storage })
  const file = new File(options.config)
  const dir = file.dirname()
  const config = (await import(pathToFileURL(options.config).toString())).default
  const outputFilepath = options.output || `${dir}/${config.site}.channels.xml`

  let channels = new Collection()
  let channelList = new ChannelList({ channels: [] })
  if (await storage.exists(outputFilepath)) {
    channels = await parser.parse(outputFilepath)
    channelList = await parser.parse(outputFilepath)
  }

  const args: {
@@ -49,45 +58,31 @@ async function main() {
  if (isPromise(parsedChannels)) {
    parsedChannels = await parsedChannels
  }
  parsedChannels = parsedChannels.map((channel: Channel) => {
  parsedChannels = parsedChannels.map((channel: epgGrabber.Channel) => {
    channel.site = config.site

    return channel
  })

  let output = new Collection()
  parsedChannels.forEach((channel: Channel) => {
    const found: Channel | undefined = channels.first(
      (_channel: Channel) => _channel.site_id == channel.site_id
    )
  const newChannelList = new ChannelList({ channels: [] })
  parsedChannels.forEach((channel: epgGrabber.Channel) => {
    if (!channel.site_id) return

    const found: epgGrabber.Channel | undefined = channelList.get(channel.site_id)

    if (found) {
      channel.xmltv_id = found.xmltv_id
      channel.lang = found.lang
    }

    output.add(channel)
    newChannelList.add(channel)
  })

  output = output.orderBy([
    (channel: Channel) => channel.lang || '_',
    (channel: Channel) => (channel.xmltv_id ? channel.xmltv_id.toLowerCase() : '0'),
    (channel: Channel) => channel.site_id
  ])
  newChannelList.sort()

  const xml = new XML(output)

  await storage.save(outputFilepath, xml.toString())
  await storage.save(outputFilepath, newChannelList.toString())

  logger.info(`File '${outputFilepath}' successfully saved`)
}

main()

function isPromise(promise: object[] | Promise<object[]>) {
  return (
    !!promise &&
    typeof promise === 'object' &&
    typeof (promise as Promise<object[]>).then === 'function'
  )
}
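Several of these commands now go through a ChannelList model instead of a bare Collection. Its real implementation lives in ../../models and is not part of this diff; the sketch below only infers a plausible shape from the calls made here (constructor({ channels }), .channels, add(), get(), sort(), toString()) and may differ from the actual code.

import { Collection } from '@freearhey/core'
import epgGrabber from 'epg-grabber'

// Assumed shape only; inferred from usage in this diff, not from the actual model.
class ChannelList {
  channels: Collection

  constructor({ channels }: { channels: epgGrabber.Channel[] }) {
    this.channels = new Collection(channels)
  }

  add(channel: epgGrabber.Channel): void {
    this.channels.add(channel)
  }

  get(siteId: string): epgGrabber.Channel | undefined {
    // Lookup by site_id, matching how the parse command resolves existing channels.
    return this.channels.first((channel: epgGrabber.Channel) => channel.site_id === siteId)
  }

  sort(): void {
    // Presumably reproduces the ordering the old code applied with Collection.orderBy().
    this.channels = this.channels.orderBy([
      (channel: epgGrabber.Channel) => channel.lang || '_',
      (channel: epgGrabber.Channel) => (channel.xmltv_id ? channel.xmltv_id.toLowerCase() : '0'),
      (channel: epgGrabber.Channel) => channel.site_id
    ])
  }

  toString(): string {
    // Serializes back to *.channels.xml; the old code delegated this to `new XML(...)`.
    // Implementation omitted here.
    return ''
  }
}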
@@ -1,11 +1,13 @@
import { Storage, Collection, Dictionary, File } from '@freearhey/core'
import { ChannelsParser } from '../../core'
import { Channel, Feed } from '../../models'
import { ChannelsParser, DataLoader, DataProcessor } from '../../core'
import { DataProcessorData } from '../../types/dataProcessor'
import { Storage, Dictionary, File } from '@freearhey/core'
import { DataLoaderData } from '../../types/dataLoader'
import { ChannelList } from '../../models'
import { DATA_DIR } from '../../constants'
import epgGrabber from 'epg-grabber'
import { program } from 'commander'
import chalk from 'chalk'
import langs from 'langs'
import { DATA_DIR } from '../../constants'
import epgGrabber from 'epg-grabber'

program.argument('[filepath]', 'Path to *.channels.xml files to validate').parse(process.argv)

@@ -19,15 +21,14 @@ type ValidationError = {
}

async function main() {
  const parser = new ChannelsParser({ storage: new Storage() })

  const processor = new DataProcessor()
  const dataStorage = new Storage(DATA_DIR)
  const channelsData = await dataStorage.json('channels.json')
  const channels = new Collection(channelsData).map(data => new Channel(data))
  const channelsKeyById = channels.keyBy((channel: Channel) => channel.id)
  const feedsData = await dataStorage.json('feeds.json')
  const feeds = new Collection(feedsData).map(data => new Feed(data))
  const feedsKeyByStreamId = feeds.keyBy((feed: Feed) => feed.getStreamId())
  const loader = new DataLoader({ storage: dataStorage })
  const data: DataLoaderData = await loader.load()
  const { channelsKeyById, feedsKeyByStreamId }: DataProcessorData = processor.process(data)
  const parser = new ChannelsParser({
    storage: new Storage()
  })

  let totalFiles = 0
  let totalErrors = 0
@@ -38,11 +39,11 @@ async function main() {
    const file = new File(filepath)
    if (file.extension() !== 'xml') continue

    const parsedChannels = await parser.parse(filepath)
    const channelList: ChannelList = await parser.parse(filepath)

    const bufferBySiteId = new Dictionary()
    const errors: ValidationError[] = []
    parsedChannels.forEach((channel: epgGrabber.Channel) => {
    channelList.channels.forEach((channel: epgGrabber.Channel) => {
      const bufferId: string = channel.site_id
      if (bufferBySiteId.missing(bufferId)) {
        bufferBySiteId.set(bufferId, true)
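The validate command previously built its lookups by hand from channels.json and feeds.json; it now takes channelsKeyById and feedsKeyByStreamId from DataProcessor. A sketch of what that processing is assumed to do for these two lookups, reconstructed from the removed lines above; the DataLoaderData field names (channels, feeds) and the helper name are assumptions, and the real DataProcessorData contains more fields than shown here.

import { Collection } from '@freearhey/core'
import { Channel, Feed } from '../../models'
import type { DataLoaderData } from '../../types/dataLoader'

// Hypothetical equivalent of the removed manual code.
function buildLookups(loaded: DataLoaderData) {
  const channels = new Collection(loaded.channels).map(data => new Channel(data))
  const channelsKeyById = channels.keyBy((channel: Channel) => channel.id)

  const feeds = new Collection(loaded.feeds).map(data => new Feed(data))
  const feedsKeyByStreamId = feeds.keyBy((feed: Feed) => feed.getStreamId())

  return { channelsKeyById, feedsKeyByStreamId }
}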
@@ -1,9 +1,10 @@
import { Logger, Timer, Storage, Collection } from '@freearhey/core'
import { Option, program } from 'commander'
import { QueueCreator, Job, ChannelsParser } from '../../core'
import { Option, program } from 'commander'
import { SITES_DIR } from '../../constants'
import { Channel } from 'epg-grabber'
import path from 'path'
import { SITES_DIR } from '../../constants'
import { ChannelList } from '../../models'

program
  .addOption(new Option('-s, --site <name>', 'Name of the site to parse'))
@@ -31,7 +32,7 @@ program
      '--days <days>',
      'Override the number of days for which the program will be loaded (defaults to the value from the site config)'
    )
      .argParser(value => (value !== undefined ? parseInt(value) : undefined))
      .argParser(value => parseInt(value))
      .env('DAYS')
  )
  .addOption(
@@ -87,31 +88,35 @@ async function main() {
    files = await storage.list(options.channels)
  }

  let parsedChannels = new Collection()
  let channels = new Collection()
  for (const filepath of files) {
    parsedChannels = parsedChannels.concat(await parser.parse(filepath))
    const channelList: ChannelList = await parser.parse(filepath)

    channels = channels.concat(channelList.channels)
  }

  if (options.lang) {
    parsedChannels = parsedChannels.filter((channel: Channel) => {
    channels = channels.filter((channel: Channel) => {
      if (!options.lang || !channel.lang) return true

      return options.lang.includes(channel.lang)
    })
  }
  logger.info(` found ${parsedChannels.count()} channel(s)`)

  logger.info(` found ${channels.count()} channel(s)`)

  logger.info('run:')
  runJob({ logger, parsedChannels })
  runJob({ logger, channels })
}

main()

async function runJob({ logger, parsedChannels }: { logger: Logger; parsedChannels: Collection }) {
async function runJob({ logger, channels }: { logger: Logger; channels: Collection }) {
  const timer = new Timer()
  timer.start()

  const queueCreator = new QueueCreator({
    parsedChannels,
    channels,
    logger,
    options
  })
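With the parsed channel lists merged into a single Collection, the --lang option filters it before the queue is built. A short usage illustration of that filter; the option value is assumed to arrive as an array of language codes, and the codes below are illustrative only.

import { Collection } from '@freearhey/core'
import { Channel } from 'epg-grabber'

// Illustration only: `--lang fr,en` assumed to be parsed into ['fr', 'en'].
const lang: string[] = ['fr', 'en']

let channels = new Collection() // would normally hold the merged channel lists

channels = channels.filter((channel: Channel) => {
  if (!channel.lang) return true // channels without a language tag are kept

  return lang.includes(channel.lang)
})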
@@ -1,21 +1,25 @@
import { IssueLoader, HTMLTable, ChannelsParser } from '../../core'
import { Logger, Storage, Collection } from '@freearhey/core'
import { ChannelList, Issue, Site } from '../../models'
import { SITES_DIR, ROOT_DIR } from '../../constants'
import { Issue, Site } from '../../models'
import { Channel } from 'epg-grabber'

async function main() {
  const logger = new Logger({ disabled: true })
  const loader = new IssueLoader()
  const logger = new Logger({ level: -999 })
  const issueLoader = new IssueLoader()
  const sitesStorage = new Storage(SITES_DIR)
  const channelsParser = new ChannelsParser({ storage: sitesStorage })
  const sites = new Collection()

  logger.info('loading channels...')
  const channelsParser = new ChannelsParser({
    storage: sitesStorage
  })

  logger.info('loading list of sites')
  const folders = await sitesStorage.list('*/')

  logger.info('loading issues...')
  const issues = await loader.load()
  const issues = await issueLoader.load()

  logger.info('putting the data together...')
  const brokenGuideReports = issues.filter(issue =>
@@ -33,19 +37,21 @@ async function main() {

    const files = await sitesStorage.list(`${domain}/*.channels.xml`)
    for (const filepath of files) {
      const channels = await channelsParser.parse(filepath)
      const channelList: ChannelList = await channelsParser.parse(filepath)

      site.totalChannels += channels.count()
      site.markedChannels += channels.filter((channel: Channel) => channel.xmltv_id).count()
      site.totalChannels += channelList.channels.count()
      site.markedChannels += channelList.channels
        .filter((channel: Channel) => channel.xmltv_id)
        .count()
    }

    sites.add(site)
  }

  logger.info('creating sites table...')
  const data = new Collection()
  const tableData = new Collection()
  sites.forEach((site: Site) => {
    data.add([
    tableData.add([
      { value: `<a href="sites/${site.domain}">${site.domain}</a>` },
      { value: site.totalChannels, align: 'right' },
      { value: site.markedChannels, align: 'right' },
@@ -55,7 +61,7 @@ async function main() {
  })

  logger.info('updating sites.md...')
  const table = new HTMLTable(data.all(), [
  const table = new HTMLTable(tableData.all(), [
    { name: 'Site', align: 'left' },
    { name: 'Channels<br>(total / with xmltv-id)', colspan: 2, align: 'left' },
    { name: 'Status', align: 'left' },