Update scripts

This commit is contained in:
freearhey
2025-10-22 02:27:22 +03:00
parent f701e0b830
commit 0b046f1f3c
50 changed files with 1655 additions and 2367 deletions

View File

@@ -1,41 +1,43 @@
import { Logger, Collection, Storage } from '@freearhey/core'
import { SITES_DIR, API_DIR } from '../../constants'
import { GuideChannel } from '../../models'
import { ChannelsParser } from '../../core'
import epgGrabber from 'epg-grabber'
import path from 'path'
/**
 * Collects every channel from the sites' *.channels.xml files and writes
 * them out as a single guides.json for the API.
 */
async function main() {
  const logger = new Logger()
  logger.start('starting...') // fixed typo: was 'staring...'
  logger.info('loading channels...')
  const sitesStorage = new Storage(SITES_DIR)
  const parser = new ChannelsParser({
    storage: sitesStorage
  })
  const files: string[] = await sitesStorage.list('**/*.channels.xml')
  const channels = new Collection()
  for (const filepath of files) {
    const channelList = await parser.parse(filepath)
    channelList.channels.forEach((data: epgGrabber.Channel) => {
      channels.add(new GuideChannel(data))
    })
  }
  logger.info(`found ${channels.count()} channel(s)`)
  const output = channels.map((channel: GuideChannel) => channel.toJSON())
  const apiStorage = new Storage(API_DIR)
  const outputFilename = 'guides.json'
  // use the constant for the save target so it cannot diverge from the logged path
  await apiStorage.save(outputFilename, output.toJSON())
  logger.info(`saved to "${path.join(API_DIR, outputFilename)}"`)
}
main()
import { ChannelGuideObject } from '../../types/channel'
import { SITES_DIR, API_DIR } from '../../constants'
import { Logger, Collection } from '@freearhey/core'
import epgGrabber, { EPGGrabber } from 'epg-grabber'
import { Storage } from '@freearhey/storage-js'
import { Channel } from '../../models'
import path from 'path'
/**
 * Collects every channel from the sites' *.channels.xml files and writes
 * them out as a single guides.json for the API.
 */
async function main() {
  const logger = new Logger()
  logger.start('starting...') // fixed typo: was 'staring...'
  logger.info('loading channels...')
  const sitesStorage = new Storage(SITES_DIR)
  const files: string[] = await sitesStorage.list('**/*.channels.xml')
  const channels = new Collection<Channel>()
  for (const filepath of files) {
    const xml = await sitesStorage.load(filepath)
    const parsedChannels = EPGGrabber.parseChannelsXML(xml)
    const channelsFromXML = new Collection(parsedChannels).map(
      (channel: epgGrabber.Channel) => new Channel(channel.toObject())
    )
    channelsFromXML.forEach((channel: Channel) => {
      channels.add(channel)
    })
  }
  logger.info(`found ${channels.count()} channel(s)`)
  const output = channels.map<ChannelGuideObject>((channel: Channel) => channel.getGuideObject())
  const apiStorage = new Storage(API_DIR)
  const outputFilename = 'guides.json'
  // use the constant for the save target so it cannot diverge from the logged path
  await apiStorage.save(outputFilename, output.toJSON())
  logger.info(`saved to "${path.join(API_DIR, outputFilename)}"`)
}
main()

View File

@@ -1,25 +1,7 @@
import { DATA_DIR } from '../../constants'
import { Storage } from '@freearhey/core'
import { DataLoader } from '../../core'
/**
 * Downloads every API data file into DATA_DIR, all in parallel.
 */
async function main() {
  const storage = new Storage(DATA_DIR)
  const loader = new DataLoader({ storage })
  const filenames = [
    'blocklist.json',
    'categories.json',
    'channels.json',
    'countries.json',
    'languages.json',
    'regions.json',
    'subdivisions.json',
    'feeds.json',
    'timezones.json',
    'guides.json',
    'streams.json',
    'logos.json'
  ]
  await Promise.all(filenames.map(filename => loader.download(filename)))
}
main()
import { downloadData } from '../../api'
// Thin CLI entry point: fetches all API data files via the shared api helper.
async function main() {
  await downloadData()
}
main()

View File

@@ -1,216 +1,200 @@
import { Storage, Collection, Logger, Dictionary } from '@freearhey/core'
import type { DataProcessorData } from '../../types/dataProcessor'
import type { DataLoaderData } from '../../types/dataLoader'
import { ChannelSearchableData } from '../../types/channel'
import { Channel, ChannelList, Feed } from '../../models'
import { DataProcessor, DataLoader } from '../../core'
import { select, input } from '@inquirer/prompts'
import { ChannelsParser } from '../../core'
import { DATA_DIR } from '../../constants'
import nodeCleanup from 'node-cleanup'
import sjs from '@freearhey/search-js'
import epgGrabber from 'epg-grabber'
import { Command } from 'commander'
import readline from 'readline'
// Value carried by each interactive menu entry.
interface ChoiceValue { type: string; value?: Feed | Channel }
// A single entry in the @inquirer/prompts select list.
interface Choice { name: string; short?: string; value: ChoiceValue; default?: boolean }
// On Windows, readline must re-emit SIGINT so Ctrl+C reaches the nodeCleanup hook below.
if (process.platform === 'win32') {
  readline
    .createInterface({
      input: process.stdin,
      output: process.stdout
    })
    .on('SIGINT', function () {
      process.emit('SIGINT')
    })
}
const program = new Command()
program.argument('<filepath>', 'Path to *.channels.xml file to edit').parse(process.argv)
const filepath = program.args[0]
const logger = new Logger()
const storage = new Storage()
// Mutated by main(); read by the nodeCleanup exit hook.
let channelList = new ChannelList({ channels: [] })
main(filepath)
// Persist whatever was edited, even when the process is interrupted.
nodeCleanup(() => {
  save(filepath, channelList)
})
/**
 * Interactively assigns xmltv_id values to channels in the given
 * *.channels.xml file that are still missing one.
 * @throws Error when the file does not exist
 */
export default async function main(filepath: string) {
  if (!(await storage.exists(filepath))) {
    throw new Error(`File "${filepath}" does not exist`) // fixed grammar: was 'does not exists'
  }
  logger.info('loading data from api...')
  const processor = new DataProcessor()
  const dataStorage = new Storage(DATA_DIR)
  const loader = new DataLoader({ storage: dataStorage })
  const data: DataLoaderData = await loader.load()
  const { channels, channelsKeyById, feedsGroupedByChannelId }: DataProcessorData =
    processor.process(data)
  logger.info('loading channels...')
  const parser = new ChannelsParser({ storage })
  channelList = await parser.parse(filepath)
  const parsedChannelsWithoutId = channelList.channels.filter(
    (channel: epgGrabber.Channel) => !channel.xmltv_id
  )
  logger.info(
    `found ${channelList.channels.count()} channels (including ${parsedChannelsWithoutId.count()} without ID)`
  )
  logger.info('creating search index...')
  const items = channels.map((channel: Channel) => channel.getSearchable()).all()
  const searchIndex = sjs.createIndex(items, {
    searchable: ['name', 'altNames', 'guideNames', 'streamNames', 'feedFullNames']
  })
  logger.info('starting...\n')
  for (const channel of parsedChannelsWithoutId.all()) {
    try {
      channel.xmltv_id = await selectChannel(
        channel,
        searchIndex,
        feedsGroupedByChannelId,
        channelsKeyById
      )
    } catch (err) {
      // Prompt aborted (e.g. Ctrl+C): stop asking, keep the answers so far.
      // err is unknown under strict mode, so narrow before reading .message.
      logger.info(err instanceof Error ? err.message : String(err))
      break
    }
  }
  // '-' is the sentinel for "skip"; clear it before the file is saved.
  parsedChannelsWithoutId.forEach((channel: epgGrabber.Channel) => {
    if (channel.xmltv_id === '-') {
      channel.xmltv_id = ''
    }
  })
}
/**
 * Prompts the user to pick a channel ID for one parsed channel: searches the
 * index for channels with a similar name, then lets the user pick one, type
 * an ID manually, or skip.
 * @returns "<channelId>@<feedId>", a bare channel ID, '-' when skipped, or ''
 */
async function selectChannel(
  channel: epgGrabber.Channel,
  searchIndex,
  feedsGroupedByChannelId: Dictionary,
  channelsKeyById: Dictionary
): Promise<string> {
  // escape the name so it is treated as a literal search query
  const query = escapeRegex(channel.name)
  const similarChannels = searchIndex
    .search(query)
    .map((item: ChannelSearchableData) => channelsKeyById.get(item.id))
  const selected: ChoiceValue = await select({
    message: `Select channel ID for "${channel.name}" (${channel.site_id}):`,
    choices: getChannelChoises(new Collection(similarChannels)),
    pageSize: 10
  })
  switch (selected.type) {
    case 'skip':
      return '-'
    case 'type': {
      // user types the channel ID by hand, then picks a feed for it
      const typedChannelId = await input({ message: ' Channel ID:' })
      if (!typedChannelId) return ''
      const selectedFeedId = await selectFeed(typedChannelId, feedsGroupedByChannelId)
      if (selectedFeedId === '-') return typedChannelId
      return [typedChannelId, selectedFeedId].join('@')
    }
    case 'channel': {
      const selectedChannel = selected.value
      if (!selectedChannel) return ''
      const selectedFeedId = await selectFeed(selectedChannel.id || '', feedsGroupedByChannelId)
      if (selectedFeedId === '-') return selectedChannel.id || ''
      return [selectedChannel.id, selectedFeedId].join('@')
    }
  }
  return ''
}
/**
 * Asks the user to pick a feed ID for the given channel ID.
 * @returns the feed ID, '-' when skipped, or '' when none selected
 */
async function selectFeed(channelId: string, feedsGroupedByChannelId: Dictionary): Promise<string> {
  const channelFeeds = feedsGroupedByChannelId.has(channelId)
    ? new Collection(feedsGroupedByChannelId.get(channelId))
    : new Collection()
  const choices = getFeedChoises(channelFeeds)
  const selected: ChoiceValue = await select({
    message: `Select feed ID for "${channelId}":`,
    choices,
    pageSize: 10
  })
  switch (selected.type) {
    case 'skip':
      return '-'
    case 'type':
      return await input({ message: ' Feed ID:', default: 'SD' })
    case 'feed': {
      // braces added: a lexical declaration is not allowed directly in a case clause
      const selectedFeed = selected.value
      if (!selectedFeed) return ''
      return selectedFeed.id || ''
    }
  }
  return ''
}
/** Builds the select-menu options for a list of similar channels. */
function getChannelChoises(channels: Collection): Choice[] {
  const options: Choice[] = []
  channels.forEach((channel: Channel) => {
    const displayNames = new Collection([channel.name, ...channel.getAltNames().all()])
      .uniq()
      .join(', ')
    options.push({
      name: `${channel.id} (${displayNames})`,
      short: `${channel.id}`,
      value: { type: 'channel', value: channel }
    })
  })
  options.push({ name: 'Type...', value: { type: 'type' } })
  options.push({ name: 'Skip', value: { type: 'skip' } })
  return options
}
/** Builds the select-menu options for a channel's feeds; the main feed is preselected. */
function getFeedChoises(feeds: Collection): Choice[] {
  const options: Choice[] = []
  feeds.forEach((feed: Feed) => {
    const label = feed.isMain ? `${feed.id} (${feed.name}) [main]` : `${feed.id} (${feed.name})`
    options.push({
      name: label,
      short: feed.id,
      default: feed.isMain,
      value: { type: 'feed', value: feed }
    })
  })
  options.push({ name: 'Type...', value: { type: 'type' } })
  options.push({ name: 'Skip', value: { type: 'skip' } })
  return options
}
/** Writes the channel list back to disk; called from the nodeCleanup exit hook. */
function save(filepath: string, channelList: ChannelList) {
  if (!storage.existsSync(filepath)) return
  const serialized = channelList.toString()
  storage.saveSync(filepath, serialized)
  logger.info(`\nFile '${filepath}' successfully saved`)
}
/** Escapes regex metacharacters so the string can be used as a literal search query. */
function escapeRegex(string: string) {
  const specialChars = /[/\-\\^$*+?.()|[\]{}]/g
  return string.replace(specialChars, '\\$&')
}
import { loadData, data, searchChannels } from '../../api'
import epgGrabber, { EPGGrabber } from 'epg-grabber'
import { Collection, Logger } from '@freearhey/core'
import { select, input } from '@inquirer/prompts'
import { generateChannelsXML } from '../../core'
import { Storage } from '@freearhey/storage-js'
import { Channel } from '../../models'
import nodeCleanup from 'node-cleanup'
import * as sdk from '@iptv-org/sdk'
import { Command } from 'commander'
import readline from 'readline'
// Value carried by each interactive menu entry.
interface ChoiceValue {
  type: string
  value?: sdk.Models.Feed | sdk.Models.Channel
}
// A single entry in the @inquirer/prompts select list.
interface Choice {
  name: string
  short?: string
  value: ChoiceValue
  default?: boolean
}
// On Windows, readline must re-emit SIGINT so Ctrl+C reaches the nodeCleanup hook below.
if (process.platform === 'win32') {
  readline
    .createInterface({
      input: process.stdin,
      output: process.stdout
    })
    .on('SIGINT', function () {
      process.emit('SIGINT')
    })
}
const program = new Command()
program.argument('<filepath>', 'Path to *.channels.xml file to edit').parse(process.argv)
const filepath = program.args[0]
const logger = new Logger()
const storage = new Storage()
// Mutated by main(); read by the nodeCleanup exit hook.
let channelsFromXML = new Collection<Channel>()
main(filepath)
// Persist whatever was edited, even when the process is interrupted.
nodeCleanup(() => {
  save(filepath, channelsFromXML)
})
/**
 * Interactively assigns xmltv_id values to channels in the given
 * *.channels.xml file that are still missing one.
 * @throws Error when the file does not exist
 */
export default async function main(filepath: string) {
  if (!(await storage.exists(filepath))) {
    throw new Error(`File "${filepath}" does not exist`) // fixed grammar: was 'does not exists'
  }
  logger.info('loading data from api...')
  await loadData()
  logger.info('loading channels...')
  const xml = await storage.load(filepath)
  const parsedChannels = EPGGrabber.parseChannelsXML(xml)
  channelsFromXML = new Collection(parsedChannels).map(
    (channel: epgGrabber.Channel) => new Channel(channel.toObject())
  )
  const channelsFromXMLWithoutId = channelsFromXML.filter((channel: Channel) => !channel.xmltv_id)
  logger.info(
    `found ${channelsFromXML.count()} channels (including ${channelsFromXMLWithoutId.count()} without ID)`
  )
  logger.info('starting...')
  console.log()
  for (const channel of channelsFromXMLWithoutId.all()) {
    try {
      channel.xmltv_id = await selectChannel(channel)
    } catch {
      // prompt aborted (e.g. Ctrl+C): stop asking, keep the answers so far
      break
    }
  }
  // '-' is the sentinel for "skip"; clear it before the file is saved.
  channelsFromXMLWithoutId.forEach((channel: epgGrabber.Channel) => {
    if (channel.xmltv_id === '-') {
      channel.xmltv_id = ''
    }
  })
}
/**
 * Prompts the user to pick a channel ID for one parsed channel: searches the
 * API data for channels with a similar name, then lets the user pick one,
 * type an ID manually, or skip.
 * @returns "<channelId>@<feedId>", a bare channel ID, '-' when skipped, or ''
 */
async function selectChannel(channel: epgGrabber.Channel): Promise<string> {
  // escape the name so it is treated as a literal search query
  const query = escapeRegex(channel.name)
  const similarChannels = searchChannels(query)
  const choices = getChoicesForChannel(similarChannels).all()
  const selected: ChoiceValue = await select({
    message: `Select channel ID for "${channel.name}" (${channel.site_id}):`,
    choices,
    pageSize: 10
  })
  switch (selected.type) {
    case 'skip':
      return '-'
    case 'type': {
      // user types the channel ID by hand, then picks a feed for it
      const typedChannelId = await input({ message: ' Channel ID:' })
      if (!typedChannelId) return ''
      const selectedFeedId = await selectFeed(typedChannelId)
      if (selectedFeedId === '-') return typedChannelId
      return [typedChannelId, selectedFeedId].join('@')
    }
    case 'channel': {
      const selectedChannel = selected.value
      if (!selectedChannel) return ''
      const selectedFeedId = await selectFeed(selectedChannel.id || '')
      if (selectedFeedId === '-') return selectedChannel.id || ''
      return [selectedChannel.id, selectedFeedId].join('@')
    }
  }
  return ''
}
/**
 * Asks the user to pick a feed ID for the given channel ID.
 * @returns the feed ID, '-' when skipped, or '' when none selected
 */
async function selectFeed(channelId: string): Promise<string> {
  const channelFeeds = new Collection(data.feedsGroupedByChannelId.get(channelId))
  const choices = getChoicesForFeed(channelFeeds).all()
  const selected: ChoiceValue = await select({
    message: `Select feed ID for "${channelId}":`,
    choices,
    pageSize: 10
  })
  switch (selected.type) {
    case 'skip':
      return '-'
    case 'type':
      return await input({ message: ' Feed ID:', default: 'SD' })
    case 'feed': {
      // braces added: a lexical declaration is not allowed directly in a case clause
      const selectedFeed = selected.value
      if (!selectedFeed) return ''
      return selectedFeed.id || ''
    }
  }
  return ''
}
/** Builds the select-menu options for a list of similar channels. */
function getChoicesForChannel(channels: Collection<sdk.Models.Channel>): Collection<Choice> {
  const options = new Collection<Choice>()
  channels.forEach((channel: sdk.Models.Channel) => {
    const displayNames = new Collection([channel.name, ...channel.alt_names]).uniq().join(', ')
    options.add({
      name: `${channel.id} (${displayNames})`,
      short: `${channel.id}`,
      value: { type: 'channel', value: channel }
    })
  })
  options.add({ name: 'Type...', value: { type: 'type' } })
  options.add({ name: 'Skip', value: { type: 'skip' } })
  return options
}
/** Builds the select-menu options for a channel's feeds; the main feed is preselected. */
function getChoicesForFeed(feeds: Collection<sdk.Models.Feed>): Collection<Choice> {
  const options = new Collection<Choice>()
  feeds.forEach((feed: sdk.Models.Feed) => {
    const label = feed.is_main ? `${feed.id} (${feed.name}) [main]` : `${feed.id} (${feed.name})`
    options.add({
      name: label,
      short: feed.id,
      default: feed.is_main,
      value: { type: 'feed', value: feed }
    })
  })
  options.add({ name: 'Type...', value: { type: 'type' } })
  options.add({ name: 'Skip', value: { type: 'skip' } })
  return options
}
/** Serializes the channels back to XML and writes the file; called from the exit hook. */
function save(filepath: string, channelsFromXML: Collection<Channel>) {
  if (!storage.existsSync(filepath)) return
  storage.saveSync(filepath, generateChannelsXML(channelsFromXML))
  console.log()
  logger.info(`File '${filepath}' successfully saved`)
}
/** Escapes regex metacharacters so the string can be used as a literal search query. */
function escapeRegex(string: string) {
  const metacharacters = /[/\-\\^$*+?.()|[\]{}]/g
  return string.replace(metacharacters, '\\$&')
}

View File

@@ -0,0 +1,60 @@
import { Collection, Logger } from '@freearhey/core'
import epgGrabber, { EPGGrabber } from 'epg-grabber'
import { generateChannelsXML } from '../../core'
import { Storage } from '@freearhey/storage-js'
import { SITES_DIR } from '../../constants'
import { data, loadData } from '../../api'
import { Channel } from '../../models'
import { program } from 'commander'
program.argument('[filepath...]', 'Path to file to format').parse(process.argv)
/**
 * Normalizes *.channels.xml files: remaps channel-only IDs to their main
 * feed's stream ID, clears unknown IDs, sorts channels, and rewrites each file.
 */
async function main() {
  const logger = new Logger()
  logger.info('loading data from api...')
  await loadData()
  logger.info('loading *.channels.xml files...')
  const storage = new Storage()
  const files = program.args.length
    ? program.args
    : await storage.list(`${SITES_DIR}/**/*.channels.xml`)
  logger.info(`found ${files.length} file(s)`)
  logger.info('formatting...') // fixed typo: was 'formating...'
  for (const filepath of files) {
    if (!storage.existsSync(filepath)) continue
    const xml = await storage.load(filepath)
    const parsedChannels = EPGGrabber.parseChannelsXML(xml)
    const channelsFromXML = new Collection(parsedChannels).map(
      (channel: epgGrabber.Channel) => new Channel(channel.toObject())
    )
    channelsFromXML.forEach((channel: Channel) => {
      if (!channel.xmltv_id) return
      // already a valid stream ID ("channel@feed") — leave it alone
      if (data.feedsKeyByStreamId.get(channel.xmltv_id)) return
      const channelData = data.channelsKeyById.get(channel.xmltv_id)
      if (channelData) {
        const mainFeed = channelData.getMainFeed()
        if (mainFeed) {
          channel.xmltv_id = mainFeed.getStreamId()
          return
        }
      }
      // ID not found in the API data: clear it
      channel.xmltv_id = ''
    })
    // NOTE(review): the return value is discarded — assumes Collection.sortBy
    // sorts in place; verify against @freearhey/core
    channelsFromXML.sortBy((channel: Channel) => channel.site_id)
    const output = generateChannelsXML(channelsFromXML)
    await storage.save(filepath, output)
  }
}
main()

View File

@@ -1,109 +1,109 @@
import chalk from 'chalk'
import { program } from 'commander'
import { Storage, File } from '@freearhey/core'
import { XmlDocument, XsdValidator, XmlValidateError, ErrorDetail } from 'libxml2-wasm'
// XSD schema for *.channels.xml: a <channels> root containing zero or more
// <channel> elements; site, lang and site_id must be non-empty strings,
// xmltv_id is required (but may be empty), logo and lcn are optional.
const xsd = `<?xml version="1.0" encoding="UTF-8"?>
<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified">
<xs:element name="channels">
<xs:complexType>
<xs:sequence>
<xs:element minOccurs="0" maxOccurs="unbounded" ref="channel"/>
</xs:sequence>
</xs:complexType>
</xs:element>
<xs:element name="channel">
<xs:complexType mixed="true">
<xs:attribute use="required" ref="site"/>
<xs:attribute use="required" ref="lang"/>
<xs:attribute use="required" ref="site_id"/>
<xs:attribute name="xmltv_id" use="required" type="xs:string"/>
<xs:attribute name="logo" type="xs:string"/>
<xs:attribute name="lcn" type="xs:string"/>
</xs:complexType>
</xs:element>
<xs:attribute name="site">
<xs:simpleType>
<xs:restriction base="xs:string">
<xs:minLength value="1"/>
</xs:restriction>
</xs:simpleType>
</xs:attribute>
<xs:attribute name="site_id">
<xs:simpleType>
<xs:restriction base="xs:string">
<xs:minLength value="1"/>
</xs:restriction>
</xs:simpleType>
</xs:attribute>
<xs:attribute name="lang">
<xs:simpleType>
<xs:restriction base="xs:string">
<xs:minLength value="1"/>
</xs:restriction>
</xs:simpleType>
</xs:attribute>
</xs:schema>`
program.argument('[filepath...]', 'Path to *.channels.xml files to check').parse(process.argv)
async function main() {
const storage = new Storage()
let errors: ErrorDetail[] = []
const files = program.args.length ? program.args : await storage.list('sites/**/*.channels.xml')
for (const filepath of files) {
const file = new File(filepath)
if (file.extension() !== 'xml') continue
const xml = await storage.load(filepath)
let localErrors: ErrorDetail[] = []
try {
const schema = XmlDocument.fromString(xsd)
const validator = XsdValidator.fromDoc(schema)
const doc = XmlDocument.fromString(xml)
validator.validate(doc)
schema.dispose()
validator.dispose()
doc.dispose()
} catch (_error) {
const error = _error as XmlValidateError
localErrors = localErrors.concat(error.details)
}
xml.split('\n').forEach((line: string, lineIndex: number) => {
const found = line.match(/='/)
if (found) {
const colIndex = found.index || 0
localErrors.push({
line: lineIndex + 1,
col: colIndex + 1,
message: 'Single quotes cannot be used in attributes'
})
}
})
if (localErrors.length) {
console.log(`\n${chalk.underline(filepath)}`)
localErrors.forEach((error: ErrorDetail) => {
const position = `${error.line}:${error.col}`
console.log(` ${chalk.gray(position.padEnd(4, ' '))} ${error.message.trim()}`)
})
errors = errors.concat(localErrors)
}
}
if (errors.length) {
console.log(chalk.red(`\n${errors.length} error(s)`))
process.exit(1)
}
}
main()
import { XmlDocument, XsdValidator, XmlValidateError, ErrorDetail } from 'libxml2-wasm'
import { Storage, File } from '@freearhey/storage-js'
import { program } from 'commander'
import chalk from 'chalk'
// XSD schema for *.channels.xml: a <channels> root containing zero or more
// <channel> elements; site, lang and site_id must be non-empty strings,
// xmltv_id is required (but may be empty), logo and lcn are optional.
const xsd = `<?xml version="1.0" encoding="UTF-8"?>
<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified">
<xs:element name="channels">
<xs:complexType>
<xs:sequence>
<xs:element minOccurs="0" maxOccurs="unbounded" ref="channel"/>
</xs:sequence>
</xs:complexType>
</xs:element>
<xs:element name="channel">
<xs:complexType mixed="true">
<xs:attribute use="required" ref="site"/>
<xs:attribute use="required" ref="lang"/>
<xs:attribute use="required" ref="site_id"/>
<xs:attribute name="xmltv_id" use="required" type="xs:string"/>
<xs:attribute name="logo" type="xs:string"/>
<xs:attribute name="lcn" type="xs:string"/>
</xs:complexType>
</xs:element>
<xs:attribute name="site">
<xs:simpleType>
<xs:restriction base="xs:string">
<xs:minLength value="1"/>
</xs:restriction>
</xs:simpleType>
</xs:attribute>
<xs:attribute name="site_id">
<xs:simpleType>
<xs:restriction base="xs:string">
<xs:minLength value="1"/>
</xs:restriction>
</xs:simpleType>
</xs:attribute>
<xs:attribute name="lang">
<xs:simpleType>
<xs:restriction base="xs:string">
<xs:minLength value="1"/>
</xs:restriction>
</xs:simpleType>
</xs:attribute>
</xs:schema>`
program.argument('[filepath...]', 'Path to *.channels.xml files to check').parse(process.argv)
async function main() {
const storage = new Storage()
let errors: ErrorDetail[] = []
const files = program.args.length ? program.args : await storage.list('sites/**/*.channels.xml')
for (const filepath of files) {
const file = new File(filepath)
if (file.extension() !== 'xml') continue
const xml = await storage.load(filepath)
let localErrors: ErrorDetail[] = []
try {
const schema = XmlDocument.fromString(xsd)
const validator = XsdValidator.fromDoc(schema)
const doc = XmlDocument.fromString(xml)
validator.validate(doc)
schema.dispose()
validator.dispose()
doc.dispose()
} catch (_error) {
const error = _error as XmlValidateError
localErrors = localErrors.concat(error.details)
}
xml.split('\n').forEach((line: string, lineIndex: number) => {
const found = line.match(/='/)
if (found) {
const colIndex = found.index || 0
localErrors.push({
line: lineIndex + 1,
col: colIndex + 1,
message: 'Single quotes cannot be used in attributes'
})
}
})
if (localErrors.length) {
console.log(`\n${chalk.underline(filepath)}`)
localErrors.forEach((error: ErrorDetail) => {
const position = `${error.line}:${error.col}`
console.log(` ${chalk.gray(position.padEnd(4, ' '))} ${error.message.trim()}`)
})
errors = errors.concat(localErrors)
}
}
if (errors.length) {
console.log(chalk.red(`\n${errors.length} error(s)`))
process.exit(1)
}
}
main()

View File

@@ -1,86 +1,119 @@
import { Logger, File, Storage } from '@freearhey/core'
import { ChannelsParser } from '../../core'
import { ChannelList } from '../../models'
import { pathToFileURL } from 'node:url'
import epgGrabber from 'epg-grabber'
import { Command } from 'commander'
const program = new Command()
program
  .requiredOption('-c, --config <config>', 'Config file')
  .option('-s, --set [args...]', 'Set custom arguments')
  .option('-o, --output <output>', 'Output file')
  .parse(process.argv)
// Parsed CLI options.
interface ParseOptions {
  config: string
  // NOTE(review): '--set' is variadic, so at runtime this is string[] —
  // the declared type says string; verify
  set?: string
  output?: string
  clean?: boolean
}
const options: ParseOptions = program.opts()
/**
 * Regenerates a site's *.channels.xml from its config's channels() list,
 * preserving xmltv_id/lang from an existing channels file when present.
 */
async function main() {
  // config.channels() may be sync or async; detect a thenable
  function isPromise(promise: object[] | Promise<object[]>) {
    return (
      !!promise &&
      typeof promise === 'object' &&
      typeof (promise as Promise<object[]>).then === 'function'
    )
  }
  const storage = new Storage()
  const logger = new Logger()
  const parser = new ChannelsParser({ storage })
  const file = new File(options.config)
  const dir = file.dirname()
  const config = (await import(pathToFileURL(options.config).toString())).default
  const outputFilepath = options.output || `${dir}/${config.site}.channels.xml`
  let channelList = new ChannelList({ channels: [] })
  if (await storage.exists(outputFilepath)) {
    channelList = await parser.parse(outputFilepath)
  }
  const args: Record<string, string> = {}
  if (Array.isArray(options.set)) {
    options.set.forEach((arg: string) => {
      // split on the first ':' only, so values may themselves contain colons
      // (previously everything after the second ':' was silently dropped)
      const [key, ...rest] = arg.split(':')
      args[key] = rest.join(':')
    })
  }
  let parsedChannels = config.channels(args)
  if (isPromise(parsedChannels)) {
    parsedChannels = await parsedChannels
  }
  parsedChannels = parsedChannels.map((channel: epgGrabber.Channel) => {
    channel.site = config.site
    return channel
  })
  const newChannelList = new ChannelList({ channels: [] })
  parsedChannels.forEach((channel: epgGrabber.Channel) => {
    if (!channel.site_id) return
    // keep the manually curated fields from the previous file
    const found: epgGrabber.Channel | undefined = channelList.get(channel.site_id)
    if (found) {
      channel.xmltv_id = found.xmltv_id
      channel.lang = found.lang
    }
    newChannelList.add(channel)
  })
  newChannelList.sort()
  await storage.save(outputFilepath, newChannelList.toString())
  logger.info(`File '${outputFilepath}' successfully saved`)
}
main()
import { Storage, File } from '@freearhey/storage-js'
import { Collection, Logger } from '@freearhey/core'
import epgGrabber, { EPGGrabber } from 'epg-grabber'
import { generateChannelsXML } from '../../core'
import { pathToFileURL } from 'node:url'
import { Channel } from '../../models'
import { Command } from 'commander'
// Shape of one channel entry returned by a site config's channels() function.
interface SiteConfigChannelData {
  xmltv_id: string
  name: string
  site_id: string
  lang?: string
  logo?: string
  url?: string
  lcn?: string
}
const program = new Command()
program
  .requiredOption('-c, --config <config>', 'Config file')
  .option('-s, --set [args...]', 'Set custom arguments')
  .option('-o, --output <output>', 'Output file')
  .parse(process.argv)
// Parsed CLI options.
interface ParseOptions {
  config: string
  // NOTE(review): '--set' is variadic, so at runtime this is string[] —
  // the declared type says string; verify
  set?: string
  output?: string
  clean?: boolean
}
const options: ParseOptions = program.opts()
/**
 * Regenerates a site's *.channels.xml from its config's channels() list,
 * preserving xmltv_id/lang from an existing channels file when present.
 */
async function main() {
  // config.channels() may be sync or async; detect a thenable
  function isPromise(promise: object[] | Promise<object[]>) {
    return (
      !!promise &&
      typeof promise === 'object' &&
      typeof (promise as Promise<object[]>).then === 'function'
    )
  }
  const storage = new Storage()
  const logger = new Logger()
  const file = new File(options.config)
  const dir = file.dirname()
  const config = (await import(pathToFileURL(options.config).toString())).default
  const outputFilepath = options.output || `${dir}/${config.site}.channels.xml`
  const args: Record<string, string> = {}
  if (Array.isArray(options.set)) {
    options.set.forEach((arg: string) => {
      // split on the first ':' only, so values may themselves contain colons
      // (previously everything after the second ':' was silently dropped)
      const [key, ...rest] = arg.split(':')
      args[key] = rest.join(':')
    })
  }
  let channelsFromXML = new Collection<Channel>()
  if (await storage.exists(outputFilepath)) {
    const xml = await storage.load(outputFilepath)
    const parsedChannels = EPGGrabber.parseChannelsXML(xml)
    channelsFromXML = new Collection(parsedChannels).map(
      (channel: epgGrabber.Channel) => new Channel(channel.toObject())
    )
  }
  let configChannels = config.channels(args)
  if (isPromise(configChannels)) {
    configChannels = await configChannels
  }
  const channelsFromConfig = new Collection<SiteConfigChannelData>(configChannels).map(
    (data: SiteConfigChannelData) => {
      return new Channel({
        xmltv_id: data.xmltv_id,
        name: data.name,
        site_id: data.site_id,
        lang: data.lang || null,
        logo: data.logo || null,
        url: data.url || null,
        lcn: data.lcn || null,
        site: config.site,
        index: -1
      })
    }
  )
  const newChannelList = new Collection<Channel>()
  channelsFromConfig.forEach((channel: Channel) => {
    if (!channel.site_id) return
    // keep the manually curated fields from the previous file
    const found: Channel | undefined = channelsFromXML.find(
      (_channel: Channel) => _channel.site_id === channel.site_id // was loose '=='
    )
    if (found) {
      channel.xmltv_id = found.xmltv_id
      channel.lang = found.lang
    }
    newChannelList.add(channel)
  })
  // sort by language, then ID (case-insensitive), then site_id
  newChannelList.sortBy([
    (channel: Channel) => channel.lang || '_',
    (channel: Channel) => (channel.xmltv_id ? channel.xmltv_id.toLowerCase() : '0'),
    (channel: Channel) => channel.site_id
  ])
  const xml = generateChannelsXML(newChannelList)
  await storage.save(outputFilepath, xml)
  logger.info(`File '${outputFilepath}' successfully saved`)
}
main()

View File

@@ -1,100 +1,96 @@
import { ChannelsParser, DataLoader, DataProcessor } from '../../core'
import { DataProcessorData } from '../../types/dataProcessor'
import { Storage, Dictionary, File } from '@freearhey/core'
import { DataLoaderData } from '../../types/dataLoader'
import { ChannelList } from '../../models'
import { DATA_DIR } from '../../constants'
import epgGrabber from 'epg-grabber'
import { program } from 'commander'
import chalk from 'chalk'
import langs from 'langs'
program.argument('[filepath...]', 'Path to *.channels.xml files to validate').parse(process.argv)
// One row in the console.table report; `type` is the violation class,
// the remaining fields are copied from the offending channel.
interface ValidationError {
  type: 'duplicate' | 'wrong_channel_id' | 'wrong_feed_id' | 'wrong_lang'
  name: string
  lang?: string
  xmltv_id?: string
  site_id?: string
  logo?: string
}
/**
 * Validates *.channels.xml files against the API data: duplicate site_id
 * entries and invalid languages are errors; unknown channel or feed IDs are
 * warnings. Exits with code 1 when any errors were found.
 */
async function main() {
  const processor = new DataProcessor()
  const dataStorage = new Storage(DATA_DIR)
  const loader = new DataLoader({ storage: dataStorage })
  const data: DataLoaderData = await loader.load()
  const { channelsKeyById, feedsKeyByStreamId }: DataProcessorData = processor.process(data)
  const parser = new ChannelsParser({
    storage: new Storage()
  })
  let totalFiles = 0
  let totalErrors = 0
  let totalWarnings = 0
  const storage = new Storage()
  const files = program.args.length ? program.args : await storage.list('sites/**/*.channels.xml')
  for (const filepath of files) {
    const file = new File(filepath)
    if (file.extension() !== 'xml') continue
    const channelList: ChannelList = await parser.parse(filepath)
    // tracks site_id values already seen in this file to detect duplicates
    const bufferBySiteId = new Dictionary()
    const errors: ValidationError[] = []
    channelList.channels.forEach((channel: epgGrabber.Channel) => {
      const bufferId: string = channel.site_id
      if (bufferBySiteId.missing(bufferId)) {
        bufferBySiteId.set(bufferId, true)
      } else {
        errors.push({ type: 'duplicate', ...channel })
        totalErrors++
      }
      // lang must be a valid ISO 639-1 code
      if (!langs.where('1', channel.lang ?? '')) {
        errors.push({ type: 'wrong_lang', ...channel })
        totalErrors++
      }
      if (!channel.xmltv_id) return
      // xmltv_id may be "<channelId>" or "<channelId>@<feedId>"
      const [channelId, feedId] = channel.xmltv_id.split('@')
      const foundChannel = channelsKeyById.get(channelId)
      if (!foundChannel) {
        errors.push({ type: 'wrong_channel_id', ...channel })
        totalWarnings++
      }
      if (feedId) {
        const foundFeed = feedsKeyByStreamId.get(channel.xmltv_id)
        if (!foundFeed) {
          errors.push({ type: 'wrong_feed_id', ...channel })
          totalWarnings++
        }
      }
    })
    if (errors.length) {
      console.log(chalk.underline(filepath))
      console.table(errors, ['type', 'lang', 'xmltv_id', 'site_id', 'name'])
      console.log()
      totalFiles++
    }
  }
  const totalProblems = totalWarnings + totalErrors
  if (totalProblems > 0) {
    console.log(
      chalk.red(
        `${totalProblems} problems (${totalErrors} errors, ${totalWarnings} warnings) in ${totalFiles} file(s)`
      )
    )
    // warnings alone do not fail the run
    if (totalErrors > 0) {
      process.exit(1)
    }
  }
}
main()
import { Collection, Dictionary } from '@freearhey/core'
import { Storage, File } from '@freearhey/storage-js'
import epgGrabber, { EPGGrabber } from 'epg-grabber'
import { loadData, data } from '../../api'
import { Channel } from '../../models'
import { program } from 'commander'
import chalk from 'chalk'
import langs from 'langs'
program.argument('[filepath...]', 'Path to *.channels.xml files to validate').parse(process.argv)
// One row in the console.table report; `type` is the violation class,
// the remaining fields are copied from the offending channel.
interface ValidationError {
  type: 'duplicate' | 'wrong_channel_id' | 'wrong_feed_id' | 'wrong_lang'
  name: string
  lang: string | null
  xmltv_id: string | null
  site_id: string | null
  logo: string | null
}
/**
 * Validates *.channels.xml files against the API data: duplicate site_id
 * entries and invalid languages are errors; unknown channel or feed IDs are
 * warnings. Exits with code 1 when any errors were found.
 */
async function main() {
  await loadData()
  const { channelsKeyById, feedsKeyByStreamId } = data
  let totalFiles = 0
  let totalErrors = 0
  let totalWarnings = 0
  const storage = new Storage()
  const files = program.args.length ? program.args : await storage.list('sites/**/*.channels.xml')
  for (const filepath of files) {
    const file = new File(filepath)
    if (file.extension() !== 'xml') continue
    const xml = await storage.load(filepath)
    const parsedChannels = EPGGrabber.parseChannelsXML(xml)
    const channelList = new Collection(parsedChannels).map(
      (channel: epgGrabber.Channel) => new Channel(channel.toObject())
    )
    // tracks site_id values already seen in this file to detect duplicates
    const bufferBySiteId = new Dictionary()
    const errors: ValidationError[] = []
    channelList.forEach((channel: Channel) => {
      const bufferId: string = channel.site_id
      if (bufferBySiteId.missing(bufferId)) {
        bufferBySiteId.set(bufferId, true)
      } else {
        errors.push({ type: 'duplicate', ...channel.toObject() })
        totalErrors++
      }
      // lang must be a valid ISO 639-1 code
      if (!langs.where('1', channel.lang ?? '')) {
        errors.push({ type: 'wrong_lang', ...channel.toObject() })
        totalErrors++
      }
      if (!channel.xmltv_id) return
      // xmltv_id may be "<channelId>" or "<channelId>@<feedId>"
      const [channelId, feedId] = channel.xmltv_id.split('@')
      const foundChannel = channelsKeyById.get(channelId)
      if (!foundChannel) {
        errors.push({ type: 'wrong_channel_id', ...channel.toObject() })
        totalWarnings++
      }
      if (feedId) {
        const foundFeed = feedsKeyByStreamId.get(channel.xmltv_id)
        if (!foundFeed) {
          errors.push({ type: 'wrong_feed_id', ...channel.toObject() })
          totalWarnings++
        }
      }
    })
    if (errors.length) {
      console.log(chalk.underline(filepath))
      console.table(errors, ['type', 'lang', 'xmltv_id', 'site_id', 'name'])
      console.log()
      totalFiles++
    }
  }
  const totalProblems = totalWarnings + totalErrors
  if (totalProblems > 0) {
    console.log(
      chalk.red(
        `${totalProblems} problems (${totalErrors} errors, ${totalWarnings} warnings) in ${totalFiles} file(s)`
      )
    )
    // warnings alone do not fail the run
    if (totalErrors > 0) {
      process.exit(1)
    }
  }
}
main()

View File

@@ -1,133 +1,289 @@
import { Logger, Timer, Storage, Collection } from '@freearhey/core'
import { QueueCreator, Job, ChannelsParser } from '../../core'
import { Option, program } from 'commander'
import { SITES_DIR } from '../../constants'
import { Channel } from 'epg-grabber'
import path from 'path'
import { ChannelList } from '../../models'
// CLI definition for the grab command. Each option may also be supplied via
// the environment variable named in the corresponding .env() call.
program
  .addOption(new Option('-s, --site <name>', 'Name of the site to parse'))
  .addOption(
    new Option(
      '-c, --channels <path>',
      'Path to *.channels.xml file (required if the "--site" attribute is not specified)'
    )
  )
  .addOption(new Option('-o, --output <path>', 'Path to output file').default('guide.xml'))
  .addOption(new Option('-l, --lang <codes>', 'Filter channels by languages (ISO 639-1 codes)'))
  // No argParser here: --timeout and --delay reach the code as raw strings.
  .addOption(
    new Option('-t, --timeout <milliseconds>', 'Override the default timeout for each request').env(
      'TIMEOUT'
    )
  )
  .addOption(
    new Option('-d, --delay <milliseconds>', 'Override the default delay between request').env(
      'DELAY'
    )
  )
  .addOption(new Option('-x, --proxy <url>', 'Use the specified proxy').env('PROXY'))
  .addOption(
    new Option(
      '--days <days>',
      'Override the number of days for which the program will be loaded (defaults to the value from the site config)'
    )
      .argParser(value => parseInt(value))
      .env('DAYS')
  )
  // NOTE(review): --maxConnections has no argParser, so a CLI-provided value
  // arrives as a string even though GrabOptions declares it as number.
  .addOption(
    new Option('--maxConnections <number>', 'Limit on the number of concurrent requests')
      .default(1)
      .env('MAX_CONNECTIONS')
  )
  .addOption(
    new Option('--gzip', 'Create a compressed version of the guide as well')
      .default(false)
      .env('GZIP')
  )
  .addOption(new Option('--curl', 'Display each request as CURL').default(false).env('CURL'))
  .parse()
// Parsed CLI options for the grab command.
export interface GrabOptions {
  site?: string // --site: grab every channel list under SITES_DIR/<site>
  channels?: string // --channels: glob path to specific *.channels.xml files
  output: string // --output: guide output path (default "guide.xml")
  gzip: boolean // --gzip: also write a gzip-compressed guide
  curl: boolean // --curl: print each request as a curl command
  maxConnections: number // --maxConnections: concurrent request limit
  timeout?: string // raw string; no argParser is applied to --timeout
  delay?: string // raw string; no argParser is applied to --delay
  lang?: string // --lang: ISO 639-1 code(s) used to filter channels
  days?: number // --days: parsed to a number by its argParser
  proxy?: string // --proxy: proxy URL
}
const options: GrabOptions = program.opts()
/**
 * Entry point: loads channel lists (either every list under --site's folder
 * or the explicit --channels path), optionally filters them by language, and
 * hands them off to the grabbing job.
 * @throws when neither --site nor --channels is provided
 */
async function main() {
  if (!options.site && !options.channels)
    throw new Error('One of the arguments must be presented: `--site` or `--channels`')
  const logger = new Logger()
  logger.start('starting...')
  logger.info('config:')
  logger.tree(options)
  logger.info('loading channels...')
  const storage = new Storage()
  const parser = new ChannelsParser({ storage })
  let files: string[] = []
  if (options.site) {
    // Normalize to forward slashes so the glob also works on Windows paths.
    let pattern = path.join(SITES_DIR, options.site, '*.channels.xml')
    pattern = pattern.replace(/\\/g, '/')
    files = await storage.list(pattern)
  } else if (options.channels) {
    files = await storage.list(options.channels)
  }
  let channels = new Collection()
  for (const filepath of files) {
    const channelList: ChannelList = await parser.parse(filepath)
    channels = channels.concat(channelList.channels)
  }
  if (options.lang) {
    channels = channels.filter((channel: Channel) => {
      // Channels without a lang attribute are always kept.
      if (!options.lang || !channel.lang) return true
      return options.lang.includes(channel.lang)
    })
  }
  logger.info(` found ${channels.count()} channel(s)`)
  logger.info('run:')
  // FIX: await the job so errors propagate to main() instead of becoming
  // unhandled rejections, and main() does not resolve before grabbing ends.
  await runJob({ logger, channels })
}
main()
async function runJob({ logger, channels }: { logger: Logger; channels: Collection }) {
const timer = new Timer()
timer.start()
const queueCreator = new QueueCreator({
channels,
logger,
options
})
const queue = await queueCreator.create()
const job = new Job({
queue,
logger,
options
})
await job.run()
logger.success(` done in ${timer.format('HH[h] mm[m] ss[s]')}`)
}
import { Logger, Timer, Collection, Template } from '@freearhey/core'
import epgGrabber, { EPGGrabber, EPGGrabberMock } from 'epg-grabber'
import { loadJs, parseProxy, SiteConfig, Queue } from '../../core'
import { Channel, Guide, Program } from '../../models'
import { SocksProxyAgent } from 'socks-proxy-agent'
import { PromisyClass, TaskQueue } from 'cwait'
import { Storage } from '@freearhey/storage-js'
import { QueueItem } from '../../types/queue'
import { Option, program } from 'commander'
import { SITES_DIR } from '../../constants'
import { data, loadData } from '../../api'
import dayjs, { Dayjs } from 'dayjs'
import path from 'path'
// CLI definition for the grab command. Each option may also be supplied via
// the environment variable named in the corresponding .env() call.
//
// FIX: Commander invokes an argParser as (value, previous). Passing parseInt
// directly therefore feeds the previous value — e.g. the default 1 of
// --maxConnections — in as the radix, turning valid input into NaN. Each
// numeric option now parses with an explicit base 10.
program
  .addOption(new Option('-s, --site <name>', 'Name of the site to parse'))
  .addOption(
    new Option(
      '-c, --channels <path>',
      'Path to *.channels.xml file (required if the "--site" attribute is not specified)'
    )
  )
  .addOption(new Option('-o, --output <path>', 'Path to output file').default('guide.xml'))
  .addOption(new Option('-l, --lang <codes>', 'Filter channels by languages (ISO 639-1 codes)'))
  .addOption(
    new Option('-t, --timeout <milliseconds>', 'Override the default timeout for each request')
      .env('TIMEOUT')
      .argParser((value: string) => parseInt(value, 10))
  )
  .addOption(
    new Option('-d, --delay <milliseconds>', 'Override the default delay between request')
      .env('DELAY')
      .argParser((value: string) => parseInt(value, 10))
  )
  .addOption(new Option('-x, --proxy <url>', 'Use the specified proxy').env('PROXY'))
  .addOption(
    new Option(
      '--days <days>',
      'Override the number of days for which the program will be loaded (defaults to the value from the site config)'
    )
      .argParser((value: string) => parseInt(value, 10))
      .env('DAYS')
  )
  .addOption(
    new Option('--maxConnections <number>', 'Limit on the number of concurrent requests')
      .default(1)
      .argParser((value: string) => parseInt(value, 10))
      .env('MAX_CONNECTIONS')
  )
  .addOption(
    new Option('--gzip', 'Create a compressed version of the guide as well')
      .default(false)
      .env('GZIP')
  )
  .addOption(new Option('--curl', 'Display each request as CURL').default(false).env('CURL'))
  .parse()
// Parsed CLI options for the grab command.
interface GrabOptions {
  site?: string // --site: grab every channel list under SITES_DIR/<site>
  channels?: string // --channels: glob path to specific *.channels.xml files
  output: string // --output: guide path template (default "guide.xml")
  gzip: boolean // --gzip: also write a gzip-compressed guide
  curl: boolean // --curl: print each request as a curl command
  maxConnections: number // --maxConnections: concurrent request limit
  timeout?: number // --timeout: per-request timeout override (ms)
  delay?: number // --delay: delay between requests override (ms)
  lang?: string // --lang: ISO 639-1 code(s) used to filter channels
  days?: number // --days: how many days of programs to load
  proxy?: string // --proxy: proxy URL (socks* schemes use SocksProxyAgent)
}
const options: GrabOptions = program.opts()
// Entry point of the grab command: parses channel lists, builds a per-channel
// per-day request queue, grabs programs concurrently (bounded by
// --maxConnections), then groups the results by the output path template and
// writes one guide file per group.
async function main() {
  if (!options.site && !options.channels)
    throw new Error('One of the arguments must be presented: `--site` or `--channels`')
  const logger = new Logger()
  logger.info('starting...')
  logger.info('config:')
  logger.tree(options)
  logger.info('loading channels...')
  const storage = new Storage()
  let files: string[] = []
  if (options.site) {
    // Normalize to forward slashes so the glob also works on Windows paths.
    let pattern = path.join(SITES_DIR, options.site, '*.channels.xml')
    pattern = pattern.replace(/\\/g, '/')
    files = await storage.list(pattern)
  } else if (options.channels) {
    files = await storage.list(options.channels)
  }
  let channelsFromXML = new Collection<Channel>()
  for (const filepath of files) {
    const xml = await storage.load(filepath)
    const parsedChannels = EPGGrabber.parseChannelsXML(xml)
    const _channelsFromXML = new Collection(parsedChannels).map(
      (channel: epgGrabber.Channel) => new Channel(channel.toObject())
    )
    // NOTE(review): the result of concat() is discarded here. The previous
    // version of this script reassigned (`channels = channels.concat(...)`),
    // which suggests concat returns a new collection — confirm it mutates in
    // place, otherwise every parsed channel is silently lost.
    channelsFromXML.concat(_channelsFromXML)
  }
  if (options.lang) {
    channelsFromXML = channelsFromXML.filter((channel: Channel) => {
      if (!options.lang) return true
      // NOTE(review): includes() is a substring match, and unlike the previous
      // version there is no `!channel.lang` guard — confirm that channels
      // without a lang should be dropped when --lang is set.
      return options.lang.includes(channel.lang)
    })
  }
  logger.info(`found ${channelsFromXML.count()} channel(s)`)
  logger.info('loading api data...')
  await loadData()
  logger.info('creating queue...')
  let index = 0
  const queue = new Queue()
  for (const channel of channelsFromXML.all()) {
    // Remember the original ordering so output channels can be sorted by it.
    channel.index = index++
    // Channels missing required attributes are skipped entirely.
    if (!channel.site || !channel.site_id || !channel.name) continue
    const configObject = await loadJs(channel.getConfigPath())
    const siteConfig = new SiteConfig(configObject)
    siteConfig.filepath = channel.getConfigPath()
    // CLI/env overrides take precedence over the site config's own values.
    if (options.timeout !== undefined) {
      siteConfig.request = { ...siteConfig.request, ...{ timeout: options.timeout } }
    }
    if (options.delay !== undefined) siteConfig.delay = options.delay
    if (options.curl !== undefined) siteConfig.curl = options.curl
    if (options.proxy !== undefined) {
      const proxy = parseProxy(options.proxy)
      if (
        proxy.protocol &&
        ['socks', 'socks5', 'socks5h', 'socks4', 'socks4a'].includes(String(proxy.protocol))
      ) {
        // SOCKS proxies need a dedicated agent; axios' `proxy` option only
        // supports HTTP(S) proxies.
        const socksProxyAgent = new SocksProxyAgent(options.proxy)
        siteConfig.request = {
          ...siteConfig.request,
          ...{ httpAgent: socksProxyAgent, httpsAgent: socksProxyAgent }
        }
      } else {
        siteConfig.request = { ...siteConfig.request, ...{ proxy } }
      }
    }
    // Fall back to site_id so every queued channel has an xmltv_id.
    if (!channel.xmltv_id) channel.xmltv_id = channel.site_id
    const days = options.days || siteConfig.days || 1
    // CURR_DATE lets tests pin "today" to a fixed date.
    const currDate = dayjs.utc(process.env.CURR_DATE || new Date().toISOString())
    const dates = Array.from({ length: days }, (_, day) => currDate.add(day, 'd'))
    dates.forEach((date: Dayjs) => {
      // The key deduplicates requests for the same site/lang/channel/day.
      const key = `${channel.site}:${channel.lang}:${channel.xmltv_id}:${date.toJSON()}`
      if (queue.has(key)) return
      queue.add(key, {
        channel,
        date,
        siteConfig,
        error: null
      })
    })
  }
  // Tests use a mock grabber so no real HTTP requests are made.
  const grabber = process.env.NODE_ENV === 'test' ? new EPGGrabberMock() : new EPGGrabber()
  const taskQueue = new TaskQueue(Promise as PromisyClass, options.maxConnections)
  const queueItems = queue.getItems()
  const channels = new Collection<Channel>()
  const programs = new Collection<Program>()
  let i = 1
  const total = queueItems.count()
  const requests = queueItems.map(
    taskQueue.wrap(async (queueItem: QueueItem) => {
      const { channel, siteConfig, date } = queueItem
      if (!channel.logo) {
        if (siteConfig.logo) {
          // The site config knows how to scrape a logo for this channel.
          channel.logo = await grabber.loadLogo(channel, date)
        } else {
          // Otherwise fall back to the logo recorded in the API data.
          channel.logo = getLogoForChannel(channel)
        }
      }
      // The same channel is added once per queued date; duplicates are removed
      // later by uniqBy before the guides are written.
      channels.add(channel)
      const channelPrograms = await grabber.grab(
        channel,
        date,
        siteConfig,
        (context: epgGrabber.Types.GrabCallbackContext, error: Error | null) => {
          logger.info(
            ` [${i}/${total}] ${context.channel.site} (${context.channel.lang}) - ${
              context.channel.xmltv_id
            } - ${context.date.format('MMM D, YYYY')} (${context.programs.length} programs)`
          )
          // Cap the shared progress counter so concurrent callbacks never
          // display a count beyond the total.
          if (i < total) i++
          if (error) {
            logger.info(` ERR: ${error.message}`)
          }
        }
      )
      const _programs = new Collection<epgGrabber.Program>(channelPrograms).map<Program>(
        program => new Program(program.toObject())
      )
      // NOTE(review): same concat-without-reassignment pattern as above, and
      // `programs` is const — confirm concat mutates in place, otherwise every
      // grabbed program is dropped before the guides are written.
      programs.concat(_programs)
    })
  )
  logger.info('run:')
  const timer = new Timer()
  timer.start()
  await Promise.all(requests.all())
  // The output option may contain {lang}/{site} placeholders; each distinct
  // formatted path becomes one guide file.
  const pathTemplate = new Template(options.output)
  const channelsGroupedByKey = channels
    .sortBy([(channel: Channel) => channel.index, (channel: Channel) => channel.xmltv_id])
    .uniqBy((channel: Channel) => `${channel.xmltv_id}:${channel.site}:${channel.lang}`)
    .groupBy((channel: Channel) => {
      return pathTemplate.format({ lang: channel.lang || 'en', site: channel.site || '' })
    })
  const programsGroupedByKey = programs
    .sortBy([(program: Program) => program.channel, (program: Program) => program.start])
    .groupBy((program: Program) => {
      // Programs are grouped by the language of their first title, if any.
      const lang =
        program.titles && program.titles.length && program.titles[0].lang
          ? program.titles[0].lang
          : 'en'
      return pathTemplate.format({ lang, site: program.site || '' })
    })
  for (const groupKey of channelsGroupedByKey.keys()) {
    const groupChannels = new Collection(channelsGroupedByKey.get(groupKey))
    const groupPrograms = new Collection(programsGroupedByKey.get(groupKey))
    const guide = new Guide({
      filepath: groupKey,
      gzip: options.gzip,
      channels: groupChannels,
      programs: groupPrograms
    })
    await guide.save({ logger })
  }
  logger.success(` done in ${timer.format('HH[h] mm[m] ss[s]')}`)
}
main()
/**
 * Resolves a logo URL for a channel from the API data: a feed matching the
 * full xmltv_id ("<channelId>@<feedId>") takes precedence; otherwise the
 * channel record itself is consulted. Returns null when neither has a logo.
 */
function getLogoForChannel(channel: Channel): string | null {
  const feed = data.feedsKeyByStreamId.get(channel.xmltv_id)
  const feedLogo = feed ? feed.getLogos().first() : undefined
  if (feedLogo) return feedLogo.url

  const [channelId] = channel.xmltv_id.split('@')
  const record = data.channelsKeyById.get(channelId)
  const recordLogo = record ? record.getLogos().first() : undefined
  if (recordLogo) return recordLogo.url

  return null
}

View File

@@ -1,45 +1,46 @@
import { Logger, Storage } from '@freearhey/core'
import { SITES_DIR } from '../../constants'
import { pathToFileURL } from 'node:url'
import { program } from 'commander'
import fs from 'fs-extra'
program.argument('<site>', 'Domain name of the site').parse(process.argv)
// Domain name of the site to scaffold, e.g. "example.com".
const domain = program.args[0]
// Scaffolds a new site folder under SITES_DIR with test, config, and readme
// files generated from the shared templates (every <DOMAIN> placeholder is
// replaced with the site's domain name).
async function main() {
  const storage = new Storage(SITES_DIR)
  const logger = new Logger()
  logger.info(`Initializing "${domain}"...\r\n`)
  const dir = domain
  // Refuse to overwrite an existing site folder.
  if (await storage.exists(dir)) {
    throw new Error(`Folder "${dir}" already exists`)
  }
  await storage.createDir(dir)
  logger.info(`Creating "${dir}/${domain}.test.js"...`)
  const testTemplate = fs.readFileSync(pathToFileURL('scripts/templates/_test.js'), {
    encoding: 'utf8'
  })
  await storage.save(`${dir}/${domain}.test.js`, testTemplate.replace(/<DOMAIN>/g, domain))
  logger.info(`Creating "${dir}/${domain}.config.js"...`)
  const configTemplate = fs.readFileSync(pathToFileURL('scripts/templates/_config.js'), {
    encoding: 'utf8'
  })
  await storage.save(`${dir}/${domain}.config.js`, configTemplate.replace(/<DOMAIN>/g, domain))
  logger.info(`Creating "${dir}/readme.md"...`)
  const readmeTemplate = fs.readFileSync(pathToFileURL('scripts/templates/_readme.md'), {
    encoding: 'utf8'
  })
  await storage.save(`${dir}/readme.md`, readmeTemplate.replace(/<DOMAIN>/g, domain))
  logger.info('\r\nDone')
}
main()
import { SITES_DIR, EOL } from '../../constants'
import { Storage } from '@freearhey/storage-js'
import { Logger } from '@freearhey/core'
import { pathToFileURL } from 'node:url'
import { program } from 'commander'
import fs from 'fs-extra'
program.argument('<site>', 'Domain name of the site').parse(process.argv)
const domain = program.args[0]
async function main() {
const storage = new Storage(SITES_DIR)
const logger = new Logger()
logger.info(`Initializing "${domain}"...${EOL}`)
const dir = domain
if (await storage.exists(dir)) {
throw new Error(`Folder "${dir}" already exists`)
}
await storage.createDir(dir)
logger.info(`Creating "${dir}/${domain}.test.js"...`)
const testTemplate = fs.readFileSync(pathToFileURL('scripts/templates/_test.js'), {
encoding: 'utf8'
})
await storage.save(`${dir}/${domain}.test.js`, testTemplate.replace(/<DOMAIN>/g, domain))
logger.info(`Creating "${dir}/${domain}.config.js"...`)
const configTemplate = fs.readFileSync(pathToFileURL('scripts/templates/_config.js'), {
encoding: 'utf8'
})
await storage.save(`${dir}/${domain}.config.js`, configTemplate.replace(/<DOMAIN>/g, domain))
logger.info(`Creating "${dir}/readme.md"...`)
const readmeTemplate = fs.readFileSync(pathToFileURL('scripts/templates/_readme.md'), {
encoding: 'utf8'
})
await storage.save(`${dir}/readme.md`, readmeTemplate.replace(/<DOMAIN>/g, domain))
logger.info(`${EOL}Done`)
}
main()

View File

@@ -1,76 +1,80 @@
import { IssueLoader, HTMLTable, ChannelsParser } from '../../core'
import { Logger, Storage, Collection } from '@freearhey/core'
import { ChannelList, Issue, Site } from '../../models'
import { SITES_DIR, ROOT_DIR } from '../../constants'
import { Channel } from 'epg-grabber'
// Rebuilds SITES.md: collects every site folder, attaches any open
// "broken guide" issues, counts total vs xmltv_id-tagged channels per site,
// and renders the result into the _sites.md template as an HTML table.
async function main() {
  // Level -999 effectively silences the logger; the info calls below are
  // kept for debugging.
  const logger = new Logger({ level: -999 })
  const issueLoader = new IssueLoader()
  const sitesStorage = new Storage(SITES_DIR)
  const sites = new Collection()
  logger.info('loading channels...')
  const channelsParser = new ChannelsParser({
    storage: sitesStorage
  })
  logger.info('loading list of sites')
  const folders = await sitesStorage.list('*/')
  logger.info('loading issues...')
  const issues = await issueLoader.load()
  logger.info('putting the data together...')
  // Only issues labeled "broken guide" are attached to sites.
  const brokenGuideReports = issues.filter(issue =>
    issue.labels.find((label: string) => label === 'broken guide')
  )
  for (const domain of folders) {
    const filteredIssues = brokenGuideReports.filter(
      (issue: Issue) => domain === issue.data.get('site')
    )
    const site = new Site({
      domain,
      issues: filteredIssues
    })
    // Tally channel counts across every channel list that belongs to the site.
    const files = await sitesStorage.list(`${domain}/*.channels.xml`)
    for (const filepath of files) {
      const channelList: ChannelList = await channelsParser.parse(filepath)
      site.totalChannels += channelList.channels.count()
      site.markedChannels += channelList.channels
        .filter((channel: Channel) => channel.xmltv_id)
        .count()
    }
    sites.add(site)
  }
  logger.info('creating sites table...')
  // One table row per site: link, counts, status emoji, related issue links.
  const tableData = new Collection()
  sites.forEach((site: Site) => {
    tableData.add([
      { value: `<a href="sites/${site.domain}">${site.domain}</a>` },
      { value: site.totalChannels, align: 'right' },
      { value: site.markedChannels, align: 'right' },
      { value: site.getStatus().emoji, align: 'center' },
      { value: site.getIssues().all().join(', ') }
    ])
  })
  logger.info('updating sites.md...')
  const table = new HTMLTable(tableData.all(), [
    { name: 'Site', align: 'left' },
    { name: 'Channels<br>(total / with xmltv-id)', colspan: 2, align: 'left' },
    { name: 'Status', align: 'left' },
    { name: 'Notes', align: 'left' }
  ])
  const rootStorage = new Storage(ROOT_DIR)
  // The template's _TABLE_ marker is replaced with the rendered HTML table.
  const sitesTemplate = await new Storage().load('scripts/templates/_sites.md')
  const sitesContent = sitesTemplate.replace('_TABLE_', table.toString())
  await rootStorage.save('SITES.md', sitesContent)
}
main()
import { HTMLTableDataItem, HTMLTableRow, HTMLTableColumn } from '../../types/htmlTable'
import { SITES_DIR, ROOT_DIR } from '../../constants'
import { Logger, Collection } from '@freearhey/core'
import { Issue, Site, Channel } from '../../models'
import { HTMLTable, loadIssues } from '../../core'
import { Storage } from '@freearhey/storage-js'
import * as epgGrabber from 'epg-grabber'
import { EPGGrabber } from 'epg-grabber'
/**
 * Rebuilds SITES.md: collects every site folder, attaches any open
 * "broken guide" issues, counts total vs xmltv_id-tagged channels per site,
 * and renders the result into the _sites.md template as an HTML table.
 */
async function main() {
  // Level -999 effectively silences the logger; info calls remain for debugging.
  const logger = new Logger({ level: -999 })
  const sitesStorage = new Storage(SITES_DIR)
  const sites = new Collection<Site>()

  logger.info('loading list of sites')
  const siteFolders = await sitesStorage.list('*/')

  logger.info('loading issues...')
  const issues = await loadIssues()

  logger.info('putting the data together...')
  // Only issues labeled "broken guide" are attached to sites.
  const brokenGuideReports = issues.filter(issue =>
    issue.labels.find((label: string) => label === 'broken guide')
  )
  for (const domain of siteFolders) {
    const siteIssues = brokenGuideReports.filter(
      (issue: Issue) => domain === issue.data.get('site')
    )
    const site = new Site({
      domain,
      issues: siteIssues
    })
    // Tally channel counts across every channel list that belongs to the site.
    const channelFiles = await sitesStorage.list(`${domain}/*.channels.xml`)
    for (const filepath of channelFiles) {
      const xml = await sitesStorage.load(filepath)
      const parsed = EPGGrabber.parseChannelsXML(xml)
      const channelList = new Collection(parsed).map(
        (channel: epgGrabber.Channel) => new Channel(channel.toObject())
      )
      site.totalChannels += channelList.count()
      site.markedChannels += channelList.filter((channel: Channel) => channel.xmltv_id).count()
    }
    sites.add(site)
  }

  logger.info('creating sites table...')
  // One table row per site: link, counts, status emoji, related issue links.
  const rows = new Collection<HTMLTableRow>()
  sites.forEach((site: Site) => {
    const cells: HTMLTableDataItem[] = [
      { value: `<a href="sites/${site.domain}">${site.domain}</a>` },
      { value: site.totalChannels.toString(), align: 'right' },
      { value: site.markedChannels.toString(), align: 'right' },
      { value: site.getStatus().emoji, align: 'center' },
      { value: site.getIssueUrls().all().join(', ') }
    ]
    rows.add(new Collection<HTMLTableDataItem>(cells))
  })

  logger.info('updating sites.md...')
  const columns = new Collection<HTMLTableColumn>([
    { name: 'Site', align: 'left' },
    { name: 'Channels<br>(total / with xmltv-id)', colspan: 2, align: 'left' },
    { name: 'Status', align: 'left' },
    { name: 'Notes', align: 'left' }
  ])
  const table = new HTMLTable(rows, columns)
  const rootStorage = new Storage(ROOT_DIR)
  // The template's _TABLE_ marker is replaced with the rendered HTML table.
  const sitesTemplate = await new Storage().load('scripts/templates/_sites.md')
  const sitesContent = sitesTemplate.replace('_TABLE_', table.toString())
  await rootStorage.save('SITES.md', sitesContent)
}
main()