mirror of https://github.com/iptv-org/epg
synced 2025-12-23 13:57:02 -05:00
Update scripts
This commit is contained in:
@@ -1,216 +1,200 @@
import { Storage, Collection, Logger, Dictionary } from '@freearhey/core'
import type { DataProcessorData } from '../../types/dataProcessor'
import type { DataLoaderData } from '../../types/dataLoader'
import { ChannelSearchableData } from '../../types/channel'
import { Channel, ChannelList, Feed } from '../../models'
import { DataProcessor, DataLoader } from '../../core'
import { select, input } from '@inquirer/prompts'
import { ChannelsParser } from '../../core'
import { DATA_DIR } from '../../constants'
import nodeCleanup from 'node-cleanup'
import sjs from '@freearhey/search-js'
import epgGrabber from 'epg-grabber'
import { Command } from 'commander'
import readline from 'readline'

interface ChoiceValue { type: string; value?: Feed | Channel }
interface Choice { name: string; short?: string; value: ChoiceValue; default?: boolean }
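
// On Windows, readline does not forward Ctrl+C as a SIGINT to the process by default,
// so it is re-emitted manually below; this keeps the nodeCleanup() save handler working.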
if (process.platform === 'win32') {
  readline
    .createInterface({
      input: process.stdin,
      output: process.stdout
    })
    .on('SIGINT', function () {
      process.emit('SIGINT')
    })
}

const program = new Command()

program.argument('<filepath>', 'Path to *.channels.xml file to edit').parse(process.argv)

const filepath = program.args[0]
const logger = new Logger()
const storage = new Storage()
let channelList = new ChannelList({ channels: [] })
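
// Start the interactive editor; nodeCleanup() writes whatever has been assigned so far
// back to the *.channels.xml file on any exit, including Ctrl+C.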
main(filepath)
nodeCleanup(() => {
  save(filepath, channelList)
})

export default async function main(filepath: string) {
  if (!(await storage.exists(filepath))) {
    throw new Error(`File "${filepath}" does not exist`)
  }

  logger.info('loading data from api...')
  const processor = new DataProcessor()
  const dataStorage = new Storage(DATA_DIR)
  const loader = new DataLoader({ storage: dataStorage })
  const data: DataLoaderData = await loader.load()
  const { channels, channelsKeyById, feedsGroupedByChannelId }: DataProcessorData =
    processor.process(data)

  logger.info('loading channels...')
  const parser = new ChannelsParser({ storage })
  channelList = await parser.parse(filepath)
  const parsedChannelsWithoutId = channelList.channels.filter(
    (channel: epgGrabber.Channel) => !channel.xmltv_id
  )

  logger.info(
    `found ${channelList.channels.count()} channels (including ${parsedChannelsWithoutId.count()} without ID)`
  )

  logger.info('creating search index...')
  const items = channels.map((channel: Channel) => channel.getSearchable()).all()
  const searchIndex = sjs.createIndex(items, {
    searchable: ['name', 'altNames', 'guideNames', 'streamNames', 'feedFullNames']
  })

  logger.info('starting...\n')

  for (const channel of parsedChannelsWithoutId.all()) {
    try {
      channel.xmltv_id = await selectChannel(
        channel,
        searchIndex,
        feedsGroupedByChannelId,
        channelsKeyById
      )
    } catch (err) {
      logger.info(err.message)
      break
    }
  }

  parsedChannelsWithoutId.forEach((channel: epgGrabber.Channel) => {
    if (channel.xmltv_id === '-') {
      channel.xmltv_id = ''
    }
  })
}
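
// selectChannel() returns '-' when the user skips, '' when no ID could be resolved,
// and otherwise either '<channelId>' or '<channelId>@<feedId>'
// (e.g. the hypothetical value 'Channel1.us@SD').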
async function selectChannel(
  channel: epgGrabber.Channel,
  searchIndex,
  feedsGroupedByChannelId: Dictionary,
  channelsKeyById: Dictionary
): Promise<string> {
  const query = escapeRegex(channel.name)
  const similarChannels = searchIndex
    .search(query)
    .map((item: ChannelSearchableData) => channelsKeyById.get(item.id))

  const selected: ChoiceValue = await select({
    message: `Select channel ID for "${channel.name}" (${channel.site_id}):`,
    choices: getChannelChoises(new Collection(similarChannels)),
    pageSize: 10
  })

  switch (selected.type) {
    case 'skip':
      return '-'
    case 'type': {
      const typedChannelId = await input({ message: ' Channel ID:' })
      if (!typedChannelId) return ''
      const selectedFeedId = await selectFeed(typedChannelId, feedsGroupedByChannelId)
      if (selectedFeedId === '-') return typedChannelId
      return [typedChannelId, selectedFeedId].join('@')
    }
    case 'channel': {
      const selectedChannel = selected.value
      if (!selectedChannel) return ''
      const selectedFeedId = await selectFeed(selectedChannel.id || '', feedsGroupedByChannelId)
      if (selectedFeedId === '-') return selectedChannel.id || ''
      return [selectedChannel.id, selectedFeedId].join('@')
    }
  }

  return ''
}

async function selectFeed(channelId: string, feedsGroupedByChannelId: Dictionary): Promise<string> {
  const channelFeeds = feedsGroupedByChannelId.has(channelId)
    ? new Collection(feedsGroupedByChannelId.get(channelId))
    : new Collection()
  const choices = getFeedChoises(channelFeeds)

  const selected: ChoiceValue = await select({
    message: `Select feed ID for "${channelId}":`,
    choices,
    pageSize: 10
  })

  switch (selected.type) {
    case 'skip':
      return '-'
    case 'type':
      return await input({ message: ' Feed ID:', default: 'SD' })
    case 'feed':
      const selectedFeed = selected.value
      if (!selectedFeed) return ''
      return selectedFeed.id || ''
  }

  return ''
}

function getChannelChoises(channels: Collection): Choice[] {
  const choises: Choice[] = []

  channels.forEach((channel: Channel) => {
    const names = new Collection([channel.name, ...channel.getAltNames().all()]).uniq().join(', ')

    choises.push({
      value: {
        type: 'channel',
        value: channel
      },
      name: `${channel.id} (${names})`,
      short: `${channel.id}`
    })
  })

  choises.push({ name: 'Type...', value: { type: 'type' } })
  choises.push({ name: 'Skip', value: { type: 'skip' } })

  return choises
}

function getFeedChoises(feeds: Collection): Choice[] {
  const choises: Choice[] = []

  feeds.forEach((feed: Feed) => {
    let name = `${feed.id} (${feed.name})`
    if (feed.isMain) name += ' [main]'

    choises.push({
      value: {
        type: 'feed',
        value: feed
      },
      default: feed.isMain,
      name,
      short: feed.id
    })
  })

  choises.push({ name: 'Type...', value: { type: 'type' } })
  choises.push({ name: 'Skip', value: { type: 'skip' } })

  return choises
}

function save(filepath: string, channelList: ChannelList) {
  if (!storage.existsSync(filepath)) return
  storage.saveSync(filepath, channelList.toString())
  logger.info(`\nFile '${filepath}' successfully saved`)
}

function escapeRegex(string: string) {
  return string.replace(/[/\-\\^$*+?.()|[\]{}]/g, '\\$&')
}
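
// The rewritten editor below drops the DataLoader/DataProcessor pipeline in favour of the
// shared '../../api' helpers (loadData, data, searchChannels) and the @iptv-org/sdk models,
// and round-trips the file with EPGGrabber.parseChannelsXML() / generateChannelsXML()
// instead of ChannelsParser/ChannelList.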
import { loadData, data, searchChannels } from '../../api'
import epgGrabber, { EPGGrabber } from 'epg-grabber'
import { Collection, Logger } from '@freearhey/core'
import { select, input } from '@inquirer/prompts'
import { generateChannelsXML } from '../../core'
import { Storage } from '@freearhey/storage-js'
import { Channel } from '../../models'
import nodeCleanup from 'node-cleanup'
import * as sdk from '@iptv-org/sdk'
import { Command } from 'commander'
import readline from 'readline'

interface ChoiceValue {
  type: string
  value?: sdk.Models.Feed | sdk.Models.Channel
}
interface Choice {
  name: string
  short?: string
  value: ChoiceValue
  default?: boolean
}

if (process.platform === 'win32') {
  readline
    .createInterface({
      input: process.stdin,
      output: process.stdout
    })
    .on('SIGINT', function () {
      process.emit('SIGINT')
    })
}

const program = new Command()

program.argument('<filepath>', 'Path to *.channels.xml file to edit').parse(process.argv)

const filepath = program.args[0]
const logger = new Logger()
const storage = new Storage()
let channelsFromXML = new Collection<Channel>()

main(filepath)
nodeCleanup(() => {
  save(filepath, channelsFromXML)
})

export default async function main(filepath: string) {
  if (!(await storage.exists(filepath))) {
    throw new Error(`File "${filepath}" does not exist`)
  }

  logger.info('loading data from api...')
  await loadData()

  logger.info('loading channels...')
  const xml = await storage.load(filepath)
  const parsedChannels = EPGGrabber.parseChannelsXML(xml)
  channelsFromXML = new Collection(parsedChannels).map(
    (channel: epgGrabber.Channel) => new Channel(channel.toObject())
  )
  const channelsFromXMLWithoutId = channelsFromXML.filter((channel: Channel) => !channel.xmltv_id)

  logger.info(
    `found ${channelsFromXML.count()} channels (including ${channelsFromXMLWithoutId.count()} without ID)`
  )

  logger.info('starting...')
  console.log()

  for (const channel of channelsFromXMLWithoutId.all()) {
    try {
      channel.xmltv_id = await selectChannel(channel)
    } catch {
      break
    }
  }

  channelsFromXMLWithoutId.forEach((channel: epgGrabber.Channel) => {
    if (channel.xmltv_id === '-') {
      channel.xmltv_id = ''
    }
  })
}

async function selectChannel(channel: epgGrabber.Channel): Promise<string> {
  const query = escapeRegex(channel.name)
  const similarChannels = searchChannels(query)
  const choices = getChoicesForChannel(similarChannels).all()

  const selected: ChoiceValue = await select({
    message: `Select channel ID for "${channel.name}" (${channel.site_id}):`,
    choices,
    pageSize: 10
  })

  switch (selected.type) {
    case 'skip':
      return '-'
    case 'type': {
      const typedChannelId = await input({ message: ' Channel ID:' })
      if (!typedChannelId) return ''
      const selectedFeedId = await selectFeed(typedChannelId)
      if (selectedFeedId === '-') return typedChannelId
      return [typedChannelId, selectedFeedId].join('@')
    }
    case 'channel': {
      const selectedChannel = selected.value
      if (!selectedChannel) return ''
      const selectedFeedId = await selectFeed(selectedChannel.id || '')
      if (selectedFeedId === '-') return selectedChannel.id || ''
      return [selectedChannel.id, selectedFeedId].join('@')
    }
  }

  return ''
}

async function selectFeed(channelId: string): Promise<string> {
  const channelFeeds = new Collection(data.feedsGroupedByChannelId.get(channelId))
  const choices = getChoicesForFeed(channelFeeds).all()

  const selected: ChoiceValue = await select({
    message: `Select feed ID for "${channelId}":`,
    choices,
    pageSize: 10
  })

  switch (selected.type) {
    case 'skip':
      return '-'
    case 'type':
      return await input({ message: ' Feed ID:', default: 'SD' })
    case 'feed':
      const selectedFeed = selected.value
      if (!selectedFeed) return ''
      return selectedFeed.id || ''
  }

  return ''
}

function getChoicesForChannel(channels: Collection<sdk.Models.Channel>): Collection<Choice> {
  const choices = new Collection<Choice>()

  channels.forEach((channel: sdk.Models.Channel) => {
    const names = new Collection([channel.name, ...channel.alt_names]).uniq().join(', ')

    choices.add({
      value: {
        type: 'channel',
        value: channel
      },
      name: `${channel.id} (${names})`,
      short: `${channel.id}`
    })
  })

  choices.add({ name: 'Type...', value: { type: 'type' } })
  choices.add({ name: 'Skip', value: { type: 'skip' } })

  return choices
}

function getChoicesForFeed(feeds: Collection<sdk.Models.Feed>): Collection<Choice> {
  const choices = new Collection<Choice>()

  feeds.forEach((feed: sdk.Models.Feed) => {
    let name = `${feed.id} (${feed.name})`
    if (feed.is_main) name += ' [main]'

    choices.add({
      value: {
        type: 'feed',
        value: feed
      },
      default: feed.is_main,
      name,
      short: feed.id
    })
  })

  choices.add({ name: 'Type...', value: { type: 'type' } })
  choices.add({ name: 'Skip', value: { type: 'skip' } })

  return choices
}

function save(filepath: string, channelsFromXML: Collection<Channel>) {
  if (!storage.existsSync(filepath)) return
  const xml = generateChannelsXML(channelsFromXML)
  storage.saveSync(filepath, xml)
  console.log()
  logger.info(`File '${filepath}' successfully saved`)
}

function escapeRegex(string: string) {
  return string.replace(/[/\-\\^$*+?.()|[\]{}]/g, '\\$&')
}
60  scripts/commands/channels/format.ts  Normal file
@@ -0,0 +1,60 @@
import { Collection, Logger } from '@freearhey/core'
import epgGrabber, { EPGGrabber } from 'epg-grabber'
import { generateChannelsXML } from '../../core'
import { Storage } from '@freearhey/storage-js'
import { SITES_DIR } from '../../constants'
import { data, loadData } from '../../api'
import { Channel } from '../../models'
import { program } from 'commander'

program.argument('[filepath...]', 'Path to file to format').parse(process.argv)
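
// For every *.channels.xml file, main() rewrites each xmltv_id to a known stream ID:
// IDs that already match a feed stream ID are kept, bare channel IDs are expanded to the
// channel's main feed via getMainFeed()/getStreamId(), anything unknown is cleared, and the
// channels are re-sorted by site_id before the file is saved.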
async function main() {
  const logger = new Logger()

  logger.info('loading data from api...')
  await loadData()

  logger.info('loading *.channels.xml files...')
  const storage = new Storage()
  const files = program.args.length
    ? program.args
    : await storage.list(`${SITES_DIR}/**/*.channels.xml`)

  logger.info(`found ${files.length} file(s)`)

  logger.info('formatting...')
  for (const filepath of files) {
    if (!storage.existsSync(filepath)) continue

    const xml = await storage.load(filepath)
    const parsedChannels = EPGGrabber.parseChannelsXML(xml)
    const channelsFromXML = new Collection(parsedChannels).map(
      (channel: epgGrabber.Channel) => new Channel(channel.toObject())
    )

    channelsFromXML.forEach((channel: Channel) => {
      if (!channel.xmltv_id) return
      if (data.feedsKeyByStreamId.get(channel.xmltv_id)) return

      const channelData = data.channelsKeyById.get(channel.xmltv_id)
      if (channelData) {
        const mainFeed = channelData.getMainFeed()
        if (mainFeed) {
          channel.xmltv_id = mainFeed.getStreamId()
          return
        }
      }

      channel.xmltv_id = ''
    })

    channelsFromXML.sortBy((channel: Channel) => channel.site_id)

    const output = generateChannelsXML(channelsFromXML)

    await storage.save(filepath, output)
  }
}

main()
@@ -1,109 +1,109 @@
import chalk from 'chalk'
import { program } from 'commander'
import { Storage, File } from '@freearhey/core'
import { XmlDocument, XsdValidator, XmlValidateError, ErrorDetail } from 'libxml2-wasm'

const xsd = `<?xml version="1.0" encoding="UTF-8"?>
<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified">
  <xs:element name="channels">
    <xs:complexType>
      <xs:sequence>
        <xs:element minOccurs="0" maxOccurs="unbounded" ref="channel"/>
      </xs:sequence>
    </xs:complexType>
  </xs:element>
  <xs:element name="channel">
    <xs:complexType mixed="true">
      <xs:attribute use="required" ref="site"/>
      <xs:attribute use="required" ref="lang"/>
      <xs:attribute use="required" ref="site_id"/>
      <xs:attribute name="xmltv_id" use="required" type="xs:string"/>
      <xs:attribute name="logo" type="xs:string"/>
      <xs:attribute name="lcn" type="xs:string"/>
    </xs:complexType>
  </xs:element>
  <xs:attribute name="site">
    <xs:simpleType>
      <xs:restriction base="xs:string">
        <xs:minLength value="1"/>
      </xs:restriction>
    </xs:simpleType>
  </xs:attribute>
  <xs:attribute name="site_id">
    <xs:simpleType>
      <xs:restriction base="xs:string">
        <xs:minLength value="1"/>
      </xs:restriction>
    </xs:simpleType>
  </xs:attribute>
  <xs:attribute name="lang">
    <xs:simpleType>
      <xs:restriction base="xs:string">
        <xs:minLength value="1"/>
      </xs:restriction>
    </xs:simpleType>
  </xs:attribute>
</xs:schema>`

program.argument('[filepath...]', 'Path to *.channels.xml files to check').parse(process.argv)
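
// main() validates every *.channels.xml file against the inline XSD above and additionally
// flags attributes written with single quotes; if any file produces errors the script
// prints them per file and exits with code 1.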
async function main() {
  const storage = new Storage()

  let errors: ErrorDetail[] = []

  const files = program.args.length ? program.args : await storage.list('sites/**/*.channels.xml')
  for (const filepath of files) {
    const file = new File(filepath)
    if (file.extension() !== 'xml') continue

    const xml = await storage.load(filepath)

    let localErrors: ErrorDetail[] = []

    try {
      const schema = XmlDocument.fromString(xsd)
      const validator = XsdValidator.fromDoc(schema)
      const doc = XmlDocument.fromString(xml)

      validator.validate(doc)

      schema.dispose()
      validator.dispose()
      doc.dispose()
    } catch (_error) {
      const error = _error as XmlValidateError

      localErrors = localErrors.concat(error.details)
    }

    xml.split('\n').forEach((line: string, lineIndex: number) => {
      const found = line.match(/='/)
      if (found) {
        const colIndex = found.index || 0
        localErrors.push({
          line: lineIndex + 1,
          col: colIndex + 1,
          message: 'Single quotes cannot be used in attributes'
        })
      }
    })

    if (localErrors.length) {
      console.log(`\n${chalk.underline(filepath)}`)
      localErrors.forEach((error: ErrorDetail) => {
        const position = `${error.line}:${error.col}`
        console.log(` ${chalk.gray(position.padEnd(4, ' '))} ${error.message.trim()}`)
      })

      errors = errors.concat(localErrors)
    }
  }

  if (errors.length) {
    console.log(chalk.red(`\n${errors.length} error(s)`))
    process.exit(1)
  }
}

main()
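
// The linter below is functionally unchanged; only the Storage/File imports moved from
// '@freearhey/core' to '@freearhey/storage-js'.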
import { XmlDocument, XsdValidator, XmlValidateError, ErrorDetail } from 'libxml2-wasm'
import { Storage, File } from '@freearhey/storage-js'
import { program } from 'commander'
import chalk from 'chalk'

const xsd = `<?xml version="1.0" encoding="UTF-8"?>
<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified">
  <xs:element name="channels">
    <xs:complexType>
      <xs:sequence>
        <xs:element minOccurs="0" maxOccurs="unbounded" ref="channel"/>
      </xs:sequence>
    </xs:complexType>
  </xs:element>
  <xs:element name="channel">
    <xs:complexType mixed="true">
      <xs:attribute use="required" ref="site"/>
      <xs:attribute use="required" ref="lang"/>
      <xs:attribute use="required" ref="site_id"/>
      <xs:attribute name="xmltv_id" use="required" type="xs:string"/>
      <xs:attribute name="logo" type="xs:string"/>
      <xs:attribute name="lcn" type="xs:string"/>
    </xs:complexType>
  </xs:element>
  <xs:attribute name="site">
    <xs:simpleType>
      <xs:restriction base="xs:string">
        <xs:minLength value="1"/>
      </xs:restriction>
    </xs:simpleType>
  </xs:attribute>
  <xs:attribute name="site_id">
    <xs:simpleType>
      <xs:restriction base="xs:string">
        <xs:minLength value="1"/>
      </xs:restriction>
    </xs:simpleType>
  </xs:attribute>
  <xs:attribute name="lang">
    <xs:simpleType>
      <xs:restriction base="xs:string">
        <xs:minLength value="1"/>
      </xs:restriction>
    </xs:simpleType>
  </xs:attribute>
</xs:schema>`

program.argument('[filepath...]', 'Path to *.channels.xml files to check').parse(process.argv)

async function main() {
  const storage = new Storage()

  let errors: ErrorDetail[] = []

  const files = program.args.length ? program.args : await storage.list('sites/**/*.channels.xml')
  for (const filepath of files) {
    const file = new File(filepath)
    if (file.extension() !== 'xml') continue

    const xml = await storage.load(filepath)

    let localErrors: ErrorDetail[] = []

    try {
      const schema = XmlDocument.fromString(xsd)
      const validator = XsdValidator.fromDoc(schema)
      const doc = XmlDocument.fromString(xml)

      validator.validate(doc)

      schema.dispose()
      validator.dispose()
      doc.dispose()
    } catch (_error) {
      const error = _error as XmlValidateError

      localErrors = localErrors.concat(error.details)
    }

    xml.split('\n').forEach((line: string, lineIndex: number) => {
      const found = line.match(/='/)
      if (found) {
        const colIndex = found.index || 0
        localErrors.push({
          line: lineIndex + 1,
          col: colIndex + 1,
          message: 'Single quotes cannot be used in attributes'
        })
      }
    })

    if (localErrors.length) {
      console.log(`\n${chalk.underline(filepath)}`)
      localErrors.forEach((error: ErrorDetail) => {
        const position = `${error.line}:${error.col}`
        console.log(` ${chalk.gray(position.padEnd(4, ' '))} ${error.message.trim()}`)
      })

      errors = errors.concat(localErrors)
    }
  }

  if (errors.length) {
    console.log(chalk.red(`\n${errors.length} error(s)`))
    process.exit(1)
  }
}

main()
@@ -1,86 +1,119 @@
import { Logger, File, Storage } from '@freearhey/core'
import { ChannelsParser } from '../../core'
import { ChannelList } from '../../models'
import { pathToFileURL } from 'node:url'
import epgGrabber from 'epg-grabber'
import { Command } from 'commander'

const program = new Command()
program
  .requiredOption('-c, --config <config>', 'Config file')
  .option('-s, --set [args...]', 'Set custom arguments')
  .option('-o, --output <output>', 'Output file')
  .parse(process.argv)

interface ParseOptions {
  config: string
  set?: string
  output?: string
  clean?: boolean
}

const options: ParseOptions = program.opts()
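
// main() imports the site config given via --config, calls its channels() function (sync or
// async) with the --set arguments (each passed as 'key:value', e.g. a hypothetical
// --set period:today), carries over xmltv_id/lang from an existing *.channels.xml for matching
// site_id values, and writes the sorted result back to the output file.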
async function main() {
  function isPromise(promise: object[] | Promise<object[]>) {
    return (
      !!promise &&
      typeof promise === 'object' &&
      typeof (promise as Promise<object[]>).then === 'function'
    )
  }

  const storage = new Storage()
  const logger = new Logger()
  const parser = new ChannelsParser({ storage })
  const file = new File(options.config)
  const dir = file.dirname()
  const config = (await import(pathToFileURL(options.config).toString())).default
  const outputFilepath = options.output || `${dir}/${config.site}.channels.xml`

  let channelList = new ChannelList({ channels: [] })
  if (await storage.exists(outputFilepath)) {
    channelList = await parser.parse(outputFilepath)
  }

  const args: Record<string, string> = {}

  if (Array.isArray(options.set)) {
    options.set.forEach((arg: string) => {
      const [key, value] = arg.split(':')
      args[key] = value
    })
  }

  let parsedChannels = config.channels(args)
  if (isPromise(parsedChannels)) {
    parsedChannels = await parsedChannels
  }
  parsedChannels = parsedChannels.map((channel: epgGrabber.Channel) => {
    channel.site = config.site

    return channel
  })

  const newChannelList = new ChannelList({ channels: [] })
  parsedChannels.forEach((channel: epgGrabber.Channel) => {
    if (!channel.site_id) return

    const found: epgGrabber.Channel | undefined = channelList.get(channel.site_id)

    if (found) {
      channel.xmltv_id = found.xmltv_id
      channel.lang = found.lang
    }

    newChannelList.add(channel)
  })

  newChannelList.sort()

  await storage.save(outputFilepath, newChannelList.toString())

  logger.info(`File '${outputFilepath}' successfully saved`)
}

main()
import { Storage, File } from '@freearhey/storage-js'
import { Collection, Logger } from '@freearhey/core'
import epgGrabber, { EPGGrabber } from 'epg-grabber'
import { generateChannelsXML } from '../../core'
import { pathToFileURL } from 'node:url'
import { Channel } from '../../models'
import { Command } from 'commander'

interface SiteConfigChannelData {
  xmltv_id: string
  name: string
  site_id: string
  lang?: string
  logo?: string
  url?: string
  lcn?: string
}
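// Shape of the objects a site config's channels() is expected to resolve to; optional
// fields fall back to null when they are copied onto the Channel model below.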

const program = new Command()
program
  .requiredOption('-c, --config <config>', 'Config file')
  .option('-s, --set [args...]', 'Set custom arguments')
  .option('-o, --output <output>', 'Output file')
  .parse(process.argv)

interface ParseOptions {
  config: string
  set?: string
  output?: string
  clean?: boolean
}

const options: ParseOptions = program.opts()

async function main() {
  function isPromise(promise: object[] | Promise<object[]>) {
    return (
      !!promise &&
      typeof promise === 'object' &&
      typeof (promise as Promise<object[]>).then === 'function'
    )
  }

  const storage = new Storage()
  const logger = new Logger()
  const file = new File(options.config)
  const dir = file.dirname()
  const config = (await import(pathToFileURL(options.config).toString())).default
  const outputFilepath = options.output || `${dir}/${config.site}.channels.xml`

  const args: Record<string, string> = {}

  if (Array.isArray(options.set)) {
    options.set.forEach((arg: string) => {
      const [key, value] = arg.split(':')
      args[key] = value
    })
  }

  let channelsFromXML = new Collection<Channel>()
  if (await storage.exists(outputFilepath)) {
    const xml = await storage.load(outputFilepath)
    const parsedChannels = EPGGrabber.parseChannelsXML(xml)
    channelsFromXML = new Collection(parsedChannels).map(
      (channel: epgGrabber.Channel) => new Channel(channel.toObject())
    )
  }

  let configChannels = config.channels(args)
  if (isPromise(configChannels)) {
    configChannels = await configChannels
  }

  const channelsFromConfig = new Collection<SiteConfigChannelData>(configChannels).map(
    (data: SiteConfigChannelData) => {
      return new Channel({
        xmltv_id: data.xmltv_id,
        name: data.name,
        site_id: data.site_id,
        lang: data.lang || null,
        logo: data.logo || null,
        url: data.url || null,
        lcn: data.lcn || null,
        site: config.site,
        index: -1
      })
    }
  )

  const newChannelList = new Collection<Channel>()
  channelsFromConfig.forEach((channel: Channel) => {
    if (!channel.site_id) return

    const found: Channel | undefined = channelsFromXML.find(
      (_channel: Channel) => _channel.site_id == channel.site_id
    )

    if (found) {
      channel.xmltv_id = found.xmltv_id
      channel.lang = found.lang
    }

    newChannelList.add(channel)
  })

  newChannelList.sortBy([
    (channel: Channel) => channel.lang || '_',
    (channel: Channel) => (channel.xmltv_id ? channel.xmltv_id.toLowerCase() : '0'),
    (channel: Channel) => channel.site_id
  ])

  const xml = generateChannelsXML(newChannelList)

  await storage.save(outputFilepath, xml)

  logger.info(`File '${outputFilepath}' successfully saved`)
}

main()
@@ -1,100 +1,96 @@
import { ChannelsParser, DataLoader, DataProcessor } from '../../core'
import { DataProcessorData } from '../../types/dataProcessor'
import { Storage, Dictionary, File } from '@freearhey/core'
import { DataLoaderData } from '../../types/dataLoader'
import { ChannelList } from '../../models'
import { DATA_DIR } from '../../constants'
import epgGrabber from 'epg-grabber'
import { program } from 'commander'
import chalk from 'chalk'
import langs from 'langs'

program.argument('[filepath...]', 'Path to *.channels.xml files to validate').parse(process.argv)

interface ValidationError {
  type: 'duplicate' | 'wrong_channel_id' | 'wrong_feed_id' | 'wrong_lang'
  name: string
  lang?: string
  xmltv_id?: string
  site_id?: string
  logo?: string
}
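// Duplicate site_id entries and unknown language codes are counted as errors, while
// unresolved channel or feed IDs are only warnings; the script exits with code 1
// only when at least one error was found.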

async function main() {
  const processor = new DataProcessor()
  const dataStorage = new Storage(DATA_DIR)
  const loader = new DataLoader({ storage: dataStorage })
  const data: DataLoaderData = await loader.load()
  const { channelsKeyById, feedsKeyByStreamId }: DataProcessorData = processor.process(data)
  const parser = new ChannelsParser({
    storage: new Storage()
  })

  let totalFiles = 0
  let totalErrors = 0
  let totalWarnings = 0

  const storage = new Storage()
  const files = program.args.length ? program.args : await storage.list('sites/**/*.channels.xml')
  for (const filepath of files) {
    const file = new File(filepath)
    if (file.extension() !== 'xml') continue

    const channelList: ChannelList = await parser.parse(filepath)

    const bufferBySiteId = new Dictionary()
    const errors: ValidationError[] = []
    channelList.channels.forEach((channel: epgGrabber.Channel) => {
      const bufferId: string = channel.site_id
      if (bufferBySiteId.missing(bufferId)) {
        bufferBySiteId.set(bufferId, true)
      } else {
        errors.push({ type: 'duplicate', ...channel })
        totalErrors++
      }

      if (!langs.where('1', channel.lang ?? '')) {
        errors.push({ type: 'wrong_lang', ...channel })
        totalErrors++
      }

      if (!channel.xmltv_id) return
      const [channelId, feedId] = channel.xmltv_id.split('@')

      const foundChannel = channelsKeyById.get(channelId)
      if (!foundChannel) {
        errors.push({ type: 'wrong_channel_id', ...channel })
        totalWarnings++
      }

      if (feedId) {
        const foundFeed = feedsKeyByStreamId.get(channel.xmltv_id)
        if (!foundFeed) {
          errors.push({ type: 'wrong_feed_id', ...channel })
          totalWarnings++
        }
      }
    })

    if (errors.length) {
      console.log(chalk.underline(filepath))
      console.table(errors, ['type', 'lang', 'xmltv_id', 'site_id', 'name'])
      console.log()
      totalFiles++
    }
  }

  const totalProblems = totalWarnings + totalErrors
  if (totalProblems > 0) {
    console.log(
      chalk.red(
        `${totalProblems} problems (${totalErrors} errors, ${totalWarnings} warnings) in ${totalFiles} file(s)`
      )
    )
    if (totalErrors > 0) {
      process.exit(1)
    }
  }
}

main()
import { Collection, Dictionary } from '@freearhey/core'
import { Storage, File } from '@freearhey/storage-js'
import epgGrabber, { EPGGrabber } from 'epg-grabber'
import { loadData, data } from '../../api'
import { Channel } from '../../models'
import { program } from 'commander'
import chalk from 'chalk'
import langs from 'langs'

program.argument('[filepath...]', 'Path to *.channels.xml files to validate').parse(process.argv)

interface ValidationError {
  type: 'duplicate' | 'wrong_channel_id' | 'wrong_feed_id' | 'wrong_lang'
  name: string
  lang: string | null
  xmltv_id: string | null
  site_id: string | null
  logo: string | null
}

async function main() {
  await loadData()
  const { channelsKeyById, feedsKeyByStreamId } = data

  let totalFiles = 0
  let totalErrors = 0
  let totalWarnings = 0

  const storage = new Storage()
  const files = program.args.length ? program.args : await storage.list('sites/**/*.channels.xml')
  for (const filepath of files) {
    const file = new File(filepath)
    if (file.extension() !== 'xml') continue

    const xml = await storage.load(filepath)
    const parsedChannels = EPGGrabber.parseChannelsXML(xml)
    const channelList = new Collection(parsedChannels).map(
      (channel: epgGrabber.Channel) => new Channel(channel.toObject())
    )

    const bufferBySiteId = new Dictionary()
    const errors: ValidationError[] = []
    channelList.forEach((channel: Channel) => {
      const bufferId: string = channel.site_id
      if (bufferBySiteId.missing(bufferId)) {
        bufferBySiteId.set(bufferId, true)
      } else {
        errors.push({ type: 'duplicate', ...channel.toObject() })
        totalErrors++
      }

      if (!langs.where('1', channel.lang ?? '')) {
        errors.push({ type: 'wrong_lang', ...channel.toObject() })
        totalErrors++
      }

      if (!channel.xmltv_id) return
      const [channelId, feedId] = channel.xmltv_id.split('@')

      const foundChannel = channelsKeyById.get(channelId)
      if (!foundChannel) {
        errors.push({ type: 'wrong_channel_id', ...channel.toObject() })
        totalWarnings++
      }

      if (feedId) {
        const foundFeed = feedsKeyByStreamId.get(channel.xmltv_id)
        if (!foundFeed) {
          errors.push({ type: 'wrong_feed_id', ...channel.toObject() })
          totalWarnings++
        }
      }
    })

    if (errors.length) {
      console.log(chalk.underline(filepath))
      console.table(errors, ['type', 'lang', 'xmltv_id', 'site_id', 'name'])
      console.log()
      totalFiles++
    }
  }

  const totalProblems = totalWarnings + totalErrors
  if (totalProblems > 0) {
    console.log(
      chalk.red(
        `${totalProblems} problems (${totalErrors} errors, ${totalWarnings} warnings) in ${totalFiles} file(s)`
      )
    )
    if (totalErrors > 0) {
      process.exit(1)
    }
  }
}

main()