Update scripts

This commit is contained in:
freearhey
2025-10-22 02:27:22 +03:00
parent f701e0b830
commit 0b046f1f3c
50 changed files with 1655 additions and 2367 deletions

117
scripts/api.ts Normal file
View File

@@ -0,0 +1,117 @@
import { Collection, Dictionary } from '@freearhey/core'
import { DATA_DIR } from './constants'
import cliProgress from 'cli-progress'
import * as sdk from '@iptv-org/sdk'
// Module-level cache of processed API data. Populated by loadData() and
// consumed by searchChannels() and by other scripts importing `data`.
const data = {
channelsKeyById: new Dictionary<sdk.Models.Channel>(),
feedsKeyByStreamId: new Dictionary<sdk.Models.Feed>(),
feedsGroupedByChannelId: new Dictionary<sdk.Models.Feed[]>()
}
// Minimal shape of the index returned by sdk.SearchEngine.createIndex().
interface SearchIndex {
search: (query: string) => sdk.Types.ChannelSearchableData[]
}
// Assigned by loadData(); undefined until then — searchChannels() guards for that.
let searchIndex: SearchIndex
/**
 * Loads previously downloaded API data from disk, indexes it into the
 * module-level `data` dictionaries and builds the channel search index.
 */
async function loadData() {
  const manager = new sdk.DataManager({ dataDir: DATA_DIR })
  await manager.loadFromDisk()
  manager.processData()

  const processed = manager.getProcessedData()
  const channels = processed.channels
  const feeds = processed.feeds

  // Index by channel ID, by stream ID, and grouped per channel for feed lookups.
  data.channelsKeyById = channels.keyBy((channel: sdk.Models.Channel) => channel.id)
  data.feedsKeyByStreamId = feeds.keyBy((feed: sdk.Models.Feed) => feed.getStreamId())
  data.feedsGroupedByChannelId = feeds.groupBy((feed: sdk.Models.Feed) => feed.channel)

  searchIndex = sdk.SearchEngine.createIndex<sdk.Models.Channel>(channels)
}
/**
 * Downloads every API data file into DATA_DIR, rendering one progress bar
 * per file via cli-progress. Failures of individual downloads do not abort
 * the others (Promise.allSettled).
 */
async function downloadData() {
  // Human-readable byte count, e.g. 1536 -> "1.5 KB".
  function formatBytes(bytes: number) {
    if (bytes === 0) return '0 B'
    const k = 1024
    const sizes = ['B', 'KB', 'MB', 'GB']
    const i = Math.floor(Math.log(bytes) / Math.log(k))
    return parseFloat((bytes / Math.pow(k, i)).toFixed(1)) + ' ' + sizes[i]
  }

  const files = [
    'blocklist',
    'categories',
    'channels',
    'cities',
    'countries',
    'feeds',
    'guides',
    'languages',
    'logos',
    'regions',
    'streams',
    'subdivisions',
    'timezones'
  ]

  const multiBar = new cliProgress.MultiBar({
    stopOnComplete: true,
    hideCursor: true,
    forceRedraw: true,
    barsize: 36,
    // Custom renderer: "<filename> [=====----] 42.00% | ETA: 3s | 1.2 MB | 256 KB/s"
    format(options, params, payload) {
      const filename = payload.filename.padEnd(18, ' ')
      const barsize = options.barsize || 40
      const percent = (params.progress * 100).toFixed(2)
      const speed = payload.speed ? formatBytes(payload.speed) + '/s' : 'N/A'
      const total = formatBytes(params.total)
      const completeSize = Math.round(params.progress * barsize)
      const incompleteSize = barsize - completeSize
      const bar =
        options.barCompleteString && options.barIncompleteString
          ? options.barCompleteString.substr(0, completeSize) +
            options.barGlue +
            options.barIncompleteString.substr(0, incompleteSize)
          : '-'.repeat(barsize)
      // Fix: previously printed the literal "$(unknown)" instead of the
      // padded filename computed above.
      return `${filename} [${bar}] ${percent}% | ETA: ${params.eta}s | ${total} | ${speed}`
    }
  })

  const dataManager = new sdk.DataManager({ dataDir: DATA_DIR })
  const requests: Promise<unknown>[] = []
  for (const basename of files) {
    const filename = `${basename}.json`
    const progressBar = multiBar.create(0, 0, { filename })
    const request = dataManager.downloadFileToDisk(basename, {
      onDownloadProgress({ total, loaded, rate }) {
        if (total) progressBar.setTotal(total)
        progressBar.update(loaded, { speed: rate })
      }
    })
    requests.push(request)
  }

  // Promise.allSettled() never rejects, so the previous .catch() was a no-op;
  // per-file failures are reported in the settled result entries instead.
  await Promise.allSettled(requests)
}
/**
 * Returns all channels whose searchable data matches the given query.
 * Yields an empty collection until loadData() has been called.
 */
function searchChannels(query: string): Collection<sdk.Models.Channel> {
  const found = new Collection<sdk.Models.Channel>()
  if (!searchIndex) return found

  const matches = new Collection<sdk.Types.ChannelSearchableData>(searchIndex.search(query))
  matches.forEach((match: sdk.Types.ChannelSearchableData) => {
    const channel = data.channelsKeyById.get(match.id)
    if (channel) found.add(channel)
  })

  return found
}
export { data, loadData, downloadData, searchChannels }

View File

@@ -1,8 +1,9 @@
import { Logger, Collection, Storage } from '@freearhey/core' import { ChannelGuideObject } from '../../types/channel'
import { SITES_DIR, API_DIR } from '../../constants' import { SITES_DIR, API_DIR } from '../../constants'
import { GuideChannel } from '../../models' import { Logger, Collection } from '@freearhey/core'
import { ChannelsParser } from '../../core' import epgGrabber, { EPGGrabber } from 'epg-grabber'
import epgGrabber from 'epg-grabber' import { Storage } from '@freearhey/storage-js'
import { Channel } from '../../models'
import path from 'path' import path from 'path'
async function main() { async function main() {
@@ -12,24 +13,25 @@ async function main() {
logger.info('loading channels...') logger.info('loading channels...')
const sitesStorage = new Storage(SITES_DIR) const sitesStorage = new Storage(SITES_DIR)
const parser = new ChannelsParser({
storage: sitesStorage
})
const files: string[] = await sitesStorage.list('**/*.channels.xml') const files: string[] = await sitesStorage.list('**/*.channels.xml')
const channels = new Collection() const channels = new Collection<Channel>()
for (const filepath of files) { for (const filepath of files) {
const channelList = await parser.parse(filepath) const xml = await sitesStorage.load(filepath)
const parsedChannels = EPGGrabber.parseChannelsXML(xml)
const channelsFromXML = new Collection(parsedChannels).map(
(channel: epgGrabber.Channel) => new Channel(channel.toObject())
)
channelList.channels.forEach((data: epgGrabber.Channel) => { channelsFromXML.forEach((channel: Channel) => {
channels.add(new GuideChannel(data)) channels.add(channel)
}) })
} }
logger.info(`found ${channels.count()} channel(s)`) logger.info(`found ${channels.count()} channel(s)`)
const output = channels.map((channel: GuideChannel) => channel.toJSON()) const output = channels.map<ChannelGuideObject>((channel: Channel) => channel.getGuideObject())
const apiStorage = new Storage(API_DIR) const apiStorage = new Storage(API_DIR)
const outputFilename = 'guides.json' const outputFilename = 'guides.json'

View File

@@ -1,25 +1,7 @@
import { DATA_DIR } from '../../constants' import { downloadData } from '../../api'
import { Storage } from '@freearhey/core'
import { DataLoader } from '../../core'
async function main() { async function main() {
const storage = new Storage(DATA_DIR) await downloadData()
const loader = new DataLoader({ storage })
await Promise.all([
loader.download('blocklist.json'),
loader.download('categories.json'),
loader.download('channels.json'),
loader.download('countries.json'),
loader.download('languages.json'),
loader.download('regions.json'),
loader.download('subdivisions.json'),
loader.download('feeds.json'),
loader.download('timezones.json'),
loader.download('guides.json'),
loader.download('streams.json'),
loader.download('logos.json')
])
} }
main() main()

View File

@@ -1,20 +1,25 @@
import { Storage, Collection, Logger, Dictionary } from '@freearhey/core' import { loadData, data, searchChannels } from '../../api'
import type { DataProcessorData } from '../../types/dataProcessor' import epgGrabber, { EPGGrabber } from 'epg-grabber'
import type { DataLoaderData } from '../../types/dataLoader' import { Collection, Logger } from '@freearhey/core'
import { ChannelSearchableData } from '../../types/channel'
import { Channel, ChannelList, Feed } from '../../models'
import { DataProcessor, DataLoader } from '../../core'
import { select, input } from '@inquirer/prompts' import { select, input } from '@inquirer/prompts'
import { ChannelsParser } from '../../core' import { generateChannelsXML } from '../../core'
import { DATA_DIR } from '../../constants' import { Storage } from '@freearhey/storage-js'
import { Channel } from '../../models'
import nodeCleanup from 'node-cleanup' import nodeCleanup from 'node-cleanup'
import sjs from '@freearhey/search-js' import * as sdk from '@iptv-org/sdk'
import epgGrabber from 'epg-grabber'
import { Command } from 'commander' import { Command } from 'commander'
import readline from 'readline' import readline from 'readline'
interface ChoiceValue { type: string; value?: Feed | Channel } interface ChoiceValue {
interface Choice { name: string; short?: string; value: ChoiceValue; default?: boolean } type: string
value?: sdk.Models.Feed | sdk.Models.Channel
}
interface Choice {
name: string
short?: string
value: ChoiceValue
default?: boolean
}
if (process.platform === 'win32') { if (process.platform === 'win32') {
readline readline
@@ -34,11 +39,11 @@ program.argument('<filepath>', 'Path to *.channels.xml file to edit').parse(proc
const filepath = program.args[0] const filepath = program.args[0]
const logger = new Logger() const logger = new Logger()
const storage = new Storage() const storage = new Storage()
let channelList = new ChannelList({ channels: [] }) let channelsFromXML = new Collection<Channel>()
main(filepath) main(filepath)
nodeCleanup(() => { nodeCleanup(() => {
save(filepath, channelList) save(filepath, channelsFromXML)
}) })
export default async function main(filepath: string) { export default async function main(filepath: string) {
@@ -47,67 +52,46 @@ export default async function main(filepath: string) {
} }
logger.info('loading data from api...') logger.info('loading data from api...')
const processor = new DataProcessor() await loadData()
const dataStorage = new Storage(DATA_DIR)
const loader = new DataLoader({ storage: dataStorage })
const data: DataLoaderData = await loader.load()
const { channels, channelsKeyById, feedsGroupedByChannelId }: DataProcessorData =
processor.process(data)
logger.info('loading channels...') logger.info('loading channels...')
const parser = new ChannelsParser({ storage }) const xml = await storage.load(filepath)
channelList = await parser.parse(filepath) const parsedChannels = EPGGrabber.parseChannelsXML(xml)
const parsedChannelsWithoutId = channelList.channels.filter( channelsFromXML = new Collection(parsedChannels).map(
(channel: epgGrabber.Channel) => !channel.xmltv_id (channel: epgGrabber.Channel) => new Channel(channel.toObject())
) )
const channelsFromXMLWithoutId = channelsFromXML.filter((channel: Channel) => !channel.xmltv_id)
logger.info( logger.info(
`found ${channelList.channels.count()} channels (including ${parsedChannelsWithoutId.count()} without ID)` `found ${channelsFromXML.count()} channels (including ${channelsFromXMLWithoutId.count()} without ID)`
) )
logger.info('creating search index...') logger.info('starting...')
const items = channels.map((channel: Channel) => channel.getSearchable()).all() console.log()
const searchIndex = sjs.createIndex(items, {
searchable: ['name', 'altNames', 'guideNames', 'streamNames', 'feedFullNames']
})
logger.info('starting...\n') for (const channel of channelsFromXMLWithoutId.all()) {
for (const channel of parsedChannelsWithoutId.all()) {
try { try {
channel.xmltv_id = await selectChannel( channel.xmltv_id = await selectChannel(channel)
channel, } catch {
searchIndex,
feedsGroupedByChannelId,
channelsKeyById
)
} catch (err) {
logger.info(err.message)
break break
} }
} }
parsedChannelsWithoutId.forEach((channel: epgGrabber.Channel) => { channelsFromXMLWithoutId.forEach((channel: epgGrabber.Channel) => {
if (channel.xmltv_id === '-') { if (channel.xmltv_id === '-') {
channel.xmltv_id = '' channel.xmltv_id = ''
} }
}) })
} }
async function selectChannel( async function selectChannel(channel: epgGrabber.Channel): Promise<string> {
channel: epgGrabber.Channel,
searchIndex,
feedsGroupedByChannelId: Dictionary,
channelsKeyById: Dictionary
): Promise<string> {
const query = escapeRegex(channel.name) const query = escapeRegex(channel.name)
const similarChannels = searchIndex const similarChannels = searchChannels(query)
.search(query) const choices = getChoicesForChannel(similarChannels).all()
.map((item: ChannelSearchableData) => channelsKeyById.get(item.id))
const selected: ChoiceValue = await select({ const selected: ChoiceValue = await select({
message: `Select channel ID for "${channel.name}" (${channel.site_id}):`, message: `Select channel ID for "${channel.name}" (${channel.site_id}):`,
choices: getChannelChoises(new Collection(similarChannels)), choices,
pageSize: 10 pageSize: 10
}) })
@@ -117,14 +101,14 @@ async function selectChannel(
case 'type': { case 'type': {
const typedChannelId = await input({ message: ' Channel ID:' }) const typedChannelId = await input({ message: ' Channel ID:' })
if (!typedChannelId) return '' if (!typedChannelId) return ''
const selectedFeedId = await selectFeed(typedChannelId, feedsGroupedByChannelId) const selectedFeedId = await selectFeed(typedChannelId)
if (selectedFeedId === '-') return typedChannelId if (selectedFeedId === '-') return typedChannelId
return [typedChannelId, selectedFeedId].join('@') return [typedChannelId, selectedFeedId].join('@')
} }
case 'channel': { case 'channel': {
const selectedChannel = selected.value const selectedChannel = selected.value
if (!selectedChannel) return '' if (!selectedChannel) return ''
const selectedFeedId = await selectFeed(selectedChannel.id || '', feedsGroupedByChannelId) const selectedFeedId = await selectFeed(selectedChannel.id || '')
if (selectedFeedId === '-') return selectedChannel.id || '' if (selectedFeedId === '-') return selectedChannel.id || ''
return [selectedChannel.id, selectedFeedId].join('@') return [selectedChannel.id, selectedFeedId].join('@')
} }
@@ -133,11 +117,9 @@ async function selectChannel(
return '' return ''
} }
async function selectFeed(channelId: string, feedsGroupedByChannelId: Dictionary): Promise<string> { async function selectFeed(channelId: string): Promise<string> {
const channelFeeds = feedsGroupedByChannelId.has(channelId) const channelFeeds = new Collection(data.feedsGroupedByChannelId.get(channelId))
? new Collection(feedsGroupedByChannelId.get(channelId)) const choices = getChoicesForFeed(channelFeeds).all()
: new Collection()
const choices = getFeedChoises(channelFeeds)
const selected: ChoiceValue = await select({ const selected: ChoiceValue = await select({
message: `Select feed ID for "${channelId}":`, message: `Select feed ID for "${channelId}":`,
@@ -159,13 +141,13 @@ async function selectFeed(channelId: string, feedsGroupedByChannelId: Dictionary
return '' return ''
} }
function getChannelChoises(channels: Collection): Choice[] { function getChoicesForChannel(channels: Collection<sdk.Models.Channel>): Collection<Choice> {
const choises: Choice[] = [] const choices = new Collection<Choice>()
channels.forEach((channel: Channel) => { channels.forEach((channel: sdk.Models.Channel) => {
const names = new Collection([channel.name, ...channel.getAltNames().all()]).uniq().join(', ') const names = new Collection([channel.name, ...channel.alt_names]).uniq().join(', ')
choises.push({ choices.add({
value: { value: {
type: 'channel', type: 'channel',
value: channel value: channel
@@ -175,40 +157,42 @@ function getChannelChoises(channels: Collection): Choice[] {
}) })
}) })
choises.push({ name: 'Type...', value: { type: 'type' } }) choices.add({ name: 'Type...', value: { type: 'type' } })
choises.push({ name: 'Skip', value: { type: 'skip' } }) choices.add({ name: 'Skip', value: { type: 'skip' } })
return choises return choices
} }
function getFeedChoises(feeds: Collection): Choice[] { function getChoicesForFeed(feeds: Collection<sdk.Models.Feed>): Collection<Choice> {
const choises: Choice[] = [] const choices = new Collection<Choice>()
feeds.forEach((feed: Feed) => { feeds.forEach((feed: sdk.Models.Feed) => {
let name = `${feed.id} (${feed.name})` let name = `${feed.id} (${feed.name})`
if (feed.isMain) name += ' [main]' if (feed.is_main) name += ' [main]'
choises.push({ choices.add({
value: { value: {
type: 'feed', type: 'feed',
value: feed value: feed
}, },
default: feed.isMain, default: feed.is_main,
name, name,
short: feed.id short: feed.id
}) })
}) })
choises.push({ name: 'Type...', value: { type: 'type' } }) choices.add({ name: 'Type...', value: { type: 'type' } })
choises.push({ name: 'Skip', value: { type: 'skip' } }) choices.add({ name: 'Skip', value: { type: 'skip' } })
return choises return choices
} }
function save(filepath: string, channelList: ChannelList) { function save(filepath: string, channelsFromXML: Collection<Channel>) {
if (!storage.existsSync(filepath)) return if (!storage.existsSync(filepath)) return
storage.saveSync(filepath, channelList.toString()) const xml = generateChannelsXML(channelsFromXML)
logger.info(`\nFile '${filepath}' successfully saved`) storage.saveSync(filepath, xml)
console.log()
logger.info(`File '${filepath}' successfully saved`)
} }
function escapeRegex(string: string) { function escapeRegex(string: string) {

View File

@@ -0,0 +1,60 @@
import { Collection, Logger } from '@freearhey/core'
import epgGrabber, { EPGGrabber } from 'epg-grabber'
import { generateChannelsXML } from '../../core'
import { Storage } from '@freearhey/storage-js'
import { SITES_DIR } from '../../constants'
import { data, loadData } from '../../api'
import { Channel } from '../../models'
import { program } from 'commander'
program.argument('[filepath...]', 'Path to file to format').parse(process.argv)

/**
 * Formats *.channels.xml files (all under SITES_DIR, or only those passed as
 * CLI arguments): re-points each channel's xmltv_id at a valid stream ID —
 * falling back to the channel's main feed, or clearing it when the ID is
 * unknown — sorts channels by site_id and writes the XML back to disk.
 */
async function main() {
  const logger = new Logger()

  logger.info('loading data from api...')
  await loadData()

  logger.info('loading *.channels.xml files...')
  const storage = new Storage()
  // With no CLI arguments, process every channels file under SITES_DIR.
  const files = program.args.length
    ? program.args
    : await storage.list(`${SITES_DIR}/**/*.channels.xml`)
  logger.info(`found ${files.length} file(s)`)

  // Fix: log message typo ("formating" -> "formatting").
  logger.info('formatting...')
  for (const filepath of files) {
    if (!storage.existsSync(filepath)) continue

    const xml = await storage.load(filepath)
    const parsedChannels = EPGGrabber.parseChannelsXML(xml)
    const channelsFromXML = new Collection(parsedChannels).map(
      (channel: epgGrabber.Channel) => new Channel(channel.toObject())
    )

    channelsFromXML.forEach((channel: Channel) => {
      if (!channel.xmltv_id) return
      // Already a valid stream ID — keep it.
      if (data.feedsKeyByStreamId.get(channel.xmltv_id)) return
      // Otherwise try to resolve the ID via the channel's main feed.
      const channelData = data.channelsKeyById.get(channel.xmltv_id)
      if (channelData) {
        const mainFeed = channelData.getMainFeed()
        if (mainFeed) {
          channel.xmltv_id = mainFeed.getStreamId()
          return
        }
      }
      // Unknown ID and no main feed: clear it.
      channel.xmltv_id = ''
    })

    // NOTE(review): if Collection.sortBy returns a new collection rather than
    // sorting in place, this result needs to be assigned — confirm against
    // @freearhey/core.
    channelsFromXML.sortBy((channel: Channel) => channel.site_id)

    const output = generateChannelsXML(channelsFromXML)
    await storage.save(filepath, output)
  }
}

main()

View File

@@ -1,7 +1,7 @@
import chalk from 'chalk'
import { program } from 'commander'
import { Storage, File } from '@freearhey/core'
import { XmlDocument, XsdValidator, XmlValidateError, ErrorDetail } from 'libxml2-wasm' import { XmlDocument, XsdValidator, XmlValidateError, ErrorDetail } from 'libxml2-wasm'
import { Storage, File } from '@freearhey/storage-js'
import { program } from 'commander'
import chalk from 'chalk'
const xsd = `<?xml version="1.0" encoding="UTF-8"?> const xsd = `<?xml version="1.0" encoding="UTF-8"?>
<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified"> <xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified">

View File

@@ -1,10 +1,21 @@
import { Logger, File, Storage } from '@freearhey/core' import { Storage, File } from '@freearhey/storage-js'
import { ChannelsParser } from '../../core' import { Collection, Logger } from '@freearhey/core'
import { ChannelList } from '../../models' import epgGrabber, { EPGGrabber } from 'epg-grabber'
import { generateChannelsXML } from '../../core'
import { pathToFileURL } from 'node:url' import { pathToFileURL } from 'node:url'
import epgGrabber from 'epg-grabber' import { Channel } from '../../models'
import { Command } from 'commander' import { Command } from 'commander'
interface SiteConfigChannelData {
xmltv_id: string
name: string
site_id: string
lang?: string
logo?: string
url?: string
lcn?: string
}
const program = new Command() const program = new Command()
program program
.requiredOption('-c, --config <config>', 'Config file') .requiredOption('-c, --config <config>', 'Config file')
@@ -32,17 +43,11 @@ async function main() {
const storage = new Storage() const storage = new Storage()
const logger = new Logger() const logger = new Logger()
const parser = new ChannelsParser({ storage })
const file = new File(options.config) const file = new File(options.config)
const dir = file.dirname() const dir = file.dirname()
const config = (await import(pathToFileURL(options.config).toString())).default const config = (await import(pathToFileURL(options.config).toString())).default
const outputFilepath = options.output || `${dir}/${config.site}.channels.xml` const outputFilepath = options.output || `${dir}/${config.site}.channels.xml`
let channelList = new ChannelList({ channels: [] })
if (await storage.exists(outputFilepath)) {
channelList = await parser.parse(outputFilepath)
}
const args: Record<string, string> = {} const args: Record<string, string> = {}
if (Array.isArray(options.set)) { if (Array.isArray(options.set)) {
@@ -52,21 +57,43 @@ async function main() {
}) })
} }
let parsedChannels = config.channels(args) let channelsFromXML = new Collection<Channel>()
if (isPromise(parsedChannels)) { if (await storage.exists(outputFilepath)) {
parsedChannels = await parsedChannels const xml = await storage.load(outputFilepath)
const parsedChannels = EPGGrabber.parseChannelsXML(xml)
channelsFromXML = new Collection(parsedChannels).map(
(channel: epgGrabber.Channel) => new Channel(channel.toObject())
)
} }
parsedChannels = parsedChannels.map((channel: epgGrabber.Channel) => {
channel.site = config.site
return channel let configChannels = config.channels(args)
}) if (isPromise(configChannels)) {
configChannels = await configChannels
}
const newChannelList = new ChannelList({ channels: [] }) const channelsFromConfig = new Collection<SiteConfigChannelData>(configChannels).map(
parsedChannels.forEach((channel: epgGrabber.Channel) => { (data: SiteConfigChannelData) => {
return new Channel({
xmltv_id: data.xmltv_id,
name: data.name,
site_id: data.site_id,
lang: data.lang || null,
logo: data.logo || null,
url: data.url || null,
lcn: data.lcn || null,
site: config.site,
index: -1
})
}
)
const newChannelList = new Collection<Channel>()
channelsFromConfig.forEach((channel: Channel) => {
if (!channel.site_id) return if (!channel.site_id) return
const found: epgGrabber.Channel | undefined = channelList.get(channel.site_id) const found: Channel | undefined = channelsFromXML.find(
(_channel: Channel) => _channel.site_id == channel.site_id
)
if (found) { if (found) {
channel.xmltv_id = found.xmltv_id channel.xmltv_id = found.xmltv_id
@@ -76,9 +103,15 @@ async function main() {
newChannelList.add(channel) newChannelList.add(channel)
}) })
newChannelList.sort() newChannelList.sortBy([
(channel: Channel) => channel.lang || '_',
(channel: Channel) => (channel.xmltv_id ? channel.xmltv_id.toLowerCase() : '0'),
(channel: Channel) => channel.site_id
])
await storage.save(outputFilepath, newChannelList.toString()) const xml = generateChannelsXML(newChannelList)
await storage.save(outputFilepath, xml)
logger.info(`File '${outputFilepath}' successfully saved`) logger.info(`File '${outputFilepath}' successfully saved`)
} }

View File

@@ -1,10 +1,8 @@
import { ChannelsParser, DataLoader, DataProcessor } from '../../core' import { Collection, Dictionary } from '@freearhey/core'
import { DataProcessorData } from '../../types/dataProcessor' import { Storage, File } from '@freearhey/storage-js'
import { Storage, Dictionary, File } from '@freearhey/core' import epgGrabber, { EPGGrabber } from 'epg-grabber'
import { DataLoaderData } from '../../types/dataLoader' import { loadData, data } from '../../api'
import { ChannelList } from '../../models' import { Channel } from '../../models'
import { DATA_DIR } from '../../constants'
import epgGrabber from 'epg-grabber'
import { program } from 'commander' import { program } from 'commander'
import chalk from 'chalk' import chalk from 'chalk'
import langs from 'langs' import langs from 'langs'
@@ -14,21 +12,15 @@ program.argument('[filepath...]', 'Path to *.channels.xml files to validate').pa
interface ValidationError { interface ValidationError {
type: 'duplicate' | 'wrong_channel_id' | 'wrong_feed_id' | 'wrong_lang' type: 'duplicate' | 'wrong_channel_id' | 'wrong_feed_id' | 'wrong_lang'
name: string name: string
lang?: string lang: string | null
xmltv_id?: string xmltv_id: string | null
site_id?: string site_id: string | null
logo?: string logo: string | null
} }
async function main() { async function main() {
const processor = new DataProcessor() await loadData()
const dataStorage = new Storage(DATA_DIR) const { channelsKeyById, feedsKeyByStreamId } = data
const loader = new DataLoader({ storage: dataStorage })
const data: DataLoaderData = await loader.load()
const { channelsKeyById, feedsKeyByStreamId }: DataProcessorData = processor.process(data)
const parser = new ChannelsParser({
storage: new Storage()
})
let totalFiles = 0 let totalFiles = 0
let totalErrors = 0 let totalErrors = 0
@@ -40,21 +32,25 @@ async function main() {
const file = new File(filepath) const file = new File(filepath)
if (file.extension() !== 'xml') continue if (file.extension() !== 'xml') continue
const channelList: ChannelList = await parser.parse(filepath) const xml = await storage.load(filepath)
const parsedChannels = EPGGrabber.parseChannelsXML(xml)
const channelList = new Collection(parsedChannels).map(
(channel: epgGrabber.Channel) => new Channel(channel.toObject())
)
const bufferBySiteId = new Dictionary() const bufferBySiteId = new Dictionary()
const errors: ValidationError[] = [] const errors: ValidationError[] = []
channelList.channels.forEach((channel: epgGrabber.Channel) => { channelList.forEach((channel: Channel) => {
const bufferId: string = channel.site_id const bufferId: string = channel.site_id
if (bufferBySiteId.missing(bufferId)) { if (bufferBySiteId.missing(bufferId)) {
bufferBySiteId.set(bufferId, true) bufferBySiteId.set(bufferId, true)
} else { } else {
errors.push({ type: 'duplicate', ...channel }) errors.push({ type: 'duplicate', ...channel.toObject() })
totalErrors++ totalErrors++
} }
if (!langs.where('1', channel.lang ?? '')) { if (!langs.where('1', channel.lang ?? '')) {
errors.push({ type: 'wrong_lang', ...channel }) errors.push({ type: 'wrong_lang', ...channel.toObject() })
totalErrors++ totalErrors++
} }
@@ -63,14 +59,14 @@ async function main() {
const foundChannel = channelsKeyById.get(channelId) const foundChannel = channelsKeyById.get(channelId)
if (!foundChannel) { if (!foundChannel) {
errors.push({ type: 'wrong_channel_id', ...channel }) errors.push({ type: 'wrong_channel_id', ...channel.toObject() })
totalWarnings++ totalWarnings++
} }
if (feedId) { if (feedId) {
const foundFeed = feedsKeyByStreamId.get(channel.xmltv_id) const foundFeed = feedsKeyByStreamId.get(channel.xmltv_id)
if (!foundFeed) { if (!foundFeed) {
errors.push({ type: 'wrong_feed_id', ...channel }) errors.push({ type: 'wrong_feed_id', ...channel.toObject() })
totalWarnings++ totalWarnings++
} }
} }

View File

@@ -1,10 +1,16 @@
import { Logger, Timer, Storage, Collection } from '@freearhey/core' import { Logger, Timer, Collection, Template } from '@freearhey/core'
import { QueueCreator, Job, ChannelsParser } from '../../core' import epgGrabber, { EPGGrabber, EPGGrabberMock } from 'epg-grabber'
import { loadJs, parseProxy, SiteConfig, Queue } from '../../core'
import { Channel, Guide, Program } from '../../models'
import { SocksProxyAgent } from 'socks-proxy-agent'
import { PromisyClass, TaskQueue } from 'cwait'
import { Storage } from '@freearhey/storage-js'
import { QueueItem } from '../../types/queue'
import { Option, program } from 'commander' import { Option, program } from 'commander'
import { SITES_DIR } from '../../constants' import { SITES_DIR } from '../../constants'
import { Channel } from 'epg-grabber' import { data, loadData } from '../../api'
import dayjs, { Dayjs } from 'dayjs'
import path from 'path' import path from 'path'
import { ChannelList } from '../../models'
program program
.addOption(new Option('-s, --site <name>', 'Name of the site to parse')) .addOption(new Option('-s, --site <name>', 'Name of the site to parse'))
@@ -17,14 +23,14 @@ program
.addOption(new Option('-o, --output <path>', 'Path to output file').default('guide.xml')) .addOption(new Option('-o, --output <path>', 'Path to output file').default('guide.xml'))
.addOption(new Option('-l, --lang <codes>', 'Filter channels by languages (ISO 639-1 codes)')) .addOption(new Option('-l, --lang <codes>', 'Filter channels by languages (ISO 639-1 codes)'))
.addOption( .addOption(
new Option('-t, --timeout <milliseconds>', 'Override the default timeout for each request').env( new Option('-t, --timeout <milliseconds>', 'Override the default timeout for each request')
'TIMEOUT' .env('TIMEOUT')
) .argParser(parseInt)
) )
.addOption( .addOption(
new Option('-d, --delay <milliseconds>', 'Override the default delay between request').env( new Option('-d, --delay <milliseconds>', 'Override the default delay between request')
'DELAY' .env('DELAY')
) .argParser(parseInt)
) )
.addOption(new Option('-x, --proxy <url>', 'Use the specified proxy').env('PROXY')) .addOption(new Option('-x, --proxy <url>', 'Use the specified proxy').env('PROXY'))
.addOption( .addOption(
@@ -32,12 +38,13 @@ program
'--days <days>', '--days <days>',
'Override the number of days for which the program will be loaded (defaults to the value from the site config)' 'Override the number of days for which the program will be loaded (defaults to the value from the site config)'
) )
.argParser(value => parseInt(value)) .argParser(parseInt)
.env('DAYS') .env('DAYS')
) )
.addOption( .addOption(
new Option('--maxConnections <number>', 'Limit on the number of concurrent requests') new Option('--maxConnections <number>', 'Limit on the number of concurrent requests')
.default(1) .default(1)
.argParser(parseInt)
.env('MAX_CONNECTIONS') .env('MAX_CONNECTIONS')
) )
.addOption( .addOption(
@@ -48,15 +55,15 @@ program
.addOption(new Option('--curl', 'Display each request as CURL').default(false).env('CURL')) .addOption(new Option('--curl', 'Display each request as CURL').default(false).env('CURL'))
.parse() .parse()
export interface GrabOptions { interface GrabOptions {
site?: string site?: string
channels?: string channels?: string
output: string output: string
gzip: boolean gzip: boolean
curl: boolean curl: boolean
maxConnections: number maxConnections: number
timeout?: string timeout?: number
delay?: string delay?: number
lang?: string lang?: string
days?: number days?: number
proxy?: string proxy?: string
@@ -70,14 +77,13 @@ async function main() {
const logger = new Logger() const logger = new Logger()
logger.start('starting...') logger.info('starting...')
logger.info('config:') logger.info('config:')
logger.tree(options) logger.tree(options)
logger.info('loading channels...') logger.info('loading channels...')
const storage = new Storage() const storage = new Storage()
const parser = new ChannelsParser({ storage })
let files: string[] = [] let files: string[] = []
if (options.site) { if (options.site) {
@@ -88,46 +94,196 @@ async function main() {
files = await storage.list(options.channels) files = await storage.list(options.channels)
} }
let channels = new Collection() let channelsFromXML = new Collection<Channel>()
for (const filepath of files) { for (const filepath of files) {
const channelList: ChannelList = await parser.parse(filepath) const xml = await storage.load(filepath)
const parsedChannels = EPGGrabber.parseChannelsXML(xml)
const _channelsFromXML = new Collection(parsedChannels).map(
(channel: epgGrabber.Channel) => new Channel(channel.toObject())
)
channels = channels.concat(channelList.channels) channelsFromXML.concat(_channelsFromXML)
} }
if (options.lang) { if (options.lang) {
channels = channels.filter((channel: Channel) => { channelsFromXML = channelsFromXML.filter((channel: Channel) => {
if (!options.lang || !channel.lang) return true if (!options.lang) return true
return options.lang.includes(channel.lang) return options.lang.includes(channel.lang)
}) })
} }
logger.info(` found ${channels.count()} channel(s)`) logger.info(`found ${channelsFromXML.count()} channel(s)`)
logger.info('loading api data...')
await loadData()
logger.info('creating queue...')
let index = 0
const queue = new Queue()
for (const channel of channelsFromXML.all()) {
channel.index = index++
if (!channel.site || !channel.site_id || !channel.name) continue
const configObject = await loadJs(channel.getConfigPath())
const siteConfig = new SiteConfig(configObject)
siteConfig.filepath = channel.getConfigPath()
if (options.timeout !== undefined) {
siteConfig.request = { ...siteConfig.request, ...{ timeout: options.timeout } }
}
if (options.delay !== undefined) siteConfig.delay = options.delay
if (options.curl !== undefined) siteConfig.curl = options.curl
if (options.proxy !== undefined) {
const proxy = parseProxy(options.proxy)
if (
proxy.protocol &&
['socks', 'socks5', 'socks5h', 'socks4', 'socks4a'].includes(String(proxy.protocol))
) {
const socksProxyAgent = new SocksProxyAgent(options.proxy)
siteConfig.request = {
...siteConfig.request,
...{ httpAgent: socksProxyAgent, httpsAgent: socksProxyAgent }
}
} else {
siteConfig.request = { ...siteConfig.request, ...{ proxy } }
}
}
if (!channel.xmltv_id) channel.xmltv_id = channel.site_id
const days = options.days || siteConfig.days || 1
const currDate = dayjs.utc(process.env.CURR_DATE || new Date().toISOString())
const dates = Array.from({ length: days }, (_, day) => currDate.add(day, 'd'))
dates.forEach((date: Dayjs) => {
const key = `${channel.site}:${channel.lang}:${channel.xmltv_id}:${date.toJSON()}`
if (queue.has(key)) return
queue.add(key, {
channel,
date,
siteConfig,
error: null
})
})
}
const grabber = process.env.NODE_ENV === 'test' ? new EPGGrabberMock() : new EPGGrabber()
const taskQueue = new TaskQueue(Promise as PromisyClass, options.maxConnections)
const queueItems = queue.getItems()
const channels = new Collection<Channel>()
const programs = new Collection<Program>()
let i = 1
const total = queueItems.count()
const requests = queueItems.map(
taskQueue.wrap(async (queueItem: QueueItem) => {
const { channel, siteConfig, date } = queueItem
if (!channel.logo) {
if (siteConfig.logo) {
channel.logo = await grabber.loadLogo(channel, date)
} else {
channel.logo = getLogoForChannel(channel)
}
}
channels.add(channel)
const channelPrograms = await grabber.grab(
channel,
date,
siteConfig,
(context: epgGrabber.Types.GrabCallbackContext, error: Error | null) => {
logger.info(
` [${i}/${total}] ${context.channel.site} (${context.channel.lang}) - ${
context.channel.xmltv_id
} - ${context.date.format('MMM D, YYYY')} (${context.programs.length} programs)`
)
if (i < total) i++
if (error) {
logger.info(` ERR: ${error.message}`)
}
}
)
const _programs = new Collection<epgGrabber.Program>(channelPrograms).map<Program>(
program => new Program(program.toObject())
)
programs.concat(_programs)
})
)
logger.info('run:') logger.info('run:')
runJob({ logger, channels })
const timer = new Timer()
timer.start()
await Promise.all(requests.all())
const pathTemplate = new Template(options.output)
const channelsGroupedByKey = channels
.sortBy([(channel: Channel) => channel.index, (channel: Channel) => channel.xmltv_id])
.uniqBy((channel: Channel) => `${channel.xmltv_id}:${channel.site}:${channel.lang}`)
.groupBy((channel: Channel) => {
return pathTemplate.format({ lang: channel.lang || 'en', site: channel.site || '' })
})
const programsGroupedByKey = programs
.sortBy([(program: Program) => program.channel, (program: Program) => program.start])
.groupBy((program: Program) => {
const lang =
program.titles && program.titles.length && program.titles[0].lang
? program.titles[0].lang
: 'en'
return pathTemplate.format({ lang, site: program.site || '' })
})
for (const groupKey of channelsGroupedByKey.keys()) {
const groupChannels = new Collection(channelsGroupedByKey.get(groupKey))
const groupPrograms = new Collection(programsGroupedByKey.get(groupKey))
const guide = new Guide({
filepath: groupKey,
gzip: options.gzip,
channels: groupChannels,
programs: groupPrograms
})
await guide.save({ logger })
}
logger.success(` done in ${timer.format('HH[h] mm[m] ss[s]')}`)
} }
main() main()
async function runJob({ logger, channels }: { logger: Logger; channels: Collection }) { function getLogoForChannel(channel: Channel): string | null {
const timer = new Timer() const feedData = data.feedsKeyByStreamId.get(channel.xmltv_id)
timer.start() if (feedData) {
const firstLogo = feedData.getLogos().first()
if (firstLogo) return firstLogo.url
}
const queueCreator = new QueueCreator({ const [channelId] = channel.xmltv_id.split('@')
channels, const channelData = data.channelsKeyById.get(channelId)
logger, if (channelData) {
options const firstLogo = channelData.getLogos().first()
}) if (firstLogo) return firstLogo.url
const queue = await queueCreator.create() }
const job = new Job({
queue,
logger,
options
})
await job.run() return null
logger.success(` done in ${timer.format('HH[h] mm[m] ss[s]')}`)
} }

View File

@@ -1,5 +1,6 @@
import { Logger, Storage } from '@freearhey/core' import { SITES_DIR, EOL } from '../../constants'
import { SITES_DIR } from '../../constants' import { Storage } from '@freearhey/storage-js'
import { Logger } from '@freearhey/core'
import { pathToFileURL } from 'node:url' import { pathToFileURL } from 'node:url'
import { program } from 'commander' import { program } from 'commander'
import fs from 'fs-extra' import fs from 'fs-extra'
@@ -12,7 +13,7 @@ async function main() {
const storage = new Storage(SITES_DIR) const storage = new Storage(SITES_DIR)
const logger = new Logger() const logger = new Logger()
logger.info(`Initializing "${domain}"...\r\n`) logger.info(`Initializing "${domain}"...${EOL}`)
const dir = domain const dir = domain
if (await storage.exists(dir)) { if (await storage.exists(dir)) {
@@ -39,7 +40,7 @@ async function main() {
}) })
await storage.save(`${dir}/readme.md`, readmeTemplate.replace(/<DOMAIN>/g, domain)) await storage.save(`${dir}/readme.md`, readmeTemplate.replace(/<DOMAIN>/g, domain))
logger.info('\r\nDone') logger.info(`${EOL}Done`)
} }
main() main()

View File

@@ -1,25 +1,22 @@
import { IssueLoader, HTMLTable, ChannelsParser } from '../../core' import { HTMLTableDataItem, HTMLTableRow, HTMLTableColumn } from '../../types/htmlTable'
import { Logger, Storage, Collection } from '@freearhey/core'
import { ChannelList, Issue, Site } from '../../models'
import { SITES_DIR, ROOT_DIR } from '../../constants' import { SITES_DIR, ROOT_DIR } from '../../constants'
import { Channel } from 'epg-grabber' import { Logger, Collection } from '@freearhey/core'
import { Issue, Site, Channel } from '../../models'
import { HTMLTable, loadIssues } from '../../core'
import { Storage } from '@freearhey/storage-js'
import * as epgGrabber from 'epg-grabber'
import { EPGGrabber } from 'epg-grabber'
async function main() { async function main() {
const logger = new Logger({ level: -999 }) const logger = new Logger({ level: -999 })
const issueLoader = new IssueLoader()
const sitesStorage = new Storage(SITES_DIR) const sitesStorage = new Storage(SITES_DIR)
const sites = new Collection() const sites = new Collection<Site>()
logger.info('loading channels...')
const channelsParser = new ChannelsParser({
storage: sitesStorage
})
logger.info('loading list of sites') logger.info('loading list of sites')
const folders = await sitesStorage.list('*/') const folders = await sitesStorage.list('*/')
logger.info('loading issues...') logger.info('loading issues...')
const issues = await issueLoader.load() const issues = await loadIssues()
logger.info('putting the data together...') logger.info('putting the data together...')
const brokenGuideReports = issues.filter(issue => const brokenGuideReports = issues.filter(issue =>
@@ -37,36 +34,43 @@ async function main() {
const files = await sitesStorage.list(`${domain}/*.channels.xml`) const files = await sitesStorage.list(`${domain}/*.channels.xml`)
for (const filepath of files) { for (const filepath of files) {
const channelList: ChannelList = await channelsParser.parse(filepath) const xml = await sitesStorage.load(filepath)
const channelsFromXML = EPGGrabber.parseChannelsXML(xml)
const channels = new Collection(channelsFromXML).map(
(channel: epgGrabber.Channel) => new Channel(channel.toObject())
)
site.totalChannels += channelList.channels.count() site.totalChannels += channels.count()
site.markedChannels += channelList.channels site.markedChannels += channels.filter((channel: Channel) => channel.xmltv_id).count()
.filter((channel: Channel) => channel.xmltv_id)
.count()
} }
sites.add(site) sites.add(site)
} }
logger.info('creating sites table...') logger.info('creating sites table...')
const tableData = new Collection() const rows = new Collection<HTMLTableRow>()
sites.forEach((site: Site) => { sites.forEach((site: Site) => {
tableData.add([ rows.add(
{ value: `<a href="sites/${site.domain}">${site.domain}</a>` }, new Collection<HTMLTableDataItem>([
{ value: site.totalChannels, align: 'right' }, { value: `<a href="sites/${site.domain}">${site.domain}</a>` },
{ value: site.markedChannels, align: 'right' }, { value: site.totalChannels.toString(), align: 'right' },
{ value: site.getStatus().emoji, align: 'center' }, { value: site.markedChannels.toString(), align: 'right' },
{ value: site.getIssues().all().join(', ') } { value: site.getStatus().emoji, align: 'center' },
]) { value: site.getIssueUrls().all().join(', ') }
])
)
}) })
logger.info('updating sites.md...') logger.info('updating sites.md...')
const table = new HTMLTable(tableData.all(), [ const table = new HTMLTable(
{ name: 'Site', align: 'left' }, rows,
{ name: 'Channels<br>(total / with xmltv-id)', colspan: 2, align: 'left' }, new Collection<HTMLTableColumn>([
{ name: 'Status', align: 'left' }, { name: 'Site', align: 'left' },
{ name: 'Notes', align: 'left' } { name: 'Channels<br>(total / with xmltv-id)', colspan: 2, align: 'left' },
]) { name: 'Status', align: 'left' },
{ name: 'Notes', align: 'left' }
])
)
const rootStorage = new Storage(ROOT_DIR) const rootStorage = new Storage(ROOT_DIR)
const sitesTemplate = await new Storage().load('scripts/templates/_sites.md') const sitesTemplate = await new Storage().load('scripts/templates/_sites.md')
const sitesContent = sitesTemplate.replace('_TABLE_', table.toString()) const sitesContent = sitesTemplate.replace('_TABLE_', table.toString())

View File

@@ -7,3 +7,4 @@ export const DOT_SITES_DIR = process.env.DOT_SITES_DIR || './.sites'
export const TESTING = process.env.NODE_ENV === 'test' ? true : false export const TESTING = process.env.NODE_ENV === 'test' ? true : false
export const OWNER = 'iptv-org' export const OWNER = 'iptv-org'
export const REPO = 'epg' export const REPO = 'epg'
export const EOL = '\r\n'

View File

@@ -1,16 +0,0 @@
import axios, { AxiosInstance, AxiosResponse, AxiosRequestConfig } from 'axios'
/**
 * Minimal HTTP client for the iptv-org API, preconfigured with the API
 * base URL and streaming responses.
 */
export class ApiClient {
  instance: AxiosInstance

  constructor() {
    const baseURL = 'https://iptv-org.github.io/api'
    this.instance = axios.create({ baseURL, responseType: 'stream' })
  }

  /** Issues a GET request relative to the API base URL. */
  get(url: string, options: AxiosRequestConfig): Promise<AxiosResponse> {
    const { instance } = this
    return instance.get(url, options)
  }
}

View File

@@ -1,22 +0,0 @@
import { parseChannels } from 'epg-grabber'
import { Storage } from '@freearhey/core'
import { ChannelList } from '../models'
/** Constructor dependencies for ChannelsParser. */
interface ChannelsParserProps {
  storage: Storage
}

/**
 * Reads a *.channels.xml file from storage and turns it into a ChannelList.
 */
export class ChannelsParser {
  storage: Storage

  constructor({ storage }: ChannelsParserProps) {
    this.storage = storage
  }

  /** Loads the file at `filepath` and parses its channel definitions. */
  async parse(filepath: string): Promise<ChannelList> {
    const xml = await this.storage.load(filepath)
    return new ChannelList({ channels: parseChannels(xml) })
  }
}

View File

@@ -1,32 +0,0 @@
import { SiteConfig } from 'epg-grabber'
import { pathToFileURL } from 'url'
/**
 * Loads a site's *.config.js module and fills in framework defaults for
 * any option the config file does not set.
 */
export class ConfigLoader {
  async load(filepath: string): Promise<SiteConfig> {
    // Dynamic import() requires a file:// URL to work reliably across platforms.
    const moduleUrl = pathToFileURL(filepath).toString()
    const userConfig = (await import(moduleUrl)).default

    // Defaults are rebuilt on every call so the nested `request` object is
    // never shared between two loaded configs.
    const defaults = {
      days: 1,
      delay: 0,
      output: 'guide.xml',
      request: {
        method: 'GET',
        maxContentLength: 5242880,
        timeout: 30000,
        withCredentials: true,
        jar: null,
        responseType: 'arraybuffer',
        cache: false,
        headers: null,
        data: null
      },
      maxConnections: 1,
      site: undefined,
      url: undefined,
      parser: undefined,
      channels: undefined
    }

    // Shallow merge: any key present in the user config wins outright
    // (including `request`, which replaces the default request wholesale).
    return { ...defaults, ...userConfig } as SiteConfig
  }
}

View File

@@ -1,103 +0,0 @@
import type { DataLoaderProps, DataLoaderData } from '../types/dataLoader'
import cliProgress, { MultiBar } from 'cli-progress'
import { Storage } from '@freearhey/core'
import { ApiClient } from './apiClient'
import numeral from 'numeral'
/**
 * Downloads iptv-org API data files into storage and reads them back,
 * rendering one cli-progress bar per file while downloading.
 */
export class DataLoader {
  client: ApiClient
  storage: Storage
  progressBar: MultiBar

  constructor(props: DataLoaderProps) {
    this.client = new ApiClient()
    this.storage = props.storage
    this.progressBar = new cliProgress.MultiBar({
      stopOnComplete: true,
      hideCursor: true,
      forceRedraw: true,
      barsize: 36,
      format(options, params, payload) {
        const filename = payload.filename.padEnd(18, ' ')
        const barsize = options.barsize || 40
        const percent = (params.progress * 100).toFixed(2)
        const speed = payload.speed ? numeral(payload.speed).format('0.0 b') + '/s' : 'N/A'
        const total = numeral(params.total).format('0.0 b')
        const completeSize = Math.round(params.progress * barsize)
        const incompleteSize = barsize - completeSize
        const bar =
          options.barCompleteString && options.barIncompleteString
            ? options.barCompleteString.slice(0, completeSize) +
              options.barGlue +
              options.barIncompleteString.slice(0, incompleteSize)
            : '-'.repeat(barsize)
        // FIX: the label was the literal "$(unknown)" while the padded
        // `filename` computed above went unused; interpolate it so each
        // bar is labelled with the file it tracks.
        return `${filename} [${bar}] ${percent}% | ETA: ${params.eta}s | ${total} | ${speed}`
      }
    })
  }

  /**
   * Reads all previously downloaded *.json data files from storage in
   * parallel.
   * @returns the parsed JSON payloads keyed by dataset name
   */
  async load(): Promise<DataLoaderData> {
    const [
      countries,
      regions,
      subdivisions,
      languages,
      categories,
      blocklist,
      channels,
      feeds,
      timezones,
      guides,
      streams,
      logos
    ] = await Promise.all([
      this.storage.json('countries.json'),
      this.storage.json('regions.json'),
      this.storage.json('subdivisions.json'),
      this.storage.json('languages.json'),
      this.storage.json('categories.json'),
      this.storage.json('blocklist.json'),
      this.storage.json('channels.json'),
      this.storage.json('feeds.json'),
      this.storage.json('timezones.json'),
      this.storage.json('guides.json'),
      this.storage.json('streams.json'),
      this.storage.json('logos.json')
    ])
    return {
      countries,
      regions,
      subdivisions,
      languages,
      categories,
      blocklist,
      channels,
      feeds,
      timezones,
      guides,
      streams,
      logos
    }
  }

  /**
   * Streams one API file into storage, updating its progress bar as
   * chunks arrive. NOTE(review): fire-and-forget — the returned promise
   * resolves once the request is issued, not when the file finishes
   * writing; callers appear to rely on the progress bars for completion.
   */
  async download(filename: string) {
    if (!this.storage || !this.progressBar) return
    const stream = await this.storage.createStream(filename)
    const progressBar = this.progressBar.create(0, 0, { filename })
    this.client
      .get(filename, {
        responseType: 'stream',
        onDownloadProgress({ total, loaded, rate }) {
          if (total) progressBar.setTotal(total)
          progressBar.update(loaded, { speed: rate })
        }
      })
      .then(response => {
        response.data.pipe(stream)
      })
  }
}

View File

@@ -1,55 +0,0 @@
import { Channel, Feed, GuideChannel, Logo, Stream } from '../models'
import { DataLoaderData } from '../types/dataLoader'
import { Collection } from '@freearhey/core'
export class DataProcessor {
  /**
   * Wires raw API data into linked model objects plus pre-computed lookup
   * tables (keyed/grouped collections).
   *
   * The order of operations matters: feeds need channel/stream/guide
   * lookups first, logos need the feed lookup, and then feeds and
   * channels are rebuilt a second time to attach the logo groupings.
   */
  process(data: DataLoaderData) {
    // Channels first: everything else links back to them by id.
    let channels = new Collection(data.channels).map(data => new Channel(data))
    const channelsKeyById = channels.keyBy((channel: Channel) => channel.id)
    const guideChannels = new Collection(data.guides).map(data => new GuideChannel(data))
    const guideChannelsGroupedByStreamId = guideChannels.groupBy((channel: GuideChannel) =>
      channel.getStreamId()
    )
    const streams = new Collection(data.streams).map(data => new Stream(data))
    const streamsGroupedById = streams.groupBy((stream: Stream) => stream.getId())
    // Feeds link to their guide channels, streams and parent channel.
    let feeds = new Collection(data.feeds).map(data =>
      new Feed(data)
        .withGuideChannels(guideChannelsGroupedByStreamId)
        .withStreams(streamsGroupedById)
        .withChannel(channelsKeyById)
    )
    const feedsKeyByStreamId = feeds.keyBy((feed: Feed) => feed.getStreamId())
    const logos = new Collection(data.logos).map(data =>
      new Logo(data).withFeed(feedsKeyByStreamId)
    )
    const logosGroupedByChannelId = logos.groupBy((logo: Logo) => logo.channelId)
    const logosGroupedByStreamId = logos.groupBy((logo: Logo) => logo.getStreamId())
    // Second pass: attach logos to feeds, then feeds + logos to channels.
    feeds = feeds.map((feed: Feed) => feed.withLogos(logosGroupedByStreamId))
    const feedsGroupedByChannelId = feeds.groupBy((feed: Feed) => feed.channelId)
    channels = channels.map((channel: Channel) =>
      channel.withFeeds(feedsGroupedByChannelId).withLogos(logosGroupedByChannelId)
    )
    return {
      guideChannelsGroupedByStreamId,
      feedsGroupedByChannelId,
      logosGroupedByChannelId,
      logosGroupedByStreamId,
      streamsGroupedById,
      feedsKeyByStreamId,
      channelsKeyById,
      guideChannels,
      channels,
      streams,
      feeds,
      logos
    }
  }
}

View File

@@ -1,14 +0,0 @@
import dayjs from 'dayjs'
import utc from 'dayjs/plugin/utc'
dayjs.extend(utc)
/**
 * Date helpers built on dayjs with the UTC plugin enabled.
 */
const date = {
  /**
   * Returns the start of day (00:00 UTC) for the given date string, or
   * for the current moment when no string is passed.
   */
  getUTC(d = null) {
    const base = typeof d === 'string' ? dayjs.utc(d) : dayjs.utc()
    return base.startOf('d')
  }
}

export default date

View File

@@ -1,105 +0,0 @@
import { EPGGrabber, GrabCallbackData, EPGGrabberMock, SiteConfig, Channel } from 'epg-grabber'
import { Logger, Collection } from '@freearhey/core'
import { Queue, ProxyParser } from './'
import { GrabOptions } from '../commands/epg/grab'
import { TaskQueue, PromisyClass } from 'cwait'
import { SocksProxyAgent } from 'socks-proxy-agent'
/** Dependencies required to construct a Grabber. */
interface GrabberProps {
  logger: Logger
  queue: Queue
  options: GrabOptions
}

/**
 * Executes every queued (channel, date) grab task with bounded
 * concurrency and collects the processed channels and grabbed programs.
 */
export class Grabber {
  logger: Logger
  queue: Queue
  options: GrabOptions
  // EPGGrabberMock is substituted under NODE_ENV=test so no real HTTP
  // requests are made.
  grabber: EPGGrabber | EPGGrabberMock
  constructor({ logger, queue, options }: GrabberProps) {
    this.logger = logger
    this.queue = queue
    this.options = options
    this.grabber = process.env.NODE_ENV === 'test' ? new EPGGrabberMock() : new EPGGrabber()
  }
  /**
   * Runs all queue items, keeping at most `options.maxConnections` tasks
   * in flight at once (cwait TaskQueue).
   *
   * @returns every channel that was processed plus all grabbed programs
   */
  async grab(): Promise<{ channels: Collection; programs: Collection }> {
    const proxyParser = new ProxyParser()
    const taskQueue = new TaskQueue(Promise as PromisyClass, this.options.maxConnections)
    const total = this.queue.size()
    const channels = new Collection()
    let programs = new Collection()
    let i = 1 // progress counter shared by all concurrent tasks
    await Promise.all(
      this.queue.items().map(
        taskQueue.wrap(
          async (queueItem: { channel: Channel; config: SiteConfig; date: string }) => {
            const { channel, config, date } = queueItem
            channels.add(channel)
            // CLI options override the per-site config for this request.
            if (this.options.timeout !== undefined) {
              const timeout = parseInt(this.options.timeout)
              config.request = { ...config.request, ...{ timeout } }
            }
            if (this.options.delay !== undefined) {
              const delay = parseInt(this.options.delay)
              config.delay = delay
            }
            if (this.options.proxy !== undefined) {
              const proxy = proxyParser.parse(this.options.proxy)
              // SOCKS proxies need a dedicated agent; other protocols go
              // through the request's built-in `proxy` option.
              if (
                proxy.protocol &&
                ['socks', 'socks5', 'socks5h', 'socks4', 'socks4a'].includes(String(proxy.protocol))
              ) {
                const socksProxyAgent = new SocksProxyAgent(this.options.proxy)
                config.request = {
                  ...config.request,
                  ...{ httpAgent: socksProxyAgent, httpsAgent: socksProxyAgent }
                }
              } else {
                config.request = { ...config.request, ...{ proxy } }
              }
            }
            if (this.options.curl === true) {
              config.curl = true
            }
            const _programs = await this.grabber.grab(
              channel,
              date,
              config,
              (data: GrabCallbackData, error: Error | null) => {
                const { programs, date } = data
                // Per-request progress line, e.g.
                // "[3/120] example.com (en) - Channel1.us - Jan 1, 2025 (24 programs)"
                this.logger.info(
                  ` [${i}/${total}] ${channel.site} (${channel.lang}) - ${
                    channel.xmltv_id
                  } - ${date.format('MMM D, YYYY')} (${programs.length} programs)`
                )
                if (i < total) i++
                if (error) {
                  this.logger.info(` ERR: ${error.message}`)
                }
              }
            )
            programs = programs.concat(new Collection(_programs))
          }
        )
      )
    )
    return { channels, programs }
  }
}

View File

@@ -1,111 +0,0 @@
import { Collection, Logger, Zip, Storage, StringTemplate } from '@freearhey/core'
import epgGrabber from 'epg-grabber'
import { OptionValues } from 'commander'
import { Channel, Feed, Guide } from '../models'
import path from 'path'
import { DataLoader, DataProcessor } from '.'
import { DataLoaderData } from '../types/dataLoader'
import { DataProcessorData } from '../types/dataProcessor'
import { DATA_DIR } from '../constants'
/** Inputs for the GuideManager. */
interface GuideManagerProps {
  options: OptionValues
  logger: Logger
  channels: Collection
  programs: Collection
}

/**
 * Turns grabbed channels/programs into XMLTV guide files, grouped by the
 * `--output` path template (lang/site placeholders).
 */
export class GuideManager {
  options: OptionValues
  logger: Logger
  channels: Collection
  programs: Collection
  constructor({ channels, programs, logger, options }: GuideManagerProps) {
    this.options = options
    this.logger = logger
    this.channels = channels
    this.programs = programs
  }
  /**
   * Groups channels and programs by their output filepath (derived from
   * the path template) and writes one Guide per program group.
   */
  async createGuides() {
    const pathTemplate = new StringTemplate(this.options.output)
    const processor = new DataProcessor()
    const dataStorage = new Storage(DATA_DIR)
    const loader = new DataLoader({ storage: dataStorage })
    const data: DataLoaderData = await loader.load()
    const { feedsKeyByStreamId, channelsKeyById }: DataProcessorData = processor.process(data)
    const groupedChannels = this.channels
      .map((channel: epgGrabber.Channel) => {
        // Fill in a missing icon from the API data: prefer the feed's
        // logo (exact xmltv_id match), fall back to the parent channel's.
        if (channel.xmltv_id && !channel.icon) {
          const foundFeed: Feed = feedsKeyByStreamId.get(channel.xmltv_id)
          if (foundFeed && foundFeed.hasLogo()) {
            channel.icon = foundFeed.getLogoUrl()
          } else {
            const [channelId] = channel.xmltv_id.split('@')
            const foundChannel: Channel = channelsKeyById.get(channelId)
            if (foundChannel && foundChannel.hasLogo()) {
              channel.icon = foundChannel.getLogoUrl()
            }
          }
        }
        return channel
      })
      .orderBy([
        (channel: epgGrabber.Channel) => channel.index,
        (channel: epgGrabber.Channel) => channel.xmltv_id
      ])
      .uniqBy(
        (channel: epgGrabber.Channel) => `${channel.xmltv_id}:${channel.site}:${channel.lang}`
      )
      .groupBy((channel: epgGrabber.Channel) => {
        return pathTemplate.format({ lang: channel.lang || 'en', site: channel.site || '' })
      })
    const groupedPrograms = this.programs
      .orderBy([
        (program: epgGrabber.Program) => program.channel,
        (program: epgGrabber.Program) => program.start
      ])
      .groupBy((program: epgGrabber.Program) => {
        // A program's language is taken from its first title, default 'en'.
        const lang =
          program.titles && program.titles.length && program.titles[0].lang
            ? program.titles[0].lang
            : 'en'
        return pathTemplate.format({ lang, site: program.site || '' })
      })
    // Iterate program groups: a guide file is emitted only for groups
    // that actually have programs.
    for (const groupKey of groupedPrograms.keys()) {
      const guide = new Guide({
        filepath: groupKey,
        gzip: this.options.gzip,
        channels: new Collection(groupedChannels.get(groupKey)),
        programs: new Collection(groupedPrograms.get(groupKey))
      })
      await this.save(guide)
    }
  }
  /**
   * Writes the guide XML to disk, plus a gzipped copy when `gzip` is set.
   */
  async save(guide: Guide) {
    const storage = new Storage(path.dirname(guide.filepath))
    const xmlFilepath = guide.filepath
    const xmlFilename = path.basename(xmlFilepath)
    this.logger.info(` saving to "${xmlFilepath}"...`)
    const xmltv = guide.toString()
    await storage.save(xmlFilename, xmltv)
    if (guide.gzip) {
      const zip = new Zip()
      const compressed = zip.compress(xmltv)
      const gzFilepath = `${guide.filepath}.gz`
      const gzFilename = path.basename(gzFilepath)
      this.logger.info(` saving to "${gzFilepath}"...`)
      await storage.save(gzFilename, compressed)
    }
  }
}

View File

@@ -1,52 +1,42 @@
interface Column { import { HTMLTableColumn, HTMLTableDataItem, HTMLTableRow } from '../types/htmlTable'
name: string import { Collection } from '@freearhey/core'
nowrap?: boolean import { EOL } from '../constants'
align?: string
colspan?: number
}
type DataItem = {
value: string
nowrap?: boolean
align?: string
colspan?: number
}[]
export class HTMLTable { export class HTMLTable {
data: DataItem[] rows: Collection<HTMLTableRow>
columns: Column[] columns: Collection<HTMLTableColumn>
constructor(data: DataItem[], columns: Column[]) { constructor(rows: Collection<HTMLTableRow>, columns: Collection<HTMLTableColumn>) {
this.data = data this.rows = rows
this.columns = columns this.columns = columns
} }
toString() { toString() {
let output = '<table>\r\n' let output = `<table>${EOL}`
output += ' <thead>\r\n <tr>' output += ` <thead>${EOL} <tr>`
for (const column of this.columns) { this.columns.forEach((column: HTMLTableColumn) => {
const nowrap = column.nowrap ? ' nowrap' : '' const nowrap = column.nowrap ? ' nowrap' : ''
const align = column.align ? ` align="${column.align}"` : '' const align = column.align ? ` align="${column.align}"` : ''
const colspan = column.colspan ? ` colspan="${column.colspan}"` : '' const colspan = column.colspan ? ` colspan="${column.colspan}"` : ''
output += `<th${align}${nowrap}${colspan}>${column.name}</th>` output += `<th${align}${nowrap}${colspan}>${column.name}</th>`
} })
output += '</tr>\r\n </thead>\r\n' output += `</tr>${EOL} </thead>${EOL}`
output += ' <tbody>\r\n' output += ` <tbody>${EOL}`
for (const row of this.data) { this.rows.forEach((row: HTMLTableRow) => {
output += ' <tr>' output += ' <tr>'
for (const item of row) { row.forEach((item: HTMLTableDataItem) => {
const nowrap = item.nowrap ? ' nowrap' : '' const nowrap = item.nowrap ? ' nowrap' : ''
const align = item.align ? ` align="${item.align}"` : '' const align = item.align ? ` align="${item.align}"` : ''
const colspan = item.colspan ? ` colspan="${item.colspan}"` : '' const colspan = item.colspan ? ` colspan="${item.colspan}"` : ''
output += `<td${align}${nowrap}${colspan}>${item.value}</td>` output += `<td${align}${nowrap}${colspan}>${item.value}</td>`
} })
output += '</tr>\r\n' output += `</tr>${EOL}`
} })
output += ' </tbody>\r\n' output += ` </tbody>${EOL}`
output += '</table>' output += '</table>'

View File

@@ -1,14 +1,4 @@
export * from './apiClient'
export * from './channelsParser'
export * from './configLoader'
export * from './dataLoader'
export * from './dataProcessor'
export * from './grabber'
export * from './guideManager'
export * from './htmlTable' export * from './htmlTable'
export * from './issueLoader' export * from './siteConfig'
export * from './issueParser' export * from './utils'
export * from './job'
export * from './proxyParser'
export * from './queue' export * from './queue'
export * from './queueCreator'

View File

@@ -1,37 +0,0 @@
import { restEndpointMethods } from '@octokit/plugin-rest-endpoint-methods'
import { paginateRest } from '@octokit/plugin-paginate-rest'
import { TESTING, OWNER, REPO } from '../constants'
import { Collection } from '@freearhey/core'
import { Octokit } from '@octokit/core'
import { IssueParser } from './'
// Octokit extended with pagination and typed REST endpoint plugins.
const CustomOctokit = Octokit.plugin(paginateRest, restEndpointMethods)
const octokit = new CustomOctokit()

/**
 * Loads open issues from the iptv-org/epg repository and parses each one
 * into an Issue model. Under NODE_ENV=test a local fixture is imported
 * instead, so no network access is required.
 */
export class IssueLoader {
  async load(props?: { labels: string[] | string }) {
    // Octokit expects the label filter as a comma-separated string.
    let labels = ''
    if (props?.labels) {
      labels = Array.isArray(props.labels) ? props.labels.join(',') : props.labels
    }

    let issues: object[] = []
    if (TESTING) {
      issues = (await import('../../tests/__data__/input/sites_update/issues.mjs')).default
    } else {
      // paginate() follows Link headers and returns all pages merged.
      issues = await octokit.paginate(octokit.rest.issues.listForRepo, {
        owner: OWNER,
        repo: REPO,
        per_page: 100,
        labels,
        state: 'open',
        headers: {
          'X-GitHub-Api-Version': '2022-11-28'
        }
      })
    }

    const parser = new IssueParser()
    return new Collection(issues).map(parser.parse)
  }
}

View File

@@ -1,34 +0,0 @@
import { Dictionary } from '@freearhey/core'
import { Issue } from '../models'
// Maps issue-form field labels (the "### Label" headings in the issue
// body) to internal field ids. Labels not listed here are ignored.
const FIELDS = new Dictionary({
  Site: 'site'
})

export class IssueParser {
  /**
   * Converts a GitHub REST API issue into an Issue model.
   *
   * Issue-form bodies look like "### Site\nexample.com\n### ...", so the
   * body is split on "###" and each chunk parsed as a label/value pair.
   */
  parse(issue: { number: number; body: string; labels: { name: string }[] }): Issue {
    const fields = issue.body.split('###')

    const data = new Dictionary()
    fields.forEach((field: string) => {
      const parsed = field.split(/\r?\n/).filter(Boolean)
      const _label = (parsed.shift() || '').trim()
      const _value = parsed.join('\r\n').trim()
      // FIX: was `return data` — forEach discards the callback's return
      // value, so returning `data` was misleading; a bare return suffices.
      if (!_label || !_value) return

      // Skip labels with no mapped internal field id.
      const id: string = FIELDS.get(_label)
      if (!id) return

      // Issue forms submit "_No response_" (or "None") for blank inputs.
      const value: string = _value === '_No response_' || _value === 'None' ? '' : _value
      data.set(id, value)
    })

    const labels = issue.labels.map(label => label.name)
    return new Issue({ number: issue.number, labels, data })
  }
}

View File

@@ -1,34 +0,0 @@
import { Logger } from '@freearhey/core'
import { Queue, Grabber, GuideManager } from '.'
import { GrabOptions } from '../commands/epg/grab'
/** Everything a Job needs to run. */
interface JobProps {
  options: GrabOptions
  logger: Logger
  queue: Queue
}

/**
 * A single grab run: fetches programs for every queued task via Grabber,
 * then hands the results to GuideManager to write the guide files.
 */
export class Job {
  options: GrabOptions
  logger: Logger
  grabber: Grabber

  constructor({ queue, logger, options }: JobProps) {
    this.options = options
    this.logger = logger
    this.grabber = new Grabber({ logger, queue, options })
  }

  /** Grabs all queued programs and writes the resulting guides. */
  async run() {
    const { channels, programs } = await this.grabber.grab()
    const { options, logger } = this
    const manager = new GuideManager({ channels, programs, options, logger })
    await manager.createGuides()
  }
}

View File

@@ -1,31 +0,0 @@
import { URL } from 'node:url'
/** Structured representation of a proxy URL. */
interface ProxyParserResult {
  protocol: string | null
  auth?: {
    username?: string
    password?: string
  }
  host: string
  port: number | null
}

/**
 * Parses a proxy URL (e.g. "socks5://user:pass@127.0.0.1:1080") into its
 * protocol, host, port and optional credentials.
 */
export class ProxyParser {
  parse(_url: string): ProxyParserResult {
    const { protocol, hostname, port, username, password } = new URL(_url)

    const result: ProxyParserResult = {
      // URL keeps the trailing ":" on the protocol; strip it.
      protocol: protocol.replace(':', '') || null,
      host: hostname,
      // URL exposes the port as a string ('' when absent).
      port: port ? parseInt(port) : null
    }

    if (username || password) {
      const auth: { username?: string; password?: string } = {}
      if (username) auth.username = username
      if (password) auth.password = password
      result.auth = auth
    }

    return result
  }
}

View File

@@ -1,45 +1,18 @@
import { Dictionary } from '@freearhey/core' import { Collection, Dictionary } from '@freearhey/core'
import { SiteConfig, Channel } from 'epg-grabber' import { QueueItem } from '../types/queue'
export interface QueueItem {
channel: Channel
date: string
config: SiteConfig
error: string | null
}
export class Queue { export class Queue {
_data: Dictionary #items: Dictionary<QueueItem> = new Dictionary<QueueItem>()
constructor() { add(key: string, data: QueueItem) {
this._data = new Dictionary() this.#items.set(key, data)
} }
missing(key: string): boolean { has(key: string): boolean {
return this._data.missing(key) return this.#items.has(key)
} }
add( getItems(): Collection<QueueItem> {
key: string, return new Collection<QueueItem>(Object.values(this.#items.data()))
{ channel, config, date }: { channel: Channel; date: string | null; config: SiteConfig }
) {
this._data.set(key, {
channel,
date,
config,
error: null
})
}
size(): number {
return Object.values(this._data.data()).length
}
items(): QueueItem[] {
return Object.values(this._data.data()) as QueueItem[]
}
isEmpty(): boolean {
return this.size() === 0
} }
} }

View File

@@ -1,63 +0,0 @@
import { Storage, Collection, DateTime, Logger } from '@freearhey/core'
import { SITES_DIR, DATA_DIR } from '../constants'
import { GrabOptions } from '../commands/epg/grab'
import { ConfigLoader, Queue } from './'
import { SiteConfig } from 'epg-grabber'
import path from 'path'
/** Inputs for the QueueCreator. */
interface QueueCreatorProps {
  logger: Logger
  options: GrabOptions
  channels: Collection
}

/**
 * Expands the parsed channel list into a Queue of (channel, date) grab
 * tasks — one per requested day — deduplicated by site/lang/id/date.
 */
export class QueueCreator {
  configLoader: ConfigLoader
  logger: Logger
  sitesStorage: Storage
  dataStorage: Storage
  channels: Collection
  options: GrabOptions
  constructor({ channels, logger, options }: QueueCreatorProps) {
    this.channels = channels
    this.logger = logger
    this.sitesStorage = new Storage()
    this.dataStorage = new Storage(DATA_DIR)
    this.options = options
    this.configLoader = new ConfigLoader()
  }
  async create(): Promise<Queue> {
    let index = 0
    const queue = new Queue()
    for (const channel of this.channels.all()) {
      // Remember input order so output can be sorted back later.
      channel.index = index++
      // Channels missing any required attribute cannot be grabbed.
      if (!channel.site || !channel.site_id || !channel.name) continue
      const configPath = path.resolve(SITES_DIR, `${channel.site}/${channel.site}.config.js`)
      const config: SiteConfig = await this.configLoader.load(configPath)
      // Fall back to the site-specific id when no xmltv_id was assigned.
      if (!channel.xmltv_id) {
        channel.xmltv_id = channel.site_id
      }
      // CLI --days wins over the site config; default is one day.
      const days = this.options.days || config.days || 1
      // CURR_DATE lets tests pin "today" to a fixed date.
      const currDate = new DateTime(process.env.CURR_DATE || new Date().toISOString())
      const dates = Array.from({ length: days }, (_, day) => currDate.add(day, 'd'))
      dates.forEach((date: DateTime) => {
        const dateString = date.toJSON()
        const key = `${channel.site}:${channel.lang}:${channel.xmltv_id}:${dateString}`
        // Deduplicate: identical site/lang/id/date tasks are queued once.
        if (queue.missing(key)) {
          queue.add(key, {
            channel,
            date: dateString,
            config
          })
        }
      })
    }
    return queue
  }
}

View File

@@ -0,0 +1,71 @@
import * as epgGrabber from 'epg-grabber'
import _ from 'lodash'
// Fallback values applied by SiteConfig for any option a site's
// *.config.js does not define.
const _default = {
  days: 1,
  delay: 0,
  output: 'guide.xml',
  request: {
    method: 'GET',
    maxContentLength: 5242880, // 5 MiB response cap
    timeout: 30000, // ms
    withCredentials: true,
    jar: null,
    responseType: 'arraybuffer',
    cache: false,
    headers: null,
    data: null
  },
  maxConnections: 1,
  site: undefined,
  url: undefined,
  parser: undefined,
  channels: undefined,
  lang: 'en',
  debug: false,
  gzip: false,
  curl: false,
  logo: ''
}
/**
 * Normalized site configuration: wraps the raw object exported by a
 * `<site>.config.js` file and fills in defaults for any omitted option.
 */
export class SiteConfig {
  days: number
  lang: string
  delay: number
  debug: boolean
  gzip: boolean
  curl: boolean
  maxConnections: number
  output: string
  request: epgGrabber.Types.SiteConfigRequestConfig
  site: string
  channels?: string | string[]
  url: ((context: epgGrabber.Types.SiteConfigRequestContext) => string | Promise<string>) | string
  parser: (
    context: epgGrabber.Types.SiteConfigParserContext
  ) =>
    | epgGrabber.Types.SiteConfigParserResult[]
    | Promise<epgGrabber.Types.SiteConfigParserResult[]>
  logo: ((context: epgGrabber.Types.SiteConfigRequestContext) => string | Promise<string>) | string
  filepath: string

  /**
   * @param config Raw config object exported by the site's config module.
   */
  constructor(config: epgGrabber.Types.SiteConfigObject) {
    this.site = config.site
    this.channels = config.channels
    this.url = config.url
    this.parser = config.parser
    this.filepath = config.filepath
    this.days = config.days || _default.days
    this.lang = config.lang || _default.lang
    this.delay = config.delay || _default.delay
    this.debug = config.debug || _default.debug
    this.maxConnections = config.maxConnections || _default.maxConnections
    this.gzip = config.gzip || _default.gzip
    this.curl = config.curl || _default.curl
    this.output = config.output || _default.output
    this.logo = config.logo || _default.logo
    // Merge into a fresh object: _.merge mutates its first argument, so
    // merging directly into _default.request would leak one site's request
    // options into every SiteConfig created afterwards.
    this.request = _.merge({}, _default.request, config.request)
  }
}

106
scripts/core/utils.ts Normal file
View File

@@ -0,0 +1,106 @@
import { restEndpointMethods } from '@octokit/plugin-rest-endpoint-methods'
import { paginateRest } from '@octokit/plugin-paginate-rest'
import { TESTING, OWNER, REPO, EOL } from '../constants'
import { Collection } from '@freearhey/core'
import { Channel } from '../models/channel'
import { AxiosProxyConfig } from 'axios'
import { Octokit } from '@octokit/core'
import { pathToFileURL } from 'url'
import { Issue } from '../models'
import { URL } from 'node:url'
/**
 * Serializes a collection of channels into a channels.xml document.
 * Attribute values and display names are sanitized via escapeString();
 * each line is terminated with the platform EOL.
 */
export function generateChannelsXML(channels: Collection<Channel>): string {
  const lines: string[] = ['<?xml version="1.0" encoding="UTF-8"?>', '<channels>']
  channels.forEach((channel: Channel) => {
    const site = channel.site || ''
    const lang = channel.lang || ''
    const site_id = channel.site_id ? escapeString(channel.site_id) : ''
    const xmltv_id = channel.xmltv_id ? escapeString(channel.xmltv_id) : ''
    const displayName = channel.name ? escapeString(channel.name) : ''
    const logo = channel.logo ? ` logo="${escapeString(channel.logo)}"` : ''
    lines.push(
      `  <channel site="${site}" site_id="${site_id}" lang="${lang}"${logo} xmltv_id="${xmltv_id}">${displayName}</channel>`
    )
  })
  lines.push('</channels>')
  return lines.join(EOL) + EOL
}
/**
 * Makes a string safe for XML output: strips characters that are illegal
 * in XML 1.0, escapes the five XML entities, replaces line breaks with
 * spaces, collapses runs of spaces and trims the result.
 *
 * @param value        String to sanitize.
 * @param defaultValue Returned when `value` is falsy.
 */
export function escapeString(value: string, defaultValue = '') {
  if (!value) return defaultValue
  // Matches code points disallowed in XML (control chars, lone surrogates,
  // non-characters).
  const invalidXmlChars = new RegExp(
    '((?:[\0-\x08\x0B\f\x0E-\x1F\uFFFD\uFFFE\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]))|([\\x7F-\\x84]|[\\x86-\\x9F]|[\\uFDD0-\\uFDEF]|(?:\\uD83F[\\uDFFE\\uDFFF])|(?:\\uD87F[\\uDF' +
      'FE\\uDFFF])|(?:\\uD8BF[\\uDFFE\\uDFFF])|(?:\\uD8FF[\\uDFFE\\uDFFF])|(?:\\uD93F[\\uDFFE\\uD' +
      'FFF])|(?:\\uD97F[\\uDFFE\\uDFFF])|(?:\\uD9BF[\\uDFFE\\uDFFF])|(?:\\uD9FF[\\uDFFE\\uDFFF])' +
      '|(?:\\uDA3F[\\uDFFE\\uDFFF])|(?:\\uDA7F[\\uDFFE\\uDFFF])|(?:\\uDABF[\\uDFFE\\uDFFF])|(?:\\' +
      'uDAFF[\\uDFFE\\uDFFF])|(?:\\uDB3F[\\uDFFE\\uDFFF])|(?:\\uDB7F[\\uDFFE\\uDFFF])|(?:\\uDBBF' +
      '[\\uDFFE\\uDFFF])|(?:\\uDBFF[\\uDFFE\\uDFFF])(?:[\\0-\\t\\x0B\\f\\x0E-\\u2027\\u202A-\\uD7FF\\' +
      'uE000-\\uFFFF]|[\\uD800-\\uDBFF][\\uDC00-\\uDFFF]|[\\uD800-\\uDBFF](?![\\uDC00-\\uDFFF])|' +
      '(?:[^\\uD800-\\uDBFF]|^)[\\uDC00-\\uDFFF]))',
    'g'
  )
  const stripped = String(value || '').replace(invalidXmlChars, '')
  // Entity escaping must run first ('&' before the others), then
  // whitespace normalization.
  const replacements: [RegExp, string][] = [
    [/&/g, '&amp;'],
    [/</g, '&lt;'],
    [/>/g, '&gt;'],
    [/"/g, '&quot;'],
    [/'/g, '&apos;'],
    [/\n|\r/g, ' '],
    [/ +/g, ' ']
  ]
  let result = stripped
  for (const [pattern, replacement] of replacements) {
    result = result.replace(pattern, replacement)
  }
  return result.trim()
}
/**
 * Parses a proxy URL (e.g. "http://user:pass@127.0.0.1:3128") into an
 * Axios proxy configuration.
 *
 * @param string Proxy URL; when the port is omitted, 8080 is used.
 */
export function parseProxy(string: string): AxiosProxyConfig {
  const parsed = new URL(string)
  const proxy: AxiosProxyConfig = {
    protocol: parsed.protocol.replace(':', ''),
    host: parsed.hostname,
    port: parsed.port ? parseInt(parsed.port, 10) : 8080
  }
  if (parsed.username || parsed.password) {
    // WHATWG URL keeps credentials percent-encoded, but axios expects the
    // raw values (e.g. "p%40ss" should be sent as "p@ss").
    proxy.auth = {
      username: decodeUrlComponent(parsed.username),
      password: decodeUrlComponent(parsed.password)
    }
  }
  return proxy
}

// Decodes a percent-encoded URL component, returning the input unchanged
// when it is not valid percent-encoding (decodeURIComponent throws).
function decodeUrlComponent(value: string): string {
  try {
    return decodeURIComponent(value)
  } catch {
    return value
  }
}
/**
 * Dynamically imports a JavaScript module from a file-system path and
 * returns its default export.
 */
export async function loadJs(filepath: string) {
  const moduleUrl = pathToFileURL(filepath).toString()
  const loaded = await import(moduleUrl)
  return loaded.default
}
/**
 * Loads all open issues of the repository from the GitHub REST API
 * (following pagination), optionally filtered by label(s). When TESTING
 * is enabled, a local fixture is returned instead of hitting the network.
 */
export async function loadIssues(props?: { labels: string[] | string }) {
  const CustomOctokit = Octokit.plugin(paginateRest, restEndpointMethods)
  const octokit = new CustomOctokit()

  // Normalize the label filter to the comma-separated form the API expects.
  const rawLabels = props?.labels
  const labels = rawLabels ? (Array.isArray(rawLabels) ? rawLabels.join(',') : rawLabels) : ''

  const issues: object[] = TESTING
    ? (await import('../../tests/__data__/input/sites_update/issues.mjs')).default
    : await octokit.paginate(octokit.rest.issues.listForRepo, {
        owner: OWNER,
        repo: REPO,
        per_page: 100,
        labels,
        state: 'open',
        headers: {
          'X-GitHub-Api-Version': '2022-11-28'
        }
      })

  return new Collection(issues).map(data => new Issue(data))
}

View File

@@ -1,164 +1,23 @@
import { ChannelData, ChannelSearchableData } from '../types/channel' import { ChannelGuideObject } from '../types/channel'
import { Collection, Dictionary } from '@freearhey/core' import * as epgGrabber from 'epg-grabber'
import { Stream, Feed, Logo, GuideChannel } from './' import { SITES_DIR } from '../constants'
import path from 'node:path'
export class Channel { export class Channel extends epgGrabber.Channel {
id?: string getGuideObject(): ChannelGuideObject {
name?: string const [channelId, feedId] = this.xmltv_id.split('@')
altNames?: Collection
network?: string
owners?: Collection
countryCode?: string
subdivisionCode?: string
cityName?: string
categoryIds?: Collection
isNSFW = false
launched?: string
closed?: string
replacedBy?: string
website?: string
feeds?: Collection
logos: Collection = new Collection()
constructor(data?: ChannelData) {
if (!data) return
this.id = data.id
this.name = data.name
this.altNames = new Collection(data.alt_names)
this.network = data.network || undefined
this.owners = new Collection(data.owners)
this.countryCode = data.country
this.subdivisionCode = data.subdivision || undefined
this.cityName = data.city || undefined
this.categoryIds = new Collection(data.categories)
this.isNSFW = data.is_nsfw
this.launched = data.launched || undefined
this.closed = data.closed || undefined
this.replacedBy = data.replaced_by || undefined
this.website = data.website || undefined
}
withFeeds(feedsGroupedByChannelId: Dictionary): this {
if (this.id) this.feeds = new Collection(feedsGroupedByChannelId.get(this.id))
return this
}
withLogos(logosGroupedByChannelId: Dictionary): this {
if (this.id) this.logos = new Collection(logosGroupedByChannelId.get(this.id))
return this
}
getFeeds(): Collection {
if (!this.feeds) return new Collection()
return this.feeds
}
getGuideChannels(): Collection {
let channels = new Collection()
this.getFeeds().forEach((feed: Feed) => {
channels = channels.concat(feed.getGuideChannels())
})
return channels
}
getGuideChannelNames(): Collection {
return this.getGuideChannels()
.map((channel: GuideChannel) => channel.siteName)
.uniq()
}
getStreams(): Collection {
let streams = new Collection()
this.getFeeds().forEach((feed: Feed) => {
streams = streams.concat(feed.getStreams())
})
return streams
}
getStreamNames(): Collection {
return this.getStreams()
.map((stream: Stream) => stream.getName())
.uniq()
}
getFeedFullNames(): Collection {
return this.getFeeds()
.map((feed: Feed) => feed.getFullName())
.uniq()
}
getName(): string {
return this.name || ''
}
getId(): string {
return this.id || ''
}
getAltNames(): Collection {
return this.altNames || new Collection()
}
getLogos(): Collection {
function feed(logo: Logo): number {
if (!logo.feed) return 1
if (logo.feed.isMain) return 1
return 0
}
function format(logo: Logo): number {
const levelByFormat: Record<string, number> = {
SVG: 0,
PNG: 3,
APNG: 1,
WebP: 1,
AVIF: 1,
JPEG: 2,
GIF: 1
}
return logo.format ? levelByFormat[logo.format] : 0
}
function size(logo: Logo): number {
return Math.abs(512 - logo.width) + Math.abs(512 - logo.height)
}
return this.logos.orderBy([feed, format, size], ['desc', 'desc', 'asc'], false)
}
getLogo(): Logo | undefined {
return this.getLogos().first()
}
hasLogo(): boolean {
return this.getLogos().notEmpty()
}
getLogoUrl(): string {
const logo = this.getLogo()
if (!logo) return ''
return logo.url || ''
}
getSearchable(): ChannelSearchableData {
return { return {
id: this.getId(), channel: channelId || null,
name: this.getName(), feed: feedId || null,
altNames: this.getAltNames().all(), site: this.site,
guideNames: this.getGuideChannelNames().all(), site_id: this.site_id,
streamNames: this.getStreamNames().all(), site_name: this.name,
feedFullNames: this.getFeedFullNames().all() lang: this.lang || 'en'
} }
} }
getConfigPath(): string {
return path.resolve(SITES_DIR, `${this.site}/${this.site}.config.js`)
}
} }

View File

@@ -1,77 +0,0 @@
import { Collection } from '@freearhey/core'
import epgGrabber from 'epg-grabber'
/**
 * In-memory list of grabber channels with helpers for lookup, sorting and
 * serialization to a channels.xml document.
 */
export class ChannelList {
  channels: Collection = new Collection()

  constructor(data: { channels: epgGrabber.Channel[] }) {
    this.channels = new Collection(data.channels)
  }

  /** Appends a channel to the list. */
  add(channel: epgGrabber.Channel): this {
    this.channels.add(channel)
    return this
  }

  /** Returns the first channel with the given site_id, if any. */
  get(siteId: string): epgGrabber.Channel | undefined {
    return this.channels.find((channel: epgGrabber.Channel) => channel.site_id == siteId)
  }

  /** Sorts by language, then xmltv_id (case-insensitive), then site_id. */
  sort(): this {
    this.channels = this.channels.orderBy([
      (channel: epgGrabber.Channel) => channel.lang || '_',
      (channel: epgGrabber.Channel) => (channel.xmltv_id ? channel.xmltv_id.toLowerCase() : '0'),
      (channel: epgGrabber.Channel) => channel.site_id
    ])
    return this
  }

  /** Serializes the list to a channels.xml string (CRLF line endings). */
  toString() {
    // Strips characters illegal in XML, escapes the five XML entities,
    // collapses whitespace and trims the result.
    function escapeString(value: string, defaultValue = '') {
      if (!value) return defaultValue
      const regex = new RegExp(
        '((?:[\0-\x08\x0B\f\x0E-\x1F\uFFFD\uFFFE\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]))|([\\x7F-\\x84]|[\\x86-\\x9F]|[\\uFDD0-\\uFDEF]|(?:\\uD83F[\\uDFFE\\uDFFF])|(?:\\uD87F[\\uDF' +
          'FE\\uDFFF])|(?:\\uD8BF[\\uDFFE\\uDFFF])|(?:\\uD8FF[\\uDFFE\\uDFFF])|(?:\\uD93F[\\uDFFE\\uD' +
          'FFF])|(?:\\uD97F[\\uDFFE\\uDFFF])|(?:\\uD9BF[\\uDFFE\\uDFFF])|(?:\\uD9FF[\\uDFFE\\uDFFF])' +
          '|(?:\\uDA3F[\\uDFFE\\uDFFF])|(?:\\uDA7F[\\uDFFE\\uDFFF])|(?:\\uDABF[\\uDFFE\\uDFFF])|(?:\\' +
          'uDAFF[\\uDFFE\\uDFFF])|(?:\\uDB3F[\\uDFFE\\uDFFF])|(?:\\uDB7F[\\uDFFE\\uDFFF])|(?:\\uDBBF' +
          '[\\uDFFE\\uDFFF])|(?:\\uDBFF[\\uDFFE\\uDFFF])(?:[\\0-\\t\\x0B\\f\\x0E-\\u2027\\u202A-\\uD7FF\\' +
          'uE000-\\uFFFF]|[\\uD800-\\uDBFF][\\uDC00-\\uDFFF]|[\\uD800-\\uDBFF](?![\\uDC00-\\uDFFF])|' +
          '(?:[^\\uD800-\\uDBFF]|^)[\\uDC00-\\uDFFF]))',
        'g'
      )
      value = String(value || '').replace(regex, '')
      return value
        .replace(/&/g, '&amp;')
        .replace(/</g, '&lt;')
        .replace(/>/g, '&gt;')
        .replace(/"/g, '&quot;')
        .replace(/'/g, '&apos;')
        .replace(/\n|\r/g, ' ')
        .replace(/ +/g, ' ')
        .trim()
    }
    let output = '<?xml version="1.0" encoding="UTF-8"?>\r\n<channels>\r\n'
    this.channels.forEach((channel: epgGrabber.Channel) => {
      // NOTE(review): logo and site_id are written unescaped here, unlike
      // xmltv_id and the display name — verify this is intentional.
      const logo = channel.logo ? ` logo="${channel.logo}"` : ''
      const xmltv_id = channel.xmltv_id ? escapeString(channel.xmltv_id) : ''
      const lang = channel.lang || ''
      const site_id = channel.site_id || ''
      const site = channel.site || ''
      const displayName = channel.name ? escapeString(channel.name) : ''
      output += `  <channel site="${site}" lang="${lang}" xmltv_id="${xmltv_id}" site_id="${site_id}"${logo}>${displayName}</channel>\r\n`
    })
    output += '</channels>\r\n'
    return output
  }
}

View File

@@ -1,124 +0,0 @@
import { Collection, Dictionary } from '@freearhey/core'
import { FeedData } from '../types/feed'
import { Logo, Channel } from '.'
/**
 * A feed of a channel (e.g. an HD or regional variant), as defined in the
 * feeds data, with optional links to its channel, streams, guide channels
 * and logos.
 */
export class Feed {
  channelId: string
  channel?: Channel
  id: string
  name: string
  isMain: boolean
  broadcastAreaCodes: Collection
  languageCodes: Collection
  timezoneIds: Collection
  videoFormat: string
  guideChannels?: Collection
  streams?: Collection
  logos: Collection = new Collection()

  constructor(data: FeedData) {
    this.channelId = data.channel
    this.id = data.id
    this.name = data.name
    this.isMain = data.is_main
    this.broadcastAreaCodes = new Collection(data.broadcast_area)
    this.languageCodes = new Collection(data.languages)
    this.timezoneIds = new Collection(data.timezones)
    this.videoFormat = data.video_format
  }

  /** Links the parent channel by its id. */
  withChannel(channelsKeyById: Dictionary): this {
    this.channel = channelsKeyById.get(this.channelId)
    return this
  }

  /**
   * Attaches streams keyed by "<channel>@<feed>". The main feed also picks
   * up streams keyed by the bare channel id.
   */
  withStreams(streamsGroupedById: Dictionary): this {
    this.streams = new Collection(streamsGroupedById.get(`${this.channelId}@${this.id}`))
    if (this.isMain) {
      this.streams = this.streams.concat(new Collection(streamsGroupedById.get(this.channelId)))
    }
    return this
  }

  /**
   * Attaches guide channels keyed by "<channel>@<feed>". The main feed
   * also picks up guide channels keyed by the bare channel id.
   */
  withGuideChannels(guideChannelsGroupedByStreamId: Dictionary): this {
    this.guideChannels = new Collection(
      guideChannelsGroupedByStreamId.get(`${this.channelId}@${this.id}`)
    )
    if (this.isMain) {
      this.guideChannels = this.guideChannels.concat(
        new Collection(guideChannelsGroupedByStreamId.get(this.channelId))
      )
    }
    return this
  }

  /** Attaches logos keyed by this feed's stream id. */
  withLogos(logosGroupedByStreamId: Dictionary): this {
    this.logos = new Collection(logosGroupedByStreamId.get(this.getStreamId()))
    return this
  }

  getGuideChannels(): Collection {
    if (!this.guideChannels) return new Collection()
    return this.guideChannels
  }

  getStreams(): Collection {
    if (!this.streams) return new Collection()
    return this.streams
  }

  /** "<channel name> <feed name>", or '' when the channel is not linked. */
  getFullName(): string {
    if (!this.channel) return ''
    return `${this.channel.name} ${this.name}`
  }

  getStreamId(): string {
    return `${this.channelId}@${this.id}`
  }

  /**
   * Logos ordered best-first by format rank, then by closeness to the
   * preferred 512x512 size.
   */
  getLogos(): Collection {
    // Higher rank wins (sorted desc): PNG(3) > JPEG(2) > APNG/WebP/AVIF/GIF(1) > SVG(0).
    function format(logo: Logo): number {
      const levelByFormat: Record<string, number> = {
        SVG: 0,
        PNG: 3,
        APNG: 1,
        WebP: 1,
        AVIF: 1,
        JPEG: 2,
        GIF: 1
      }
      return logo.format ? levelByFormat[logo.format] : 0
    }
    // Distance from 512x512 (sorted asc — closer is better).
    function size(logo: Logo): number {
      return Math.abs(512 - logo.width) + Math.abs(512 - logo.height)
    }
    return this.logos.orderBy([format, size], ['desc', 'asc'], false)
  }

  getLogo(): Logo | undefined {
    return this.getLogos().first()
  }

  hasLogo(): boolean {
    return this.getLogos().notEmpty()
  }

  getLogoUrl(): string {
    const logo = this.getLogo()
    if (!logo) return ''
    return logo.url || ''
  }
}

View File

@@ -1,35 +1,59 @@
import { Collection, DateTime } from '@freearhey/core' import { Collection, Logger } from '@freearhey/core'
import { generateXMLTV } from 'epg-grabber' import { Storage } from '@freearhey/storage-js'
import { EPGGrabber } from 'epg-grabber'
import { Channel, Program } from '.'
import utc from 'dayjs/plugin/utc'
import dayjs from 'dayjs'
import path from 'node:path'
import pako from 'pako'
dayjs.extend(utc)
interface GuideData { interface GuideData {
channels: Collection channels: Collection<Channel>
programs: Collection programs: Collection<Program>
filepath: string filepath: string
gzip: boolean gzip: boolean
} }
export class Guide { export class Guide {
channels: Collection channels: Collection<Channel>
programs: Collection programs: Collection<Program>
filepath: string filepath: string
gzip: boolean gzip: boolean
constructor({ channels, programs, filepath, gzip }: GuideData) { constructor(data: GuideData) {
this.channels = channels this.channels = data.channels
this.programs = programs this.programs = data.programs
this.filepath = filepath this.filepath = data.filepath
this.gzip = gzip || false this.gzip = data.gzip || false
}
addChannel(channel: Channel) {
this.channels.add(channel)
} }
toString() { toString() {
const currDate = new DateTime(process.env.CURR_DATE || new Date().toISOString(), { const currDate = dayjs.utc(process.env.CURR_DATE || new Date().toISOString())
timezone: 'UTC'
})
return generateXMLTV({ return EPGGrabber.generateXMLTV(this.channels.all(), this.programs.all(), currDate)
channels: this.channels.all(), }
programs: this.programs.all(),
date: currDate.toJSON() async save({ logger }: { logger: Logger }) {
}) const dir = path.dirname(this.filepath)
const storage = new Storage(dir)
const xmlFilepath = this.filepath
const xmlFilename = path.basename(xmlFilepath)
logger.info(` saving to "${xmlFilepath}"...`)
const xmltv = this.toString()
await storage.save(xmlFilename, xmltv)
if (this.gzip) {
const compressed = pako.gzip(xmltv)
const gzFilepath = `${this.filepath}.gz`
const gzFilename = path.basename(gzFilepath)
logger.info(` saving to "${gzFilepath}"...`)
await storage.save(gzFilename, compressed)
}
} }
} }

View File

@@ -1,59 +0,0 @@
import { Dictionary } from '@freearhey/core'
import epgGrabber from 'epg-grabber'
import { Feed, Channel } from '.'
/**
 * A channel entry from a site's channels.xml, linkable to the database
 * channel and feed it refers to via its xmltv_id ("<channel>@<feed>").
 */
export class GuideChannel {
  channelId?: string
  channel?: Channel
  feedId?: string
  feed?: Feed
  xmltvId?: string
  languageCode?: string
  siteId?: string
  logoUrl?: string
  siteDomain?: string
  siteName?: string

  constructor(data: epgGrabber.Channel) {
    let channelId: string | undefined
    let feedId: string | undefined
    if (data.xmltv_id) {
      ;[channelId, feedId] = data.xmltv_id.split('@')
    }

    this.channelId = channelId
    this.feedId = feedId
    this.xmltvId = data.xmltv_id
    this.languageCode = data.lang
    this.siteId = data.site_id
    this.logoUrl = data.logo
    this.siteDomain = data.site
    this.siteName = data.name
  }

  /** Links the database channel matching this entry's channel id. */
  withChannel(channelsKeyById: Dictionary): this {
    if (this.channelId) this.channel = channelsKeyById.get(this.channelId)
    return this
  }

  /** Links the database feed matching this entry's stream id. */
  withFeed(feedsKeyByStreamId: Dictionary): this {
    if (this.feedId) this.feed = feedsKeyByStreamId.get(this.getStreamId())
    return this
  }

  /** "<channel>@<feed>", the bare channel id, or '' when unknown. */
  getStreamId(): string {
    if (!this.channelId) return ''
    return this.feedId ? `${this.channelId}@${this.feedId}` : this.channelId
  }

  toJSON() {
    return {
      channel: this.channelId || null,
      feed: this.feedId || null,
      site: this.siteDomain || '',
      site_id: this.siteId || '',
      site_name: this.siteName || '',
      lang: this.languageCode || ''
    }
  }
}

View File

@@ -1,9 +1,5 @@
export * from './channel'
export * from './feed'
export * from './guide' export * from './guide'
export * from './guideChannel'
export * from './issue' export * from './issue'
export * from './logo'
export * from './site' export * from './site'
export * from './stream' export * from './channel'
export * from './channelList' export * from './program'

View File

@@ -1,21 +1,44 @@
import { EOL, OWNER, REPO } from '../constants'
import { Dictionary } from '@freearhey/core' import { Dictionary } from '@freearhey/core'
import { OWNER, REPO } from '../constants'
interface IssueProps { const FIELDS = new Dictionary({
Site: 'site'
})
interface IssueData {
number: number number: number
labels: string[] body: string
data: Dictionary labels: { name: string }[]
} }
export class Issue { export class Issue {
number: number number: number
labels: string[] labels: string[]
data: Dictionary data: Dictionary<string>
constructor({ number, labels, data }: IssueProps) { constructor(issue: IssueData) {
this.number = number const fields = typeof issue.body === 'string' ? issue.body.split('###') : []
this.labels = labels
this.data = data this.data = new Dictionary<string>()
fields.forEach((field: string) => {
const parsed = field.split(/\r?\n/).filter(Boolean)
let _label = parsed.shift()
_label = _label ? _label.trim() : ''
let _value = parsed.join(EOL)
_value = _value ? _value.trim() : ''
if (!_label || !_value) return
const id: string | undefined = FIELDS.get(_label)
const value: string = _value === '_No response_' || _value === 'None' ? '' : _value
if (!id) return
this.data.set(id, value)
})
this.labels = issue.labels.map(label => label.name)
this.number = issue.number
} }
getURL() { getURL() {

View File

@@ -1,41 +0,0 @@
import { Collection, type Dictionary } from '@freearhey/core'
import type { LogoData } from '../types/logo'
import { type Feed } from './feed'
/**
 * A logo record belonging to a channel or to one of its feeds.
 */
export class Logo {
  channelId?: string
  feedId?: string
  feed?: Feed
  tags: Collection = new Collection()
  width = 0
  height = 0
  format?: string
  url?: string

  constructor(data?: LogoData) {
    if (!data) return

    this.channelId = data.channel
    this.feedId = data.feed || undefined
    this.url = data.url
    this.width = data.width
    this.height = data.height
    this.format = data.format || undefined
    this.tags = new Collection(data.tags)
  }

  /** Links the owning feed; no-op when the logo is channel-level. */
  withFeed(feedsKeyByStreamId: Dictionary): this {
    if (!this.feedId) return this
    this.feed = feedsKeyByStreamId.get(this.getStreamId())
    return this
  }

  /** "<channel>@<feed>", the bare channel id, or '' when unknown. */
  getStreamId(): string {
    if (!this.channelId) return ''
    return this.feedId ? `${this.channelId}@${this.feedId}` : this.channelId
  }
}

View File

@@ -0,0 +1,3 @@
import * as epgGrabber from 'epg-grabber'
export class Program extends epgGrabber.Program {}

View File

@@ -7,36 +7,36 @@ enum StatusCode {
OK = 'ok' OK = 'ok'
} }
interface Status { export interface Status {
code: StatusCode code: StatusCode
emoji: string emoji: string
} }
interface SiteProps { export interface SiteData {
domain: string domain: string
totalChannels?: number totalChannels?: number
markedChannels?: number markedChannels?: number
issues: Collection issues: Collection<Issue>
} }
export class Site { export class Site {
domain: string domain: string
totalChannels: number totalChannels: number
markedChannels: number markedChannels: number
issues: Collection issues: Collection<Issue>
constructor({ domain, totalChannels = 0, markedChannels = 0, issues }: SiteProps) { constructor(data: SiteData) {
this.domain = domain this.domain = data.domain
this.totalChannels = totalChannels this.totalChannels = data.totalChannels || 0
this.markedChannels = markedChannels this.markedChannels = data.markedChannels || 0
this.issues = issues this.issues = data.issues
} }
getStatus(): Status { getStatus(): Status {
const issuesWithStatusDown = this.issues.filter((issue: Issue) => const issuesWithStatusDown = this.issues.filter((issue: Issue) =>
issue.labels.find(label => label === 'status:down') issue.labels.find(label => label === 'status:down')
) )
if (issuesWithStatusDown.notEmpty()) if (issuesWithStatusDown.isNotEmpty())
return { return {
code: StatusCode.DOWN, code: StatusCode.DOWN,
emoji: '🔴' emoji: '🔴'
@@ -45,7 +45,7 @@ export class Site {
const issuesWithStatusWarning = this.issues.filter((issue: Issue) => const issuesWithStatusWarning = this.issues.filter((issue: Issue) =>
issue.labels.find(label => label === 'status:warning') issue.labels.find(label => label === 'status:warning')
) )
if (issuesWithStatusWarning.notEmpty()) if (issuesWithStatusWarning.isNotEmpty())
return { return {
code: StatusCode.WARNING, code: StatusCode.WARNING,
emoji: '🟡' emoji: '🟡'
@@ -57,7 +57,7 @@ export class Site {
} }
} }
getIssues(): Collection { getIssueUrls(): Collection<string> {
return this.issues.map((issue: Issue) => issue.getURL()) return this.issues.map((issue: Issue) => issue.getURL())
} }
} }

View File

@@ -1,58 +0,0 @@
import type { StreamData } from '../types/stream'
import { Feed, Channel } from './index'
/**
 * A stream entry parsed from the streams data, optionally linked to its
 * database channel and feed.
 */
export class Stream {
  name?: string
  url: string
  // "<channel>@<feed>" when both are known, otherwise the bare channel id.
  id?: string
  channelId?: string
  channel?: Channel
  feedId?: string
  feed?: Feed
  filepath?: string
  line?: number
  label?: string
  verticalResolution?: number
  isInterlaced?: boolean
  referrer?: string
  userAgent?: string
  groupTitle = 'Undefined'
  removed = false

  constructor(data: StreamData) {
    const id = data.channel && data.feed ? [data.channel, data.feed].join('@') : data.channel
    const { verticalResolution, isInterlaced } = parseQuality(data.quality)
    this.id = id || undefined
    this.channelId = data.channel || undefined
    this.feedId = data.feed || undefined
    this.name = data.name || undefined
    this.url = data.url
    this.referrer = data.referrer || undefined
    this.userAgent = data.user_agent || undefined
    // `|| undefined` drops a parsed 0 (quality label without leading digits).
    this.verticalResolution = verticalResolution || undefined
    // NOTE(review): `isInterlaced || undefined` also maps `false` (progressive)
    // to `undefined` (unknown) — confirm losing this distinction is intended.
    this.isInterlaced = isInterlaced || undefined
    this.label = data.label || undefined
  }

  getId(): string {
    return this.id || ''
  }

  getName(): string {
    return this.name || ''
  }
}
// Parses a quality label such as "1080i" or "720p" into a vertical
// resolution and an interlaced flag. Returns nulls when no quality is set;
// the resolution is 0 when the label does not start with digits.
function parseQuality(quality: string | null): {
  verticalResolution: number | null
  isInterlaced: boolean | null
} {
  if (!quality) {
    return { verticalResolution: null, isInterlaced: null }
  }
  const digitsMatch = quality.match(/^(\d+)/)
  const verticalResolution = digitsMatch ? parseInt(digitsMatch[1], 10) : 0
  return {
    verticalResolution,
    isInterlaced: /i$/i.test(quality)
  }
}

View File

@@ -1,27 +1,8 @@
import { Collection } from '@freearhey/core' export interface ChannelGuideObject {
channel: string | null
export interface ChannelData { feed: string | null
id: string site: string
name: string site_id: string
alt_names: string[] site_name: string
network: string lang: string
owners: Collection
country: string
subdivision: string
city: string
categories: Collection
is_nsfw: boolean
launched: string
closed: string
replaced_by: string
website: string
}
export interface ChannelSearchableData {
id: string
name: string
altNames: string[]
guideNames: string[]
streamNames: string[]
feedFullNames: string[]
} }

View File

@@ -1,20 +0,0 @@
import { Storage } from '@freearhey/core'
/** Constructor dependencies for the data loader. */
export interface DataLoaderProps {
  storage: Storage
}

/** Raw parsed contents of the downloaded API data files. */
export interface DataLoaderData {
  countries: object | object[]
  regions: object | object[]
  subdivisions: object | object[]
  languages: object | object[]
  categories: object | object[]
  blocklist: object | object[]
  channels: object | object[]
  feeds: object | object[]
  timezones: object | object[]
  guides: object | object[]
  streams: object | object[]
  logos: object | object[]
}

View File

@@ -1,16 +0,0 @@
import { Collection, Dictionary } from '@freearhey/core'
/**
 * Output of the data processor: model collections plus lookup indexes
 * keyed/grouped by channel id or stream id ("<channel>@<feed>").
 */
export interface DataProcessorData {
  guideChannelsGroupedByStreamId: Dictionary
  feedsGroupedByChannelId: Dictionary
  logosGroupedByChannelId: Dictionary
  logosGroupedByStreamId: Dictionary
  feedsKeyByStreamId: Dictionary
  streamsGroupedById: Dictionary
  channelsKeyById: Dictionary
  guideChannels: Collection
  channels: Collection
  streams: Collection
  feeds: Collection
  logos: Collection
}

View File

@@ -1,12 +0,0 @@
import { Collection } from '@freearhey/core'
/** Raw feed record as loaded from the feeds data file. */
export interface FeedData {
  channel: string
  id: string
  name: string
  is_main: boolean
  broadcast_area: Collection
  languages: Collection
  timezones: Collection
  video_format: string
}

View File

@@ -1,8 +0,0 @@
/** Raw guide record as loaded from the guides data file. */
export interface GuideData {
  channel: string
  feed: string
  site: string
  site_id: string
  site_name: string
  lang: string
}

17
scripts/types/htmlTable.d.ts vendored Normal file
View File

@@ -0,0 +1,17 @@
import { Collection } from '@freearhey/core'
/** Header cell definition for an HTML table. */
export interface HTMLTableColumn {
  name: string
  nowrap?: boolean
  align?: string
  colspan?: number
}

/** Body cell of an HTML table. */
export interface HTMLTableDataItem {
  value: string
  nowrap?: boolean
  align?: string
  colspan?: number
}

/** A table row is an ordered collection of cells. */
export type HTMLTableRow = Collection<HTMLTableDataItem>

View File

@@ -1,9 +0,0 @@
/** Raw logo record as loaded from the logos data file. */
export interface LogoData {
  channel: string
  feed: string | null
  tags: string[]
  width: number
  height: number
  format: string | null
  url: string
}

10
scripts/types/queue.d.ts vendored Normal file
View File

@@ -0,0 +1,10 @@
import { SiteConfig } from '../core/siteConfig'
import { Channel } from '../models/channel'
import { Dayjs } from 'dayjs'
/**
 * A single unit of work in the grab queue: one channel on one date.
 */
// NOTE(review): QueueCreator currently enqueues `{ channel, date, config }`
// with a string date — the `siteConfig`/`Dayjs` fields here don't match;
// verify against the queue consumer.
export interface QueueItem {
  channel: Channel
  date: Dayjs
  siteConfig: SiteConfig
  error: string | null
}

View File

@@ -1,10 +0,0 @@
/** Raw stream record as loaded from the streams data file. */
export interface StreamData {
  channel: string | null
  feed: string | null
  name?: string
  url: string
  referrer: string | null
  user_agent: string | null
  // Quality label such as "1080i" or "720p"; parsed by parseQuality().
  quality: string | null
  label: string | null
}