Mirror of https://github.com/iptv-org/epg (synced 2025-12-15 09:56:42 -05:00)
Update scripts
scripts/api.ts (new file, 117 lines)
@@ -0,0 +1,117 @@
import { Collection, Dictionary } from '@freearhey/core'
import { DATA_DIR } from './constants'
import cliProgress from 'cli-progress'
import * as sdk from '@iptv-org/sdk'

const data = {
  channelsKeyById: new Dictionary<sdk.Models.Channel>(),
  feedsKeyByStreamId: new Dictionary<sdk.Models.Feed>(),
  feedsGroupedByChannelId: new Dictionary<sdk.Models.Feed[]>()
}

interface SearchIndex {
  search: (query: string) => sdk.Types.ChannelSearchableData[]
}

let searchIndex: SearchIndex

async function loadData() {
  const dataManager = new sdk.DataManager({ dataDir: DATA_DIR })
  await dataManager.loadFromDisk()
  dataManager.processData()

  const { channels, feeds } = dataManager.getProcessedData()

  data.channelsKeyById = channels.keyBy((channel: sdk.Models.Channel) => channel.id)
  data.feedsKeyByStreamId = feeds.keyBy((feed: sdk.Models.Feed) => feed.getStreamId())
  data.feedsGroupedByChannelId = feeds.groupBy((feed: sdk.Models.Feed) => feed.channel)

  searchIndex = sdk.SearchEngine.createIndex<sdk.Models.Channel>(channels)
}

async function downloadData() {
  function formatBytes(bytes: number) {
    if (bytes === 0) return '0 B'
    const k = 1024
    const sizes = ['B', 'KB', 'MB', 'GB']
    const i = Math.floor(Math.log(bytes) / Math.log(k))
    return parseFloat((bytes / Math.pow(k, i)).toFixed(1)) + ' ' + sizes[i]
  }

  const files = [
    'blocklist',
    'categories',
    'channels',
    'cities',
    'countries',
    'feeds',
    'guides',
    'languages',
    'logos',
    'regions',
    'streams',
    'subdivisions',
    'timezones'
  ]

  const multiBar = new cliProgress.MultiBar({
    stopOnComplete: true,
    hideCursor: true,
    forceRedraw: true,
    barsize: 36,
    format(options, params, payload) {
      const filename = payload.filename.padEnd(18, ' ')
      const barsize = options.barsize || 40
      const percent = (params.progress * 100).toFixed(2)
      const speed = payload.speed ? formatBytes(payload.speed) + '/s' : 'N/A'
      const total = formatBytes(params.total)
      const completeSize = Math.round(params.progress * barsize)
      const incompleteSize = barsize - completeSize
      const bar =
        options.barCompleteString && options.barIncompleteString
          ? options.barCompleteString.substr(0, completeSize) +
            options.barGlue +
            options.barIncompleteString.substr(0, incompleteSize)
          : '-'.repeat(barsize)

      return `${filename} [${bar}] ${percent}% | ETA: ${params.eta}s | ${total} | ${speed}`
    }
  })

  const dataManager = new sdk.DataManager({ dataDir: DATA_DIR })

  let requests: Promise<unknown>[] = []
  for (let basename of files) {
    const filename = `${basename}.json`
    const progressBar = multiBar.create(0, 0, { filename })
    const request = dataManager.downloadFileToDisk(basename, {
      onDownloadProgress({ total, loaded, rate }) {
        if (total) progressBar.setTotal(total)
        progressBar.update(loaded, { speed: rate })
      }
    })

    requests.push(request)
  }

  await Promise.allSettled(requests).catch(console.error)
}

function searchChannels(query: string): Collection<sdk.Models.Channel> {
  if (!searchIndex) return new Collection<sdk.Models.Channel>()

  const results = searchIndex.search(query)

  const channels = new Collection<sdk.Models.Channel>()

  new Collection<sdk.Types.ChannelSearchableData>(results).forEach(
    (item: sdk.Types.ChannelSearchableData) => {
      const channel = data.channelsKeyById.get(item.id)
      if (channel) channels.add(channel)
    }
  )

  return channels
}

export { data, loadData, downloadData, searchChannels }
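The new api.ts module is consumed in two phases by the commands below: downloadData() fetches the raw JSON files once, then loadData() builds the dictionaries and the search index that searchChannels() depends on. A minimal sketch of that call order (not part of the commit):

import { downloadData, loadData, searchChannels } from './api'

async function example() {
  await downloadData() // fetch the *.json files into DATA_DIR
  await loadData()     // build the dictionaries and in-memory search index
  const matches = searchChannels('BBC One') // returns an empty Collection if loadData() was skipped
  console.log(matches.count())
}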
@@ -1,41 +1,43 @@
import { Logger, Collection, Storage } from '@freearhey/core'
import { SITES_DIR, API_DIR } from '../../constants'
import { GuideChannel } from '../../models'
import { ChannelsParser } from '../../core'
import epgGrabber from 'epg-grabber'
import path from 'path'

async function main() {
  const logger = new Logger()

  logger.start('starting...')

  logger.info('loading channels...')
  const sitesStorage = new Storage(SITES_DIR)
  const parser = new ChannelsParser({
    storage: sitesStorage
  })

  const files: string[] = await sitesStorage.list('**/*.channels.xml')

  const channels = new Collection()
  for (const filepath of files) {
    const channelList = await parser.parse(filepath)

    channelList.channels.forEach((data: epgGrabber.Channel) => {
      channels.add(new GuideChannel(data))
    })
  }

  logger.info(`found ${channels.count()} channel(s)`)

  const output = channels.map((channel: GuideChannel) => channel.toJSON())

  const apiStorage = new Storage(API_DIR)
  const outputFilename = 'guides.json'
  await apiStorage.save('guides.json', output.toJSON())

  logger.info(`saved to "${path.join(API_DIR, outputFilename)}"`)
}

main()

import { ChannelGuideObject } from '../../types/channel'
import { SITES_DIR, API_DIR } from '../../constants'
import { Logger, Collection } from '@freearhey/core'
import epgGrabber, { EPGGrabber } from 'epg-grabber'
import { Storage } from '@freearhey/storage-js'
import { Channel } from '../../models'
import path from 'path'

async function main() {
  const logger = new Logger()

  logger.start('starting...')

  logger.info('loading channels...')
  const sitesStorage = new Storage(SITES_DIR)

  const files: string[] = await sitesStorage.list('**/*.channels.xml')

  const channels = new Collection<Channel>()
  for (const filepath of files) {
    const xml = await sitesStorage.load(filepath)
    const parsedChannels = EPGGrabber.parseChannelsXML(xml)
    const channelsFromXML = new Collection(parsedChannels).map(
      (channel: epgGrabber.Channel) => new Channel(channel.toObject())
    )

    channelsFromXML.forEach((channel: Channel) => {
      channels.add(channel)
    })
  }

  logger.info(`found ${channels.count()} channel(s)`)

  const output = channels.map<ChannelGuideObject>((channel: Channel) => channel.getGuideObject())

  const apiStorage = new Storage(API_DIR)
  const outputFilename = 'guides.json'
  await apiStorage.save('guides.json', output.toJSON())

  logger.info(`saved to "${path.join(API_DIR, outputFilename)}"`)
}

main()
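A small nit carried over from the old version of this command: outputFilename is declared but the string literal is what gets passed to save(). Behavior is identical either way; using the variable would read more clearly (sketch, not in the commit):

const outputFilename = 'guides.json'
await apiStorage.save(outputFilename, output.toJSON())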
@@ -1,25 +1,7 @@
import { DATA_DIR } from '../../constants'
import { Storage } from '@freearhey/core'
import { DataLoader } from '../../core'

async function main() {
  const storage = new Storage(DATA_DIR)
  const loader = new DataLoader({ storage })

  await Promise.all([
    loader.download('blocklist.json'),
    loader.download('categories.json'),
    loader.download('channels.json'),
    loader.download('countries.json'),
    loader.download('languages.json'),
    loader.download('regions.json'),
    loader.download('subdivisions.json'),
    loader.download('feeds.json'),
    loader.download('timezones.json'),
    loader.download('guides.json'),
    loader.download('streams.json'),
    loader.download('logos.json')
  ])
}

main()

import { downloadData } from '../../api'

async function main() {
  await downloadData()
}

main()
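Worth noting: Promise.allSettled() never rejects, so the .catch(console.error) inside downloadData() is effectively unreachable; individual download failures show up as 'rejected' entries in the settled results instead. A sketch of how they could be surfaced (hypothetical, not in the commit):

const results = await Promise.allSettled(requests)
for (const result of results) {
  if (result.status === 'rejected') console.error(result.reason)
}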
@@ -1,216 +1,200 @@
import { Storage, Collection, Logger, Dictionary } from '@freearhey/core'
import type { DataProcessorData } from '../../types/dataProcessor'
import type { DataLoaderData } from '../../types/dataLoader'
import { ChannelSearchableData } from '../../types/channel'
import { Channel, ChannelList, Feed } from '../../models'
import { DataProcessor, DataLoader } from '../../core'
import { select, input } from '@inquirer/prompts'
import { ChannelsParser } from '../../core'
import { DATA_DIR } from '../../constants'
import nodeCleanup from 'node-cleanup'
import sjs from '@freearhey/search-js'
import epgGrabber from 'epg-grabber'
import { Command } from 'commander'
import readline from 'readline'

interface ChoiceValue { type: string; value?: Feed | Channel }
interface Choice { name: string; short?: string; value: ChoiceValue; default?: boolean }

if (process.platform === 'win32') {
  readline
    .createInterface({
      input: process.stdin,
      output: process.stdout
    })
    .on('SIGINT', function () {
      process.emit('SIGINT')
    })
}

const program = new Command()

program.argument('<filepath>', 'Path to *.channels.xml file to edit').parse(process.argv)

const filepath = program.args[0]
const logger = new Logger()
const storage = new Storage()
let channelList = new ChannelList({ channels: [] })

main(filepath)
nodeCleanup(() => {
  save(filepath, channelList)
})

export default async function main(filepath: string) {
  if (!(await storage.exists(filepath))) {
    throw new Error(`File "${filepath}" does not exist`)
  }

  logger.info('loading data from api...')
  const processor = new DataProcessor()
  const dataStorage = new Storage(DATA_DIR)
  const loader = new DataLoader({ storage: dataStorage })
  const data: DataLoaderData = await loader.load()
  const { channels, channelsKeyById, feedsGroupedByChannelId }: DataProcessorData =
    processor.process(data)

  logger.info('loading channels...')
  const parser = new ChannelsParser({ storage })
  channelList = await parser.parse(filepath)
  const parsedChannelsWithoutId = channelList.channels.filter(
    (channel: epgGrabber.Channel) => !channel.xmltv_id
  )

  logger.info(
    `found ${channelList.channels.count()} channels (including ${parsedChannelsWithoutId.count()} without ID)`
  )

  logger.info('creating search index...')
  const items = channels.map((channel: Channel) => channel.getSearchable()).all()
  const searchIndex = sjs.createIndex(items, {
    searchable: ['name', 'altNames', 'guideNames', 'streamNames', 'feedFullNames']
  })

  logger.info('starting...\n')

  for (const channel of parsedChannelsWithoutId.all()) {
    try {
      channel.xmltv_id = await selectChannel(
        channel,
        searchIndex,
        feedsGroupedByChannelId,
        channelsKeyById
      )
    } catch (err) {
      logger.info(err.message)
      break
    }
  }

  parsedChannelsWithoutId.forEach((channel: epgGrabber.Channel) => {
    if (channel.xmltv_id === '-') {
      channel.xmltv_id = ''
    }
  })
}

async function selectChannel(
  channel: epgGrabber.Channel,
  searchIndex,
  feedsGroupedByChannelId: Dictionary,
  channelsKeyById: Dictionary
): Promise<string> {
  const query = escapeRegex(channel.name)
  const similarChannels = searchIndex
    .search(query)
    .map((item: ChannelSearchableData) => channelsKeyById.get(item.id))

  const selected: ChoiceValue = await select({
    message: `Select channel ID for "${channel.name}" (${channel.site_id}):`,
    choices: getChannelChoises(new Collection(similarChannels)),
    pageSize: 10
  })

  switch (selected.type) {
    case 'skip':
      return '-'
    case 'type': {
      const typedChannelId = await input({ message: ' Channel ID:' })
      if (!typedChannelId) return ''
      const selectedFeedId = await selectFeed(typedChannelId, feedsGroupedByChannelId)
      if (selectedFeedId === '-') return typedChannelId
      return [typedChannelId, selectedFeedId].join('@')
    }
    case 'channel': {
      const selectedChannel = selected.value
      if (!selectedChannel) return ''
      const selectedFeedId = await selectFeed(selectedChannel.id || '', feedsGroupedByChannelId)
      if (selectedFeedId === '-') return selectedChannel.id || ''
      return [selectedChannel.id, selectedFeedId].join('@')
    }
  }

  return ''
}

async function selectFeed(channelId: string, feedsGroupedByChannelId: Dictionary): Promise<string> {
  const channelFeeds = feedsGroupedByChannelId.has(channelId)
    ? new Collection(feedsGroupedByChannelId.get(channelId))
    : new Collection()
  const choices = getFeedChoises(channelFeeds)

  const selected: ChoiceValue = await select({
    message: `Select feed ID for "${channelId}":`,
    choices,
    pageSize: 10
  })

  switch (selected.type) {
    case 'skip':
      return '-'
    case 'type':
      return await input({ message: ' Feed ID:', default: 'SD' })
    case 'feed':
      const selectedFeed = selected.value
      if (!selectedFeed) return ''
      return selectedFeed.id || ''
  }

  return ''
}

function getChannelChoises(channels: Collection): Choice[] {
  const choises: Choice[] = []

  channels.forEach((channel: Channel) => {
    const names = new Collection([channel.name, ...channel.getAltNames().all()]).uniq().join(', ')

    choises.push({
      value: {
        type: 'channel',
        value: channel
      },
      name: `${channel.id} (${names})`,
      short: `${channel.id}`
    })
  })

  choises.push({ name: 'Type...', value: { type: 'type' } })
  choises.push({ name: 'Skip', value: { type: 'skip' } })

  return choises
}

function getFeedChoises(feeds: Collection): Choice[] {
  const choises: Choice[] = []

  feeds.forEach((feed: Feed) => {
    let name = `${feed.id} (${feed.name})`
    if (feed.isMain) name += ' [main]'

    choises.push({
      value: {
        type: 'feed',
        value: feed
      },
      default: feed.isMain,
      name,
      short: feed.id
    })
  })

  choises.push({ name: 'Type...', value: { type: 'type' } })
  choises.push({ name: 'Skip', value: { type: 'skip' } })

  return choises
}

function save(filepath: string, channelList: ChannelList) {
  if (!storage.existsSync(filepath)) return
  storage.saveSync(filepath, channelList.toString())
  logger.info(`\nFile '${filepath}' successfully saved`)
}

function escapeRegex(string: string) {
  return string.replace(/[/\-\\^$*+?.()|[\]{}]/g, '\\$&')
}

import { loadData, data, searchChannels } from '../../api'
import epgGrabber, { EPGGrabber } from 'epg-grabber'
import { Collection, Logger } from '@freearhey/core'
import { select, input } from '@inquirer/prompts'
import { generateChannelsXML } from '../../core'
import { Storage } from '@freearhey/storage-js'
import { Channel } from '../../models'
import nodeCleanup from 'node-cleanup'
import * as sdk from '@iptv-org/sdk'
import { Command } from 'commander'
import readline from 'readline'

interface ChoiceValue {
  type: string
  value?: sdk.Models.Feed | sdk.Models.Channel
}
interface Choice {
  name: string
  short?: string
  value: ChoiceValue
  default?: boolean
}

if (process.platform === 'win32') {
  readline
    .createInterface({
      input: process.stdin,
      output: process.stdout
    })
    .on('SIGINT', function () {
      process.emit('SIGINT')
    })
}

const program = new Command()

program.argument('<filepath>', 'Path to *.channels.xml file to edit').parse(process.argv)

const filepath = program.args[0]
const logger = new Logger()
const storage = new Storage()
let channelsFromXML = new Collection<Channel>()

main(filepath)
nodeCleanup(() => {
  save(filepath, channelsFromXML)
})

export default async function main(filepath: string) {
  if (!(await storage.exists(filepath))) {
    throw new Error(`File "${filepath}" does not exist`)
  }

  logger.info('loading data from api...')
  await loadData()

  logger.info('loading channels...')
  const xml = await storage.load(filepath)
  const parsedChannels = EPGGrabber.parseChannelsXML(xml)
  channelsFromXML = new Collection(parsedChannels).map(
    (channel: epgGrabber.Channel) => new Channel(channel.toObject())
  )
  const channelsFromXMLWithoutId = channelsFromXML.filter((channel: Channel) => !channel.xmltv_id)

  logger.info(
    `found ${channelsFromXML.count()} channels (including ${channelsFromXMLWithoutId.count()} without ID)`
  )

  logger.info('starting...')
  console.log()

  for (const channel of channelsFromXMLWithoutId.all()) {
    try {
      channel.xmltv_id = await selectChannel(channel)
    } catch {
      break
    }
  }

  channelsFromXMLWithoutId.forEach((channel: epgGrabber.Channel) => {
    if (channel.xmltv_id === '-') {
      channel.xmltv_id = ''
    }
  })
}

async function selectChannel(channel: epgGrabber.Channel): Promise<string> {
  const query = escapeRegex(channel.name)
  const similarChannels = searchChannels(query)
  const choices = getChoicesForChannel(similarChannels).all()

  const selected: ChoiceValue = await select({
    message: `Select channel ID for "${channel.name}" (${channel.site_id}):`,
    choices,
    pageSize: 10
  })

  switch (selected.type) {
    case 'skip':
      return '-'
    case 'type': {
      const typedChannelId = await input({ message: ' Channel ID:' })
      if (!typedChannelId) return ''
      const selectedFeedId = await selectFeed(typedChannelId)
      if (selectedFeedId === '-') return typedChannelId
      return [typedChannelId, selectedFeedId].join('@')
    }
    case 'channel': {
      const selectedChannel = selected.value
      if (!selectedChannel) return ''
      const selectedFeedId = await selectFeed(selectedChannel.id || '')
      if (selectedFeedId === '-') return selectedChannel.id || ''
      return [selectedChannel.id, selectedFeedId].join('@')
    }
  }

  return ''
}

async function selectFeed(channelId: string): Promise<string> {
  const channelFeeds = new Collection(data.feedsGroupedByChannelId.get(channelId))
  const choices = getChoicesForFeed(channelFeeds).all()

  const selected: ChoiceValue = await select({
    message: `Select feed ID for "${channelId}":`,
    choices,
    pageSize: 10
  })

  switch (selected.type) {
    case 'skip':
      return '-'
    case 'type':
      return await input({ message: ' Feed ID:', default: 'SD' })
    case 'feed':
      const selectedFeed = selected.value
      if (!selectedFeed) return ''
      return selectedFeed.id || ''
  }

  return ''
}

function getChoicesForChannel(channels: Collection<sdk.Models.Channel>): Collection<Choice> {
  const choices = new Collection<Choice>()

  channels.forEach((channel: sdk.Models.Channel) => {
    const names = new Collection([channel.name, ...channel.alt_names]).uniq().join(', ')

    choices.add({
      value: {
        type: 'channel',
        value: channel
      },
      name: `${channel.id} (${names})`,
      short: `${channel.id}`
    })
  })

  choices.add({ name: 'Type...', value: { type: 'type' } })
  choices.add({ name: 'Skip', value: { type: 'skip' } })

  return choices
}

function getChoicesForFeed(feeds: Collection<sdk.Models.Feed>): Collection<Choice> {
  const choices = new Collection<Choice>()

  feeds.forEach((feed: sdk.Models.Feed) => {
    let name = `${feed.id} (${feed.name})`
    if (feed.is_main) name += ' [main]'

    choices.add({
      value: {
        type: 'feed',
        value: feed
      },
      default: feed.is_main,
      name,
      short: feed.id
    })
  })

  choices.add({ name: 'Type...', value: { type: 'type' } })
  choices.add({ name: 'Skip', value: { type: 'skip' } })

  return choices
}

function save(filepath: string, channelsFromXML: Collection<Channel>) {
  if (!storage.existsSync(filepath)) return
  const xml = generateChannelsXML(channelsFromXML)
  storage.saveSync(filepath, xml)
  console.log()
  logger.info(`File '${filepath}' successfully saved`)
}

function escapeRegex(string: string) {
  return string.replace(/[/\-\\^$*+?.()|[\]{}]/g, '\\$&')
}
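Both versions of the edit command assemble the final ID the same way: a channel ID, optionally followed by '@' and a feed ID, with '-' acting as the "skip" sentinel. A sketch with hypothetical values:

// Hypothetical values for illustration only.
const channelId = 'BBCOne.uk'
const selectedFeedId = 'SD'
const xmltv_id = selectedFeedId === '-' ? channelId : [channelId, selectedFeedId].join('@')
// -> 'BBCOne.uk@SD'; a later pass rewrites any remaining '-' answers to ''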
scripts/commands/channels/format.ts (new file, 60 lines)
@@ -0,0 +1,60 @@
import { Collection, Logger } from '@freearhey/core'
import epgGrabber, { EPGGrabber } from 'epg-grabber'
import { generateChannelsXML } from '../../core'
import { Storage } from '@freearhey/storage-js'
import { SITES_DIR } from '../../constants'
import { data, loadData } from '../../api'
import { Channel } from '../../models'
import { program } from 'commander'

program.argument('[filepath...]', 'Path to file to format').parse(process.argv)

async function main() {
  const logger = new Logger()

  logger.info('loading data from api...')
  await loadData()

  logger.info('loading *.channels.xml files...')
  const storage = new Storage()
  const files = program.args.length
    ? program.args
    : await storage.list(`${SITES_DIR}/**/*.channels.xml`)

  logger.info(`found ${files.length} file(s)`)

  logger.info('formatting...')
  for (const filepath of files) {
    if (!storage.existsSync(filepath)) continue

    const xml = await storage.load(filepath)
    const parsedChannels = EPGGrabber.parseChannelsXML(xml)
    const channelsFromXML = new Collection(parsedChannels).map(
      (channel: epgGrabber.Channel) => new Channel(channel.toObject())
    )

    channelsFromXML.forEach((channel: Channel) => {
      if (!channel.xmltv_id) return
      if (data.feedsKeyByStreamId.get(channel.xmltv_id)) return

      const channelData = data.channelsKeyById.get(channel.xmltv_id)
      if (channelData) {
        const mainFeed = channelData.getMainFeed()
        if (mainFeed) {
          channel.xmltv_id = mainFeed.getStreamId()
          return
        }
      }

      channel.xmltv_id = ''
    })

    channelsFromXML.sortBy((channel: Channel) => channel.site_id)

    const output = generateChannelsXML(channelsFromXML)

    await storage.save(filepath, output)
  }
}

main()
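The normalization rule in format.ts can be restated as a standalone function; this sketch uses plain Maps in place of the api module's dictionaries (hypothetical data, not part of the commit):

const feedsByStreamId = new Map([['BBCOne.uk@SD', true]])                    // hypothetical
const mainFeedStreamIdByChannelId = new Map([['BBCTwo.uk', 'BBCTwo.uk@HD']]) // hypothetical

function normalizeXmltvId(id: string): string {
  if (!id) return ''                                   // nothing to normalize
  if (feedsByStreamId.has(id)) return id               // already a valid stream ID
  const mainFeed = mainFeedStreamIdByChannelId.get(id) // bare channel ID?
  if (mainFeed) return mainFeed                        // rewrite to its main feed's stream ID
  return ''                                            // unknown IDs are cleared
}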
@@ -1,109 +1,109 @@
import chalk from 'chalk'
import { program } from 'commander'
import { Storage, File } from '@freearhey/core'
import { XmlDocument, XsdValidator, XmlValidateError, ErrorDetail } from 'libxml2-wasm'

const xsd = `<?xml version="1.0" encoding="UTF-8"?>
<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified">
  <xs:element name="channels">
    <xs:complexType>
      <xs:sequence>
        <xs:element minOccurs="0" maxOccurs="unbounded" ref="channel"/>
      </xs:sequence>
    </xs:complexType>
  </xs:element>
  <xs:element name="channel">
    <xs:complexType mixed="true">
      <xs:attribute use="required" ref="site"/>
      <xs:attribute use="required" ref="lang"/>
      <xs:attribute use="required" ref="site_id"/>
      <xs:attribute name="xmltv_id" use="required" type="xs:string"/>
      <xs:attribute name="logo" type="xs:string"/>
      <xs:attribute name="lcn" type="xs:string"/>
    </xs:complexType>
  </xs:element>
  <xs:attribute name="site">
    <xs:simpleType>
      <xs:restriction base="xs:string">
        <xs:minLength value="1"/>
      </xs:restriction>
    </xs:simpleType>
  </xs:attribute>
  <xs:attribute name="site_id">
    <xs:simpleType>
      <xs:restriction base="xs:string">
        <xs:minLength value="1"/>
      </xs:restriction>
    </xs:simpleType>
  </xs:attribute>
  <xs:attribute name="lang">
    <xs:simpleType>
      <xs:restriction base="xs:string">
        <xs:minLength value="1"/>
      </xs:restriction>
    </xs:simpleType>
  </xs:attribute>
</xs:schema>`

program.argument('[filepath...]', 'Path to *.channels.xml files to check').parse(process.argv)

async function main() {
  const storage = new Storage()

  let errors: ErrorDetail[] = []

  const files = program.args.length ? program.args : await storage.list('sites/**/*.channels.xml')
  for (const filepath of files) {
    const file = new File(filepath)
    if (file.extension() !== 'xml') continue

    const xml = await storage.load(filepath)

    let localErrors: ErrorDetail[] = []

    try {
      const schema = XmlDocument.fromString(xsd)
      const validator = XsdValidator.fromDoc(schema)
      const doc = XmlDocument.fromString(xml)

      validator.validate(doc)

      schema.dispose()
      validator.dispose()
      doc.dispose()
    } catch (_error) {
      const error = _error as XmlValidateError

      localErrors = localErrors.concat(error.details)
    }

    xml.split('\n').forEach((line: string, lineIndex: number) => {
      const found = line.match(/='/)
      if (found) {
        const colIndex = found.index || 0
        localErrors.push({
          line: lineIndex + 1,
          col: colIndex + 1,
          message: 'Single quotes cannot be used in attributes'
        })
      }
    })

    if (localErrors.length) {
      console.log(`\n${chalk.underline(filepath)}`)
      localErrors.forEach((error: ErrorDetail) => {
        const position = `${error.line}:${error.col}`
        console.log(` ${chalk.gray(position.padEnd(4, ' '))} ${error.message.trim()}`)
      })

      errors = errors.concat(localErrors)
    }
  }

  if (errors.length) {
    console.log(chalk.red(`\n${errors.length} error(s)`))
    process.exit(1)
  }
}

main()

import { XmlDocument, XsdValidator, XmlValidateError, ErrorDetail } from 'libxml2-wasm'
import { Storage, File } from '@freearhey/storage-js'
import { program } from 'commander'
import chalk from 'chalk'

const xsd = `<?xml version="1.0" encoding="UTF-8"?>
<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified">
  <xs:element name="channels">
    <xs:complexType>
      <xs:sequence>
        <xs:element minOccurs="0" maxOccurs="unbounded" ref="channel"/>
      </xs:sequence>
    </xs:complexType>
  </xs:element>
  <xs:element name="channel">
    <xs:complexType mixed="true">
      <xs:attribute use="required" ref="site"/>
      <xs:attribute use="required" ref="lang"/>
      <xs:attribute use="required" ref="site_id"/>
      <xs:attribute name="xmltv_id" use="required" type="xs:string"/>
      <xs:attribute name="logo" type="xs:string"/>
      <xs:attribute name="lcn" type="xs:string"/>
    </xs:complexType>
  </xs:element>
  <xs:attribute name="site">
    <xs:simpleType>
      <xs:restriction base="xs:string">
        <xs:minLength value="1"/>
      </xs:restriction>
    </xs:simpleType>
  </xs:attribute>
  <xs:attribute name="site_id">
    <xs:simpleType>
      <xs:restriction base="xs:string">
        <xs:minLength value="1"/>
      </xs:restriction>
    </xs:simpleType>
  </xs:attribute>
  <xs:attribute name="lang">
    <xs:simpleType>
      <xs:restriction base="xs:string">
        <xs:minLength value="1"/>
      </xs:restriction>
    </xs:simpleType>
  </xs:attribute>
</xs:schema>`

program.argument('[filepath...]', 'Path to *.channels.xml files to check').parse(process.argv)

async function main() {
  const storage = new Storage()

  let errors: ErrorDetail[] = []

  const files = program.args.length ? program.args : await storage.list('sites/**/*.channels.xml')
  for (const filepath of files) {
    const file = new File(filepath)
    if (file.extension() !== 'xml') continue

    const xml = await storage.load(filepath)

    let localErrors: ErrorDetail[] = []

    try {
      const schema = XmlDocument.fromString(xsd)
      const validator = XsdValidator.fromDoc(schema)
      const doc = XmlDocument.fromString(xml)

      validator.validate(doc)

      schema.dispose()
      validator.dispose()
      doc.dispose()
    } catch (_error) {
      const error = _error as XmlValidateError

      localErrors = localErrors.concat(error.details)
    }

    xml.split('\n').forEach((line: string, lineIndex: number) => {
      const found = line.match(/='/)
      if (found) {
        const colIndex = found.index || 0
        localErrors.push({
          line: lineIndex + 1,
          col: colIndex + 1,
          message: 'Single quotes cannot be used in attributes'
        })
      }
    })

    if (localErrors.length) {
      console.log(`\n${chalk.underline(filepath)}`)
      localErrors.forEach((error: ErrorDetail) => {
        const position = `${error.line}:${error.col}`
        console.log(` ${chalk.gray(position.padEnd(4, ' '))} ${error.message.trim()}`)
      })

      errors = errors.concat(localErrors)
    }
  }

  if (errors.length) {
    console.log(chalk.red(`\n${errors.length} error(s)`))
    process.exit(1)
  }
}

main()
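The single-quote check is a plain line scan layered on top of the XSD validation: the first "='" match per line is reported with 1-based line:col coordinates. Note it matches anywhere on the line, not only inside attributes. For example (sketch):

const line = `<channel site='example.com' lang="en">` // hypothetical input
const found = line.match(/='/)
if (found) console.log(`1:${(found.index || 0) + 1}`) // -> 1:14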
@@ -1,86 +1,119 @@
import { Logger, File, Storage } from '@freearhey/core'
import { ChannelsParser } from '../../core'
import { ChannelList } from '../../models'
import { pathToFileURL } from 'node:url'
import epgGrabber from 'epg-grabber'
import { Command } from 'commander'

const program = new Command()
program
  .requiredOption('-c, --config <config>', 'Config file')
  .option('-s, --set [args...]', 'Set custom arguments')
  .option('-o, --output <output>', 'Output file')
  .parse(process.argv)

interface ParseOptions {
  config: string
  set?: string
  output?: string
  clean?: boolean
}

const options: ParseOptions = program.opts()

async function main() {
  function isPromise(promise: object[] | Promise<object[]>) {
    return (
      !!promise &&
      typeof promise === 'object' &&
      typeof (promise as Promise<object[]>).then === 'function'
    )
  }

  const storage = new Storage()
  const logger = new Logger()
  const parser = new ChannelsParser({ storage })
  const file = new File(options.config)
  const dir = file.dirname()
  const config = (await import(pathToFileURL(options.config).toString())).default
  const outputFilepath = options.output || `${dir}/${config.site}.channels.xml`

  let channelList = new ChannelList({ channels: [] })
  if (await storage.exists(outputFilepath)) {
    channelList = await parser.parse(outputFilepath)
  }

  const args: Record<string, string> = {}

  if (Array.isArray(options.set)) {
    options.set.forEach((arg: string) => {
      const [key, value] = arg.split(':')
      args[key] = value
    })
  }

  let parsedChannels = config.channels(args)
  if (isPromise(parsedChannels)) {
    parsedChannels = await parsedChannels
  }
  parsedChannels = parsedChannels.map((channel: epgGrabber.Channel) => {
    channel.site = config.site

    return channel
  })

  const newChannelList = new ChannelList({ channels: [] })
  parsedChannels.forEach((channel: epgGrabber.Channel) => {
    if (!channel.site_id) return

    const found: epgGrabber.Channel | undefined = channelList.get(channel.site_id)

    if (found) {
      channel.xmltv_id = found.xmltv_id
      channel.lang = found.lang
    }

    newChannelList.add(channel)
  })

  newChannelList.sort()

  await storage.save(outputFilepath, newChannelList.toString())

  logger.info(`File '${outputFilepath}' successfully saved`)
}

main()

import { Storage, File } from '@freearhey/storage-js'
import { Collection, Logger } from '@freearhey/core'
import epgGrabber, { EPGGrabber } from 'epg-grabber'
import { generateChannelsXML } from '../../core'
import { pathToFileURL } from 'node:url'
import { Channel } from '../../models'
import { Command } from 'commander'

interface SiteConfigChannelData {
  xmltv_id: string
  name: string
  site_id: string
  lang?: string
  logo?: string
  url?: string
  lcn?: string
}

const program = new Command()
program
  .requiredOption('-c, --config <config>', 'Config file')
  .option('-s, --set [args...]', 'Set custom arguments')
  .option('-o, --output <output>', 'Output file')
  .parse(process.argv)

interface ParseOptions {
  config: string
  set?: string
  output?: string
  clean?: boolean
}

const options: ParseOptions = program.opts()

async function main() {
  function isPromise(promise: object[] | Promise<object[]>) {
    return (
      !!promise &&
      typeof promise === 'object' &&
      typeof (promise as Promise<object[]>).then === 'function'
    )
  }

  const storage = new Storage()
  const logger = new Logger()
  const file = new File(options.config)
  const dir = file.dirname()
  const config = (await import(pathToFileURL(options.config).toString())).default
  const outputFilepath = options.output || `${dir}/${config.site}.channels.xml`

  const args: Record<string, string> = {}

  if (Array.isArray(options.set)) {
    options.set.forEach((arg: string) => {
      const [key, value] = arg.split(':')
      args[key] = value
    })
  }

  let channelsFromXML = new Collection<Channel>()
  if (await storage.exists(outputFilepath)) {
    const xml = await storage.load(outputFilepath)
    const parsedChannels = EPGGrabber.parseChannelsXML(xml)
    channelsFromXML = new Collection(parsedChannels).map(
      (channel: epgGrabber.Channel) => new Channel(channel.toObject())
    )
  }

  let configChannels = config.channels(args)
  if (isPromise(configChannels)) {
    configChannels = await configChannels
  }

  const channelsFromConfig = new Collection<SiteConfigChannelData>(configChannels).map(
    (data: SiteConfigChannelData) => {
      return new Channel({
        xmltv_id: data.xmltv_id,
        name: data.name,
        site_id: data.site_id,
        lang: data.lang || null,
        logo: data.logo || null,
        url: data.url || null,
        lcn: data.lcn || null,
        site: config.site,
        index: -1
      })
    }
  )

  const newChannelList = new Collection<Channel>()
  channelsFromConfig.forEach((channel: Channel) => {
    if (!channel.site_id) return

    const found: Channel | undefined = channelsFromXML.find(
      (_channel: Channel) => _channel.site_id == channel.site_id
    )

    if (found) {
      channel.xmltv_id = found.xmltv_id
      channel.lang = found.lang
    }

    newChannelList.add(channel)
  })

  newChannelList.sortBy([
    (channel: Channel) => channel.lang || '_',
    (channel: Channel) => (channel.xmltv_id ? channel.xmltv_id.toLowerCase() : '0'),
    (channel: Channel) => channel.site_id
  ])

  const xml = generateChannelsXML(newChannelList)

  await storage.save(outputFilepath, xml)

  logger.info(`File '${outputFilepath}' successfully saved`)
}

main()
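In both versions, each '--set' argument is split on ':' and only the first two pieces are kept by the destructuring, so values that themselves contain ':' are truncated. A sketch:

const args: Record<string, string> = {}
for (const arg of ['country:us', 'url:https://example.com']) { // hypothetical inputs
  const [key, value] = arg.split(':')
  args[key] = value
}
// -> { country: 'us', url: 'https' }; the URL value is cut at its first ':'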
@@ -1,100 +1,96 @@
import { ChannelsParser, DataLoader, DataProcessor } from '../../core'
import { DataProcessorData } from '../../types/dataProcessor'
import { Storage, Dictionary, File } from '@freearhey/core'
import { DataLoaderData } from '../../types/dataLoader'
import { ChannelList } from '../../models'
import { DATA_DIR } from '../../constants'
import epgGrabber from 'epg-grabber'
import { program } from 'commander'
import chalk from 'chalk'
import langs from 'langs'

program.argument('[filepath...]', 'Path to *.channels.xml files to validate').parse(process.argv)

interface ValidationError {
  type: 'duplicate' | 'wrong_channel_id' | 'wrong_feed_id' | 'wrong_lang'
  name: string
  lang?: string
  xmltv_id?: string
  site_id?: string
  logo?: string
}

async function main() {
  const processor = new DataProcessor()
  const dataStorage = new Storage(DATA_DIR)
  const loader = new DataLoader({ storage: dataStorage })
  const data: DataLoaderData = await loader.load()
  const { channelsKeyById, feedsKeyByStreamId }: DataProcessorData = processor.process(data)
  const parser = new ChannelsParser({
    storage: new Storage()
  })

  let totalFiles = 0
  let totalErrors = 0
  let totalWarnings = 0

  const storage = new Storage()
  const files = program.args.length ? program.args : await storage.list('sites/**/*.channels.xml')
  for (const filepath of files) {
    const file = new File(filepath)
    if (file.extension() !== 'xml') continue

    const channelList: ChannelList = await parser.parse(filepath)

    const bufferBySiteId = new Dictionary()
    const errors: ValidationError[] = []
    channelList.channels.forEach((channel: epgGrabber.Channel) => {
      const bufferId: string = channel.site_id
      if (bufferBySiteId.missing(bufferId)) {
        bufferBySiteId.set(bufferId, true)
      } else {
        errors.push({ type: 'duplicate', ...channel })
        totalErrors++
      }

      if (!langs.where('1', channel.lang ?? '')) {
        errors.push({ type: 'wrong_lang', ...channel })
        totalErrors++
      }

      if (!channel.xmltv_id) return
      const [channelId, feedId] = channel.xmltv_id.split('@')

      const foundChannel = channelsKeyById.get(channelId)
      if (!foundChannel) {
        errors.push({ type: 'wrong_channel_id', ...channel })
        totalWarnings++
      }

      if (feedId) {
        const foundFeed = feedsKeyByStreamId.get(channel.xmltv_id)
        if (!foundFeed) {
          errors.push({ type: 'wrong_feed_id', ...channel })
          totalWarnings++
        }
      }
    })

    if (errors.length) {
      console.log(chalk.underline(filepath))
      console.table(errors, ['type', 'lang', 'xmltv_id', 'site_id', 'name'])
      console.log()
      totalFiles++
    }
  }

  const totalProblems = totalWarnings + totalErrors
  if (totalProblems > 0) {
    console.log(
      chalk.red(
        `${totalProblems} problems (${totalErrors} errors, ${totalWarnings} warnings) in ${totalFiles} file(s)`
      )
    )
    if (totalErrors > 0) {
      process.exit(1)
    }
  }
}

main()

import { Collection, Dictionary } from '@freearhey/core'
import { Storage, File } from '@freearhey/storage-js'
import epgGrabber, { EPGGrabber } from 'epg-grabber'
import { loadData, data } from '../../api'
import { Channel } from '../../models'
import { program } from 'commander'
import chalk from 'chalk'
import langs from 'langs'

program.argument('[filepath...]', 'Path to *.channels.xml files to validate').parse(process.argv)

interface ValidationError {
  type: 'duplicate' | 'wrong_channel_id' | 'wrong_feed_id' | 'wrong_lang'
  name: string
  lang: string | null
  xmltv_id: string | null
  site_id: string | null
  logo: string | null
}

async function main() {
  await loadData()
  const { channelsKeyById, feedsKeyByStreamId } = data

  let totalFiles = 0
  let totalErrors = 0
  let totalWarnings = 0

  const storage = new Storage()
  const files = program.args.length ? program.args : await storage.list('sites/**/*.channels.xml')
  for (const filepath of files) {
    const file = new File(filepath)
    if (file.extension() !== 'xml') continue

    const xml = await storage.load(filepath)
    const parsedChannels = EPGGrabber.parseChannelsXML(xml)
    const channelList = new Collection(parsedChannels).map(
      (channel: epgGrabber.Channel) => new Channel(channel.toObject())
    )

    const bufferBySiteId = new Dictionary()
    const errors: ValidationError[] = []
    channelList.forEach((channel: Channel) => {
      const bufferId: string = channel.site_id
      if (bufferBySiteId.missing(bufferId)) {
        bufferBySiteId.set(bufferId, true)
      } else {
        errors.push({ type: 'duplicate', ...channel.toObject() })
        totalErrors++
      }

      if (!langs.where('1', channel.lang ?? '')) {
        errors.push({ type: 'wrong_lang', ...channel.toObject() })
        totalErrors++
      }

      if (!channel.xmltv_id) return
      const [channelId, feedId] = channel.xmltv_id.split('@')

      const foundChannel = channelsKeyById.get(channelId)
      if (!foundChannel) {
        errors.push({ type: 'wrong_channel_id', ...channel.toObject() })
        totalWarnings++
      }

      if (feedId) {
        const foundFeed = feedsKeyByStreamId.get(channel.xmltv_id)
        if (!foundFeed) {
          errors.push({ type: 'wrong_feed_id', ...channel.toObject() })
          totalWarnings++
        }
      }
    })

    if (errors.length) {
      console.log(chalk.underline(filepath))
      console.table(errors, ['type', 'lang', 'xmltv_id', 'site_id', 'name'])
      console.log()
      totalFiles++
    }
  }

  const totalProblems = totalWarnings + totalErrors
  if (totalProblems > 0) {
    console.log(
      chalk.red(
        `${totalProblems} problems (${totalErrors} errors, ${totalWarnings} warnings) in ${totalFiles} file(s)`
      )
    )
    if (totalErrors > 0) {
      process.exit(1)
    }
  }
}

main()
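The validator splits each xmltv_id into its channel and feed parts and applies a two-tier severity model: duplicate site_ids and invalid languages are errors (non-zero exit), while unknown channel or feed IDs are only warnings. A sketch of the split:

const xmltv_id = 'BBCOne.uk@SD' // hypothetical
const [channelId, feedId] = xmltv_id.split('@')
// channelId = 'BBCOne.uk', feedId = 'SD'; for a bare ID feedId is undefined,
// so the feed lookup (keyed by the full stream ID) is skipped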
@@ -1,133 +1,289 @@
|
||||
import { Logger, Timer, Storage, Collection } from '@freearhey/core'
|
||||
import { QueueCreator, Job, ChannelsParser } from '../../core'
|
||||
import { Option, program } from 'commander'
|
||||
import { SITES_DIR } from '../../constants'
|
||||
import { Channel } from 'epg-grabber'
|
||||
import path from 'path'
|
||||
import { ChannelList } from '../../models'
|
||||
|
||||
program
|
||||
.addOption(new Option('-s, --site <name>', 'Name of the site to parse'))
|
||||
.addOption(
|
||||
new Option(
|
||||
'-c, --channels <path>',
|
||||
'Path to *.channels.xml file (required if the "--site" attribute is not specified)'
|
||||
)
|
||||
)
|
||||
.addOption(new Option('-o, --output <path>', 'Path to output file').default('guide.xml'))
|
||||
.addOption(new Option('-l, --lang <codes>', 'Filter channels by languages (ISO 639-1 codes)'))
|
||||
.addOption(
|
||||
new Option('-t, --timeout <milliseconds>', 'Override the default timeout for each request').env(
|
||||
'TIMEOUT'
|
||||
)
|
||||
)
|
||||
.addOption(
|
||||
new Option('-d, --delay <milliseconds>', 'Override the default delay between request').env(
|
||||
'DELAY'
|
||||
)
|
||||
)
|
||||
.addOption(new Option('-x, --proxy <url>', 'Use the specified proxy').env('PROXY'))
|
||||
.addOption(
|
||||
new Option(
|
||||
'--days <days>',
|
||||
'Override the number of days for which the program will be loaded (defaults to the value from the site config)'
|
||||
)
|
||||
.argParser(value => parseInt(value))
|
||||
.env('DAYS')
|
||||
)
|
||||
.addOption(
|
||||
new Option('--maxConnections <number>', 'Limit on the number of concurrent requests')
|
||||
.default(1)
|
||||
.env('MAX_CONNECTIONS')
|
||||
)
|
||||
.addOption(
|
||||
new Option('--gzip', 'Create a compressed version of the guide as well')
|
||||
.default(false)
|
||||
.env('GZIP')
|
||||
)
|
||||
.addOption(new Option('--curl', 'Display each request as CURL').default(false).env('CURL'))
|
||||
.parse()
|
||||
|
||||
export interface GrabOptions {
|
||||
site?: string
|
||||
channels?: string
|
||||
output: string
|
||||
gzip: boolean
|
||||
curl: boolean
|
||||
maxConnections: number
|
||||
timeout?: string
|
||||
delay?: string
|
||||
lang?: string
|
||||
days?: number
|
||||
proxy?: string
|
||||
}
|
||||
|
||||
const options: GrabOptions = program.opts()
|
||||
|
||||
async function main() {
|
||||
if (!options.site && !options.channels)
|
||||
throw new Error('One of the arguments must be presented: `--site` or `--channels`')
|
||||
|
||||
const logger = new Logger()
|
||||
|
||||
logger.start('starting...')
|
||||
|
||||
logger.info('config:')
|
||||
logger.tree(options)
|
||||
|
||||
logger.info('loading channels...')
|
||||
const storage = new Storage()
|
||||
const parser = new ChannelsParser({ storage })
|
||||
|
||||
let files: string[] = []
|
||||
if (options.site) {
|
||||
let pattern = path.join(SITES_DIR, options.site, '*.channels.xml')
|
||||
pattern = pattern.replace(/\\/g, '/')
|
||||
files = await storage.list(pattern)
|
||||
} else if (options.channels) {
|
||||
files = await storage.list(options.channels)
|
||||
}
|
||||
|
||||
let channels = new Collection()
|
||||
for (const filepath of files) {
|
||||
const channelList: ChannelList = await parser.parse(filepath)
|
||||
|
||||
channels = channels.concat(channelList.channels)
|
||||
}
|
||||
|
||||
if (options.lang) {
|
||||
channels = channels.filter((channel: Channel) => {
|
||||
if (!options.lang || !channel.lang) return true
|
||||
|
||||
return options.lang.includes(channel.lang)
|
||||
})
|
||||
}
|
||||
|
||||
logger.info(` found ${channels.count()} channel(s)`)
|
||||
|
||||
logger.info('run:')
|
||||
runJob({ logger, channels })
|
||||
}
|
||||
|
||||
main()
|
||||
|
||||
async function runJob({ logger, channels }: { logger: Logger; channels: Collection }) {
|
||||
const timer = new Timer()
|
||||
timer.start()
|
||||
|
||||
const queueCreator = new QueueCreator({
|
||||
channels,
|
||||
logger,
|
||||
options
|
||||
})
|
||||
const queue = await queueCreator.create()
|
||||
const job = new Job({
|
||||
queue,
|
||||
logger,
|
||||
options
|
||||
})
|
||||
|
||||
await job.run()
|
||||
|
||||
logger.success(` done in ${timer.format('HH[h] mm[m] ss[s]')}`)
|
||||
}
|
||||
import { Logger, Timer, Collection, Template } from '@freearhey/core'
|
||||
import epgGrabber, { EPGGrabber, EPGGrabberMock } from 'epg-grabber'
|
||||
import { loadJs, parseProxy, SiteConfig, Queue } from '../../core'
|
||||
import { Channel, Guide, Program } from '../../models'
|
||||
import { SocksProxyAgent } from 'socks-proxy-agent'
|
||||
import { PromisyClass, TaskQueue } from 'cwait'
|
||||
import { Storage } from '@freearhey/storage-js'
|
||||
import { QueueItem } from '../../types/queue'
|
||||
import { Option, program } from 'commander'
|
||||
import { SITES_DIR } from '../../constants'
|
||||
import { data, loadData } from '../../api'
|
||||
import dayjs, { Dayjs } from 'dayjs'
|
||||
import path from 'path'
|
||||
|
||||
program
|
||||
.addOption(new Option('-s, --site <name>', 'Name of the site to parse'))
|
||||
.addOption(
|
||||
new Option(
|
||||
'-c, --channels <path>',
|
||||
'Path to *.channels.xml file (required if the "--site" attribute is not specified)'
|
||||
)
|
||||
)
|
||||
.addOption(new Option('-o, --output <path>', 'Path to output file').default('guide.xml'))
|
||||
.addOption(new Option('-l, --lang <codes>', 'Filter channels by languages (ISO 639-1 codes)'))
|
||||
.addOption(
|
||||
new Option('-t, --timeout <milliseconds>', 'Override the default timeout for each request')
|
||||
.env('TIMEOUT')
|
||||
.argParser(parseInt)
|
||||
)
|
||||
.addOption(
|
||||
new Option('-d, --delay <milliseconds>', 'Override the default delay between request')
|
||||
.env('DELAY')
|
||||
.argParser(parseInt)
|
||||
)
|
||||
.addOption(new Option('-x, --proxy <url>', 'Use the specified proxy').env('PROXY'))
|
||||
.addOption(
|
||||
new Option(
|
||||
'--days <days>',
|
||||
'Override the number of days for which the program will be loaded (defaults to the value from the site config)'
|
||||
)
|
||||
.argParser(parseInt)
|
||||
.env('DAYS')
|
||||
)
|
||||
.addOption(
|
||||
new Option('--maxConnections <number>', 'Limit on the number of concurrent requests')
|
||||
.default(1)
|
||||
.argParser(parseInt)
|
||||
.env('MAX_CONNECTIONS')
|
||||
)
|
||||
.addOption(
|
||||
new Option('--gzip', 'Create a compressed version of the guide as well')
|
||||
.default(false)
|
||||
.env('GZIP')
|
||||
)
|
||||
.addOption(new Option('--curl', 'Display each request as CURL').default(false).env('CURL'))
|
||||
.parse()
|
||||
|
||||
interface GrabOptions {
|
||||
site?: string
|
||||
channels?: string
|
||||
output: string
|
||||
gzip: boolean
|
||||
curl: boolean
|
||||
maxConnections: number
|
||||
timeout?: number
|
||||
delay?: number
|
||||
lang?: string
|
||||
days?: number
|
||||
proxy?: string
|
||||
}
|
||||
|
||||
const options: GrabOptions = program.opts()
|
||||
|
||||
async function main() {
|
||||
if (!options.site && !options.channels)
|
||||
throw new Error('One of the arguments must be presented: `--site` or `--channels`')
|
||||
|
||||
const logger = new Logger()
|
||||
|
||||
logger.info('starting...')
|
||||
|
||||
logger.info('config:')
|
||||
logger.tree(options)
|
||||
|
||||
logger.info('loading channels...')
|
||||
const storage = new Storage()
|
||||
|
||||
let files: string[] = []
|
||||
if (options.site) {
|
||||
let pattern = path.join(SITES_DIR, options.site, '*.channels.xml')
|
||||
pattern = pattern.replace(/\\/g, '/')
|
||||
files = await storage.list(pattern)
|
||||
} else if (options.channels) {
|
||||
files = await storage.list(options.channels)
|
||||
}
|
||||
|
||||
let channelsFromXML = new Collection<Channel>()
|
||||
for (const filepath of files) {
|
||||
const xml = await storage.load(filepath)
|
||||
const parsedChannels = EPGGrabber.parseChannelsXML(xml)
|
||||
const _channelsFromXML = new Collection(parsedChannels).map(
|
||||
(channel: epgGrabber.Channel) => new Channel(channel.toObject())
|
||||
)
|
||||
|
||||
channelsFromXML.concat(_channelsFromXML)
|
||||
}
|
||||
|
||||
if (options.lang) {
|
||||
channelsFromXML = channelsFromXML.filter((channel: Channel) => {
|
||||
if (!options.lang) return true
|
||||
|
||||
return options.lang.includes(channel.lang)
|
||||
})
|
||||
}
|
||||
|
||||
logger.info(`found ${channelsFromXML.count()} channel(s)`)
|
||||
|
||||
logger.info('loading api data...')
|
||||
await loadData()
|
||||
|
||||
logger.info('creating queue...')
|
||||
|
||||
let index = 0
|
||||
const queue = new Queue()
|
||||
|
||||
for (const channel of channelsFromXML.all()) {
|
||||
channel.index = index++
|
||||
if (!channel.site || !channel.site_id || !channel.name) continue
|
||||
|
||||
const configObject = await loadJs(channel.getConfigPath())
|
||||
|
||||
const siteConfig = new SiteConfig(configObject)
|
||||
|
||||
siteConfig.filepath = channel.getConfigPath()
|
||||
|
||||
if (options.timeout !== undefined) {
|
||||
siteConfig.request = { ...siteConfig.request, ...{ timeout: options.timeout } }
|
||||
}
|
||||
if (options.delay !== undefined) siteConfig.delay = options.delay
|
||||
if (options.curl !== undefined) siteConfig.curl = options.curl
|
||||
if (options.proxy !== undefined) {
|
||||
const proxy = parseProxy(options.proxy)
|
||||
|
||||
if (
|
||||
proxy.protocol &&
|
||||
['socks', 'socks5', 'socks5h', 'socks4', 'socks4a'].includes(String(proxy.protocol))
|
||||
) {
|
||||
const socksProxyAgent = new SocksProxyAgent(options.proxy)
|
||||
|
||||
siteConfig.request = {
|
||||
...siteConfig.request,
|
||||
...{ httpAgent: socksProxyAgent, httpsAgent: socksProxyAgent }
|
||||
}
|
||||
} else {
|
||||
siteConfig.request = { ...siteConfig.request, ...{ proxy } }
|
||||
}
|
||||
}
|
||||
|
||||
if (!channel.xmltv_id) channel.xmltv_id = channel.site_id
|
||||
|
||||
const days = options.days || siteConfig.days || 1
|
||||
const currDate = dayjs.utc(process.env.CURR_DATE || new Date().toISOString())
|
||||
const dates = Array.from({ length: days }, (_, day) => currDate.add(day, 'd'))
|
||||
|
||||
dates.forEach((date: Dayjs) => {
|
||||
const key = `${channel.site}:${channel.lang}:${channel.xmltv_id}:${date.toJSON()}`
|
||||
if (queue.has(key)) return
|
||||
queue.add(key, {
|
||||
channel,
|
||||
date,
|
||||
siteConfig,
|
||||
error: null
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
const grabber = process.env.NODE_ENV === 'test' ? new EPGGrabberMock() : new EPGGrabber()
|
||||
|
||||
const taskQueue = new TaskQueue(Promise as PromisyClass, options.maxConnections)
|
||||
|
||||
const queueItems = queue.getItems()
|
||||
|
||||
const channels = new Collection<Channel>()
|
||||
const programs = new Collection<Program>()
|
||||
|
||||
let i = 1
|
||||
const total = queueItems.count()
|
||||
|
||||
const requests = queueItems.map(
|
||||
taskQueue.wrap(async (queueItem: QueueItem) => {
|
||||
const { channel, siteConfig, date } = queueItem
|
||||
|
||||
if (!channel.logo) {
|
||||
if (siteConfig.logo) {
|
||||
channel.logo = await grabber.loadLogo(channel, date)
|
||||
} else {
|
||||
channel.logo = getLogoForChannel(channel)
|
||||
}
|
||||
}
|
||||
|
||||
channels.add(channel)
|
||||
|
||||
const channelPrograms = await grabber.grab(
|
||||
channel,
|
||||
date,
|
||||
siteConfig,
|
||||
(context: epgGrabber.Types.GrabCallbackContext, error: Error | null) => {
|
||||
logger.info(
|
||||
` [${i}/${total}] ${context.channel.site} (${context.channel.lang}) - ${
|
||||
context.channel.xmltv_id
|
||||
} - ${context.date.format('MMM D, YYYY')} (${context.programs.length} programs)`
|
||||
)
|
||||
if (i < total) i++
|
||||
|
||||
if (error) {
|
||||
logger.info(` ERR: ${error.message}`)
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
const _programs = new Collection<epgGrabber.Program>(channelPrograms).map<Program>(
|
||||
program => new Program(program.toObject())
|
||||
)
|
||||
|
||||
programs.concat(_programs)
|
||||
})
|
||||
)
|
||||
|
||||
logger.info('run:')
|
||||
|
||||
const timer = new Timer()
|
||||
timer.start()
|
||||
|
||||
await Promise.all(requests.all())
|
||||
|
||||
const pathTemplate = new Template(options.output)
|
||||
|
||||
  const channelsGroupedByKey = channels
    .sortBy([(channel: Channel) => channel.index, (channel: Channel) => channel.xmltv_id])
    .uniqBy((channel: Channel) => `${channel.xmltv_id}:${channel.site}:${channel.lang}`)
    .groupBy((channel: Channel) => {
      return pathTemplate.format({ lang: channel.lang || 'en', site: channel.site || '' })
    })

  const programsGroupedByKey = programs
    .sortBy([(program: Program) => program.channel, (program: Program) => program.start])
    .groupBy((program: Program) => {
      const lang =
        program.titles && program.titles.length && program.titles[0].lang
          ? program.titles[0].lang
          : 'en'

      return pathTemplate.format({ lang, site: program.site || '' })
    })

  for (const groupKey of channelsGroupedByKey.keys()) {
    const groupChannels = new Collection(channelsGroupedByKey.get(groupKey))
    const groupPrograms = new Collection(programsGroupedByKey.get(groupKey))
    const guide = new Guide({
      filepath: groupKey,
      gzip: options.gzip,
      channels: groupChannels,
      programs: groupPrograms
    })

    await guide.save({ logger })
  }

  logger.success(` done in ${timer.format('HH[h] mm[m] ss[s]')}`)
}

main()

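// Logo fallback when the site config does not provide one: try the exact
// feed first (xmltv_id is "<channel>@<feed>"), then the channel itself.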
function getLogoForChannel(channel: Channel): string | null {
  const feedData = data.feedsKeyByStreamId.get(channel.xmltv_id)
  if (feedData) {
    const firstLogo = feedData.getLogos().first()
    if (firstLogo) return firstLogo.url
  }

  const [channelId] = channel.xmltv_id.split('@')
  const channelData = data.channelsKeyById.get(channelId)
  if (channelData) {
    const firstLogo = channelData.getLogos().first()
    if (firstLogo) return firstLogo.url
  }

  return null
}

@@ -1,45 +1,46 @@
import { Logger, Storage } from '@freearhey/core'
import { SITES_DIR } from '../../constants'
import { pathToFileURL } from 'node:url'
import { program } from 'commander'
import fs from 'fs-extra'

program.argument('<site>', 'Domain name of the site').parse(process.argv)

const domain = program.args[0]

async function main() {
  const storage = new Storage(SITES_DIR)
  const logger = new Logger()

  logger.info(`Initializing "${domain}"...\r\n`)

  const dir = domain
  if (await storage.exists(dir)) {
    throw new Error(`Folder "${dir}" already exists`)
  }

  await storage.createDir(dir)

  logger.info(`Creating "${dir}/${domain}.test.js"...`)
  const testTemplate = fs.readFileSync(pathToFileURL('scripts/templates/_test.js'), {
    encoding: 'utf8'
  })
  await storage.save(`${dir}/${domain}.test.js`, testTemplate.replace(/<DOMAIN>/g, domain))

  logger.info(`Creating "${dir}/${domain}.config.js"...`)
  const configTemplate = fs.readFileSync(pathToFileURL('scripts/templates/_config.js'), {
    encoding: 'utf8'
  })
  await storage.save(`${dir}/${domain}.config.js`, configTemplate.replace(/<DOMAIN>/g, domain))

  logger.info(`Creating "${dir}/readme.md"...`)
  const readmeTemplate = fs.readFileSync(pathToFileURL('scripts/templates/_readme.md'), {
    encoding: 'utf8'
  })
  await storage.save(`${dir}/readme.md`, readmeTemplate.replace(/<DOMAIN>/g, domain))

  logger.info('\r\nDone')
}

main()
import { SITES_DIR, EOL } from '../../constants'
import { Storage } from '@freearhey/storage-js'
import { Logger } from '@freearhey/core'
import { pathToFileURL } from 'node:url'
import { program } from 'commander'
import fs from 'fs-extra'

program.argument('<site>', 'Domain name of the site').parse(process.argv)

const domain = program.args[0]

async function main() {
  const storage = new Storage(SITES_DIR)
  const logger = new Logger()

  logger.info(`Initializing "${domain}"...${EOL}`)

  const dir = domain
  if (await storage.exists(dir)) {
    throw new Error(`Folder "${dir}" already exists`)
  }

  await storage.createDir(dir)

  logger.info(`Creating "${dir}/${domain}.test.js"...`)
  const testTemplate = fs.readFileSync(pathToFileURL('scripts/templates/_test.js'), {
    encoding: 'utf8'
  })
  await storage.save(`${dir}/${domain}.test.js`, testTemplate.replace(/<DOMAIN>/g, domain))

  logger.info(`Creating "${dir}/${domain}.config.js"...`)
  const configTemplate = fs.readFileSync(pathToFileURL('scripts/templates/_config.js'), {
    encoding: 'utf8'
  })
  await storage.save(`${dir}/${domain}.config.js`, configTemplate.replace(/<DOMAIN>/g, domain))

  logger.info(`Creating "${dir}/readme.md"...`)
  const readmeTemplate = fs.readFileSync(pathToFileURL('scripts/templates/_readme.md'), {
    encoding: 'utf8'
  })
  await storage.save(`${dir}/readme.md`, readmeTemplate.replace(/<DOMAIN>/g, domain))

  logger.info(`${EOL}Done`)
}

main()

@@ -1,76 +1,80 @@
import { IssueLoader, HTMLTable, ChannelsParser } from '../../core'
import { Logger, Storage, Collection } from '@freearhey/core'
import { ChannelList, Issue, Site } from '../../models'
import { SITES_DIR, ROOT_DIR } from '../../constants'
import { Channel } from 'epg-grabber'

async function main() {
  const logger = new Logger({ level: -999 })
  const issueLoader = new IssueLoader()
  const sitesStorage = new Storage(SITES_DIR)
  const sites = new Collection()

  logger.info('loading channels...')
  const channelsParser = new ChannelsParser({
    storage: sitesStorage
  })

  logger.info('loading list of sites')
  const folders = await sitesStorage.list('*/')

  logger.info('loading issues...')
  const issues = await issueLoader.load()

  logger.info('putting the data together...')
  const brokenGuideReports = issues.filter(issue =>
    issue.labels.find((label: string) => label === 'broken guide')
  )
  for (const domain of folders) {
    const filteredIssues = brokenGuideReports.filter(
      (issue: Issue) => domain === issue.data.get('site')
    )

    const site = new Site({
      domain,
      issues: filteredIssues
    })

    const files = await sitesStorage.list(`${domain}/*.channels.xml`)
    for (const filepath of files) {
      const channelList: ChannelList = await channelsParser.parse(filepath)

      site.totalChannels += channelList.channels.count()
      site.markedChannels += channelList.channels
        .filter((channel: Channel) => channel.xmltv_id)
        .count()
    }

    sites.add(site)
  }

  logger.info('creating sites table...')
  const tableData = new Collection()
  sites.forEach((site: Site) => {
    tableData.add([
      { value: `<a href="sites/${site.domain}">${site.domain}</a>` },
      { value: site.totalChannels, align: 'right' },
      { value: site.markedChannels, align: 'right' },
      { value: site.getStatus().emoji, align: 'center' },
      { value: site.getIssues().all().join(', ') }
    ])
  })

  logger.info('updating sites.md...')
  const table = new HTMLTable(tableData.all(), [
    { name: 'Site', align: 'left' },
    { name: 'Channels<br>(total / with xmltv-id)', colspan: 2, align: 'left' },
    { name: 'Status', align: 'left' },
    { name: 'Notes', align: 'left' }
  ])
  const rootStorage = new Storage(ROOT_DIR)
  const sitesTemplate = await new Storage().load('scripts/templates/_sites.md')
  const sitesContent = sitesTemplate.replace('_TABLE_', table.toString())
  await rootStorage.save('SITES.md', sitesContent)
}

main()
import { HTMLTableDataItem, HTMLTableRow, HTMLTableColumn } from '../../types/htmlTable'
import { SITES_DIR, ROOT_DIR } from '../../constants'
import { Logger, Collection } from '@freearhey/core'
import { Issue, Site, Channel } from '../../models'
import { HTMLTable, loadIssues } from '../../core'
import { Storage } from '@freearhey/storage-js'
import * as epgGrabber from 'epg-grabber'
import { EPGGrabber } from 'epg-grabber'

async function main() {
  const logger = new Logger({ level: -999 })
  const sitesStorage = new Storage(SITES_DIR)
  const sites = new Collection<Site>()

  logger.info('loading list of sites')
  const folders = await sitesStorage.list('*/')

  logger.info('loading issues...')
  const issues = await loadIssues()

  logger.info('putting the data together...')
  const brokenGuideReports = issues.filter(issue =>
    issue.labels.find((label: string) => label === 'broken guide')
  )
  for (const domain of folders) {
    const filteredIssues = brokenGuideReports.filter(
      (issue: Issue) => domain === issue.data.get('site')
    )

    const site = new Site({
      domain,
      issues: filteredIssues
    })

    const files = await sitesStorage.list(`${domain}/*.channels.xml`)
    for (const filepath of files) {
      const xml = await sitesStorage.load(filepath)
      const channelsFromXML = EPGGrabber.parseChannelsXML(xml)
      const channels = new Collection(channelsFromXML).map(
        (channel: epgGrabber.Channel) => new Channel(channel.toObject())
      )

      site.totalChannels += channels.count()
      site.markedChannels += channels.filter((channel: Channel) => channel.xmltv_id).count()
    }

    sites.add(site)
  }

  logger.info('creating sites table...')
  const rows = new Collection<HTMLTableRow>()
  sites.forEach((site: Site) => {
    rows.add(
      new Collection<HTMLTableDataItem>([
        { value: `<a href="sites/${site.domain}">${site.domain}</a>` },
        { value: site.totalChannels.toString(), align: 'right' },
        { value: site.markedChannels.toString(), align: 'right' },
        { value: site.getStatus().emoji, align: 'center' },
        { value: site.getIssueUrls().all().join(', ') }
      ])
    )
  })

  logger.info('updating sites.md...')
  const table = new HTMLTable(
    rows,
    new Collection<HTMLTableColumn>([
      { name: 'Site', align: 'left' },
      { name: 'Channels<br>(total / with xmltv-id)', colspan: 2, align: 'left' },
      { name: 'Status', align: 'left' },
      { name: 'Notes', align: 'left' }
    ])
  )
  const rootStorage = new Storage(ROOT_DIR)
  const sitesTemplate = await new Storage().load('scripts/templates/_sites.md')
  const sitesContent = sitesTemplate.replace('_TABLE_', table.toString())
  await rootStorage.save('SITES.md', sitesContent)
}

main()

@@ -1,9 +1,10 @@
export const ROOT_DIR = process.env.ROOT_DIR || '.'
export const SITES_DIR = process.env.SITES_DIR || './sites'
export const GUIDES_DIR = process.env.GUIDES_DIR || './guides'
export const DATA_DIR = process.env.DATA_DIR || './temp/data'
export const API_DIR = process.env.API_DIR || '.api'
export const DOT_SITES_DIR = process.env.DOT_SITES_DIR || './.sites'
export const TESTING = process.env.NODE_ENV === 'test' ? true : false
export const OWNER = 'iptv-org'
export const REPO = 'epg'
export const ROOT_DIR = process.env.ROOT_DIR || '.'
export const SITES_DIR = process.env.SITES_DIR || './sites'
export const GUIDES_DIR = process.env.GUIDES_DIR || './guides'
export const DATA_DIR = process.env.DATA_DIR || './temp/data'
export const API_DIR = process.env.API_DIR || '.api'
export const DOT_SITES_DIR = process.env.DOT_SITES_DIR || './.sites'
export const TESTING = process.env.NODE_ENV === 'test' ? true : false
export const OWNER = 'iptv-org'
export const REPO = 'epg'
export const EOL = '\r\n'

@@ -1,16 +0,0 @@
import axios, { AxiosInstance, AxiosResponse, AxiosRequestConfig } from 'axios'

export class ApiClient {
  instance: AxiosInstance

  constructor() {
    this.instance = axios.create({
      baseURL: 'https://iptv-org.github.io/api',
      responseType: 'stream'
    })
  }

  get(url: string, options: AxiosRequestConfig): Promise<AxiosResponse> {
    return this.instance.get(url, options)
  }
}
@@ -1,22 +0,0 @@
import { parseChannels } from 'epg-grabber'
import { Storage } from '@freearhey/core'
import { ChannelList } from '../models'

interface ChannelsParserProps {
  storage: Storage
}

export class ChannelsParser {
  storage: Storage

  constructor({ storage }: ChannelsParserProps) {
    this.storage = storage
  }

  async parse(filepath: string): Promise<ChannelList> {
    const content = await this.storage.load(filepath)
    const parsed = parseChannels(content)

    return new ChannelList({ channels: parsed })
  }
}
@@ -1,32 +0,0 @@
import { SiteConfig } from 'epg-grabber'
import { pathToFileURL } from 'url'

export class ConfigLoader {
  async load(filepath: string): Promise<SiteConfig> {
    const fileUrl = pathToFileURL(filepath).toString()
    const config = (await import(fileUrl)).default
    const defaultConfig = {
      days: 1,
      delay: 0,
      output: 'guide.xml',
      request: {
        method: 'GET',
        maxContentLength: 5242880,
        timeout: 30000,
        withCredentials: true,
        jar: null,
        responseType: 'arraybuffer',
        cache: false,
        headers: null,
        data: null
      },
      maxConnections: 1,
      site: undefined,
      url: undefined,
      parser: undefined,
      channels: undefined
    }

    return { ...defaultConfig, ...config } as SiteConfig
  }
}
@@ -1,103 +0,0 @@
import type { DataLoaderProps, DataLoaderData } from '../types/dataLoader'
import cliProgress, { MultiBar } from 'cli-progress'
import { Storage } from '@freearhey/core'
import { ApiClient } from './apiClient'
import numeral from 'numeral'

export class DataLoader {
  client: ApiClient
  storage: Storage
  progressBar: MultiBar

  constructor(props: DataLoaderProps) {
    this.client = new ApiClient()
    this.storage = props.storage
    this.progressBar = new cliProgress.MultiBar({
      stopOnComplete: true,
      hideCursor: true,
      forceRedraw: true,
      barsize: 36,
      format(options, params, payload) {
        const filename = payload.filename.padEnd(18, ' ')
        const barsize = options.barsize || 40
        const percent = (params.progress * 100).toFixed(2)
        const speed = payload.speed ? numeral(payload.speed).format('0.0 b') + '/s' : 'N/A'
        const total = numeral(params.total).format('0.0 b')
        const completeSize = Math.round(params.progress * barsize)
        const incompleteSize = barsize - completeSize
        const bar =
          options.barCompleteString && options.barIncompleteString
            ? options.barCompleteString.substr(0, completeSize) +
              options.barGlue +
              options.barIncompleteString.substr(0, incompleteSize)
            : '-'.repeat(barsize)

        return `${filename} [${bar}] ${percent}% | ETA: ${params.eta}s | ${total} | ${speed}`
      }
    })
  }

  async load(): Promise<DataLoaderData> {
    const [
      countries,
      regions,
      subdivisions,
      languages,
      categories,
      blocklist,
      channels,
      feeds,
      timezones,
      guides,
      streams,
      logos
    ] = await Promise.all([
      this.storage.json('countries.json'),
      this.storage.json('regions.json'),
      this.storage.json('subdivisions.json'),
      this.storage.json('languages.json'),
      this.storage.json('categories.json'),
      this.storage.json('blocklist.json'),
      this.storage.json('channels.json'),
      this.storage.json('feeds.json'),
      this.storage.json('timezones.json'),
      this.storage.json('guides.json'),
      this.storage.json('streams.json'),
      this.storage.json('logos.json')
    ])

    return {
      countries,
      regions,
      subdivisions,
      languages,
      categories,
      blocklist,
      channels,
      feeds,
      timezones,
      guides,
      streams,
      logos
    }
  }

  async download(filename: string) {
    if (!this.storage || !this.progressBar) return

    const stream = await this.storage.createStream(filename)
    const progressBar = this.progressBar.create(0, 0, { filename })

    this.client
      .get(filename, {
        responseType: 'stream',
        onDownloadProgress({ total, loaded, rate }) {
          if (total) progressBar.setTotal(total)
          progressBar.update(loaded, { speed: rate })
        }
      })
      .then(response => {
        response.data.pipe(stream)
      })
  }
}
@@ -1,55 +0,0 @@
import { Channel, Feed, GuideChannel, Logo, Stream } from '../models'
import { DataLoaderData } from '../types/dataLoader'
import { Collection } from '@freearhey/core'

export class DataProcessor {

  process(data: DataLoaderData) {
    let channels = new Collection(data.channels).map(data => new Channel(data))
    const channelsKeyById = channels.keyBy((channel: Channel) => channel.id)

    const guideChannels = new Collection(data.guides).map(data => new GuideChannel(data))
    const guideChannelsGroupedByStreamId = guideChannels.groupBy((channel: GuideChannel) =>
      channel.getStreamId()
    )

    const streams = new Collection(data.streams).map(data => new Stream(data))
    const streamsGroupedById = streams.groupBy((stream: Stream) => stream.getId())

    let feeds = new Collection(data.feeds).map(data =>
      new Feed(data)
        .withGuideChannels(guideChannelsGroupedByStreamId)
        .withStreams(streamsGroupedById)
        .withChannel(channelsKeyById)
    )
    const feedsKeyByStreamId = feeds.keyBy((feed: Feed) => feed.getStreamId())

    const logos = new Collection(data.logos).map(data =>
      new Logo(data).withFeed(feedsKeyByStreamId)
    )
    const logosGroupedByChannelId = logos.groupBy((logo: Logo) => logo.channelId)
    const logosGroupedByStreamId = logos.groupBy((logo: Logo) => logo.getStreamId())

    feeds = feeds.map((feed: Feed) => feed.withLogos(logosGroupedByStreamId))
    const feedsGroupedByChannelId = feeds.groupBy((feed: Feed) => feed.channelId)

    channels = channels.map((channel: Channel) =>
      channel.withFeeds(feedsGroupedByChannelId).withLogos(logosGroupedByChannelId)
    )

    return {
      guideChannelsGroupedByStreamId,
      feedsGroupedByChannelId,
      logosGroupedByChannelId,
      logosGroupedByStreamId,
      streamsGroupedById,
      feedsKeyByStreamId,
      channelsKeyById,
      guideChannels,
      channels,
      streams,
      feeds,
      logos
    }
  }
}
@@ -1,14 +0,0 @@
import dayjs from 'dayjs'
import utc from 'dayjs/plugin/utc'

dayjs.extend(utc)

const date = {}

date.getUTC = function (d = null) {
  if (typeof d === 'string') return dayjs.utc(d).startOf('d')

  return dayjs.utc().startOf('d')
}

export default date
@@ -1,105 +0,0 @@
import { EPGGrabber, GrabCallbackData, EPGGrabberMock, SiteConfig, Channel } from 'epg-grabber'
import { Logger, Collection } from '@freearhey/core'
import { Queue, ProxyParser } from './'
import { GrabOptions } from '../commands/epg/grab'
import { TaskQueue, PromisyClass } from 'cwait'
import { SocksProxyAgent } from 'socks-proxy-agent'

interface GrabberProps {
  logger: Logger
  queue: Queue
  options: GrabOptions
}

export class Grabber {
  logger: Logger
  queue: Queue
  options: GrabOptions
  grabber: EPGGrabber | EPGGrabberMock

  constructor({ logger, queue, options }: GrabberProps) {
    this.logger = logger
    this.queue = queue
    this.options = options
    this.grabber = process.env.NODE_ENV === 'test' ? new EPGGrabberMock() : new EPGGrabber()
  }

  async grab(): Promise<{ channels: Collection; programs: Collection }> {
    const proxyParser = new ProxyParser()
    const taskQueue = new TaskQueue(Promise as PromisyClass, this.options.maxConnections)

    const total = this.queue.size()

    const channels = new Collection()
    let programs = new Collection()
    let i = 1

    await Promise.all(
      this.queue.items().map(
        taskQueue.wrap(
          async (queueItem: { channel: Channel; config: SiteConfig; date: string }) => {
            const { channel, config, date } = queueItem

            channels.add(channel)

            if (this.options.timeout !== undefined) {
              const timeout = parseInt(this.options.timeout)
              config.request = { ...config.request, ...{ timeout } }
            }

            if (this.options.delay !== undefined) {
              const delay = parseInt(this.options.delay)
              config.delay = delay
            }

            if (this.options.proxy !== undefined) {
              const proxy = proxyParser.parse(this.options.proxy)

              if (
                proxy.protocol &&
                ['socks', 'socks5', 'socks5h', 'socks4', 'socks4a'].includes(String(proxy.protocol))
              ) {
                const socksProxyAgent = new SocksProxyAgent(this.options.proxy)

                config.request = {
                  ...config.request,
                  ...{ httpAgent: socksProxyAgent, httpsAgent: socksProxyAgent }
                }
              } else {
                config.request = { ...config.request, ...{ proxy } }
              }
            }

            if (this.options.curl === true) {
              config.curl = true
            }

            const _programs = await this.grabber.grab(
              channel,
              date,
              config,
              (data: GrabCallbackData, error: Error | null) => {
                const { programs, date } = data

                this.logger.info(
                  ` [${i}/${total}] ${channel.site} (${channel.lang}) - ${
                    channel.xmltv_id
                  } - ${date.format('MMM D, YYYY')} (${programs.length} programs)`
                )
                if (i < total) i++

                if (error) {
                  this.logger.info(` ERR: ${error.message}`)
                }
              }
            )

            programs = programs.concat(new Collection(_programs))
          }
        )
      )
    )

    return { channels, programs }
  }
}
@@ -1,111 +0,0 @@
import { Collection, Logger, Zip, Storage, StringTemplate } from '@freearhey/core'
import epgGrabber from 'epg-grabber'
import { OptionValues } from 'commander'
import { Channel, Feed, Guide } from '../models'
import path from 'path'
import { DataLoader, DataProcessor } from '.'
import { DataLoaderData } from '../types/dataLoader'
import { DataProcessorData } from '../types/dataProcessor'
import { DATA_DIR } from '../constants'

interface GuideManagerProps {
  options: OptionValues
  logger: Logger
  channels: Collection
  programs: Collection
}

export class GuideManager {
  options: OptionValues
  logger: Logger
  channels: Collection
  programs: Collection

  constructor({ channels, programs, logger, options }: GuideManagerProps) {
    this.options = options
    this.logger = logger
    this.channels = channels
    this.programs = programs
  }

  async createGuides() {
    const pathTemplate = new StringTemplate(this.options.output)

    const processor = new DataProcessor()
    const dataStorage = new Storage(DATA_DIR)
    const loader = new DataLoader({ storage: dataStorage })
    const data: DataLoaderData = await loader.load()
    const { feedsKeyByStreamId, channelsKeyById }: DataProcessorData = processor.process(data)

    const groupedChannels = this.channels
      .map((channel: epgGrabber.Channel) => {
        if (channel.xmltv_id && !channel.icon) {
          const foundFeed: Feed = feedsKeyByStreamId.get(channel.xmltv_id)
          if (foundFeed && foundFeed.hasLogo()) {
            channel.icon = foundFeed.getLogoUrl()
          } else {
            const [channelId] = channel.xmltv_id.split('@')
            const foundChannel: Channel = channelsKeyById.get(channelId)
            if (foundChannel && foundChannel.hasLogo()) {
              channel.icon = foundChannel.getLogoUrl()
            }
          }
        }

        return channel
      })
      .orderBy([
        (channel: epgGrabber.Channel) => channel.index,
        (channel: epgGrabber.Channel) => channel.xmltv_id
      ])
      .uniqBy(
        (channel: epgGrabber.Channel) => `${channel.xmltv_id}:${channel.site}:${channel.lang}`
      )
      .groupBy((channel: epgGrabber.Channel) => {
        return pathTemplate.format({ lang: channel.lang || 'en', site: channel.site || '' })
      })

    const groupedPrograms = this.programs
      .orderBy([
        (program: epgGrabber.Program) => program.channel,
        (program: epgGrabber.Program) => program.start
      ])
      .groupBy((program: epgGrabber.Program) => {
        const lang =
          program.titles && program.titles.length && program.titles[0].lang
            ? program.titles[0].lang
            : 'en'

        return pathTemplate.format({ lang, site: program.site || '' })
      })

    for (const groupKey of groupedPrograms.keys()) {
      const guide = new Guide({
        filepath: groupKey,
        gzip: this.options.gzip,
        channels: new Collection(groupedChannels.get(groupKey)),
        programs: new Collection(groupedPrograms.get(groupKey))
      })

      await this.save(guide)
    }
  }

  async save(guide: Guide) {
    const storage = new Storage(path.dirname(guide.filepath))
    const xmlFilepath = guide.filepath
    const xmlFilename = path.basename(xmlFilepath)
    this.logger.info(` saving to "${xmlFilepath}"...`)
    const xmltv = guide.toString()
    await storage.save(xmlFilename, xmltv)

    if (guide.gzip) {
      const zip = new Zip()
      const compressed = zip.compress(xmltv)
      const gzFilepath = `${guide.filepath}.gz`
      const gzFilename = path.basename(gzFilepath)
      this.logger.info(` saving to "${gzFilepath}"...`)
      await storage.save(gzFilename, compressed)
    }
  }
}
@@ -1,55 +1,45 @@
interface Column {
  name: string
  nowrap?: boolean
  align?: string
  colspan?: number
}

type DataItem = {
  value: string
  nowrap?: boolean
  align?: string
  colspan?: number
}[]

export class HTMLTable {
  data: DataItem[]
  columns: Column[]

  constructor(data: DataItem[], columns: Column[]) {
    this.data = data
    this.columns = columns
  }

  toString() {
    let output = '<table>\r\n'

    output += ' <thead>\r\n <tr>'
    for (const column of this.columns) {
      const nowrap = column.nowrap ? ' nowrap' : ''
      const align = column.align ? ` align="${column.align}"` : ''
      const colspan = column.colspan ? ` colspan="${column.colspan}"` : ''

      output += `<th${align}${nowrap}${colspan}>${column.name}</th>`
    }
    output += '</tr>\r\n </thead>\r\n'

    output += ' <tbody>\r\n'
    for (const row of this.data) {
      output += ' <tr>'
      for (const item of row) {
        const nowrap = item.nowrap ? ' nowrap' : ''
        const align = item.align ? ` align="${item.align}"` : ''
        const colspan = item.colspan ? ` colspan="${item.colspan}"` : ''

        output += `<td${align}${nowrap}${colspan}>${item.value}</td>`
      }
      output += '</tr>\r\n'
    }
    output += ' </tbody>\r\n'

    output += '</table>'

    return output
  }
}
import { HTMLTableColumn, HTMLTableDataItem, HTMLTableRow } from '../types/htmlTable'
import { Collection } from '@freearhey/core'
import { EOL } from '../constants'

export class HTMLTable {
  rows: Collection<HTMLTableRow>
  columns: Collection<HTMLTableColumn>

  constructor(rows: Collection<HTMLTableRow>, columns: Collection<HTMLTableColumn>) {
    this.rows = rows
    this.columns = columns
  }

  toString() {
    let output = `<table>${EOL}`

    output += ` <thead>${EOL} <tr>`
    this.columns.forEach((column: HTMLTableColumn) => {
      const nowrap = column.nowrap ? ' nowrap' : ''
      const align = column.align ? ` align="${column.align}"` : ''
      const colspan = column.colspan ? ` colspan="${column.colspan}"` : ''

      output += `<th${align}${nowrap}${colspan}>${column.name}</th>`
    })
    output += `</tr>${EOL} </thead>${EOL}`

    output += ` <tbody>${EOL}`
    this.rows.forEach((row: HTMLTableRow) => {
      output += ' <tr>'
      row.forEach((item: HTMLTableDataItem) => {
        const nowrap = item.nowrap ? ' nowrap' : ''
        const align = item.align ? ` align="${item.align}"` : ''
        const colspan = item.colspan ? ` colspan="${item.colspan}"` : ''

        output += `<td${align}${nowrap}${colspan}>${item.value}</td>`
      })
      output += `</tr>${EOL}`
    })
    output += ` </tbody>${EOL}`

    output += '</table>'

    return output
  }
}

@@ -1,14 +1,4 @@
export * from './apiClient'
export * from './channelsParser'
export * from './configLoader'
export * from './dataLoader'
export * from './dataProcessor'
export * from './grabber'
export * from './guideManager'
export * from './htmlTable'
export * from './issueLoader'
export * from './issueParser'
export * from './job'
export * from './proxyParser'
export * from './queue'
export * from './queueCreator'
export * from './htmlTable'
export * from './siteConfig'
export * from './utils'
export * from './queue'

@@ -1,37 +0,0 @@
import { restEndpointMethods } from '@octokit/plugin-rest-endpoint-methods'
import { paginateRest } from '@octokit/plugin-paginate-rest'
import { TESTING, OWNER, REPO } from '../constants'
import { Collection } from '@freearhey/core'
import { Octokit } from '@octokit/core'
import { IssueParser } from './'

const CustomOctokit = Octokit.plugin(paginateRest, restEndpointMethods)
const octokit = new CustomOctokit()

export class IssueLoader {
  async load(props?: { labels: string[] | string }) {
    let labels = ''
    if (props && props.labels) {
      labels = Array.isArray(props.labels) ? props.labels.join(',') : props.labels
    }
    let issues: object[] = []
    if (TESTING) {
      issues = (await import('../../tests/__data__/input/sites_update/issues.mjs')).default
    } else {
      issues = await octokit.paginate(octokit.rest.issues.listForRepo, {
        owner: OWNER,
        repo: REPO,
        per_page: 100,
        labels,
        state: 'open',
        headers: {
          'X-GitHub-Api-Version': '2022-11-28'
        }
      })
    }

    const parser = new IssueParser()

    return new Collection(issues).map(parser.parse)
  }
}
@@ -1,34 +0,0 @@
import { Dictionary } from '@freearhey/core'
import { Issue } from '../models'

const FIELDS = new Dictionary({
  Site: 'site'
})

export class IssueParser {
  parse(issue: { number: number; body: string; labels: { name: string }[] }): Issue {
    const fields = issue.body.split('###')

    const data = new Dictionary()
    fields.forEach((field: string) => {
      const parsed = field.split(/\r?\n/).filter(Boolean)
      let _label = parsed.shift()
      _label = _label ? _label.trim() : ''
      let _value = parsed.join('\r\n')
      _value = _value ? _value.trim() : ''

      if (!_label || !_value) return data

      const id: string = FIELDS.get(_label)
      const value: string = _value === '_No response_' || _value === 'None' ? '' : _value

      if (!id) return

      data.set(id, value)
    })

    const labels = issue.labels.map(label => label.name)

    return new Issue({ number: issue.number, labels, data })
  }
}
@@ -1,34 +0,0 @@
import { Logger } from '@freearhey/core'
import { Queue, Grabber, GuideManager } from '.'
import { GrabOptions } from '../commands/epg/grab'

interface JobProps {
  options: GrabOptions
  logger: Logger
  queue: Queue
}

export class Job {
  options: GrabOptions
  logger: Logger
  grabber: Grabber

  constructor({ queue, logger, options }: JobProps) {
    this.options = options
    this.logger = logger
    this.grabber = new Grabber({ logger, queue, options })
  }

  async run() {
    const { channels, programs } = await this.grabber.grab()

    const manager = new GuideManager({
      channels,
      programs,
      options: this.options,
      logger: this.logger
    })

    await manager.createGuides()
  }
}
@@ -1,31 +0,0 @@
import { URL } from 'node:url'

interface ProxyParserResult {
  protocol: string | null
  auth?: {
    username?: string
    password?: string
  }
  host: string
  port: number | null
}

export class ProxyParser {
  parse(_url: string): ProxyParserResult {
    const parsed = new URL(_url)

    const result: ProxyParserResult = {
      protocol: parsed.protocol.replace(':', '') || null,
      host: parsed.hostname,
      port: parsed.port ? parseInt(parsed.port) : null
    }

    if (parsed.username || parsed.password) {
      result.auth = {}
      if (parsed.username) result.auth.username = parsed.username
      if (parsed.password) result.auth.password = parsed.password
    }

    return result
  }
}
@@ -1,45 +1,18 @@
import { Dictionary } from '@freearhey/core'
import { SiteConfig, Channel } from 'epg-grabber'

export interface QueueItem {
  channel: Channel
  date: string
  config: SiteConfig
  error: string | null
}

export class Queue {
  _data: Dictionary

  constructor() {
    this._data = new Dictionary()
  }

  missing(key: string): boolean {
    return this._data.missing(key)
  }

  add(
    key: string,
    { channel, config, date }: { channel: Channel; date: string | null; config: SiteConfig }
  ) {
    this._data.set(key, {
      channel,
      date,
      config,
      error: null
    })
  }

  size(): number {
    return Object.values(this._data.data()).length
  }

  items(): QueueItem[] {
    return Object.values(this._data.data()) as QueueItem[]
  }

  isEmpty(): boolean {
    return this.size() === 0
  }
}
import { Collection, Dictionary } from '@freearhey/core'
import { QueueItem } from '../types/queue'

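// Dictionary-backed queue: adding an existing key overwrites it, so callers
// check has() first when they want to keep the earliest entry.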
export class Queue {
  #items: Dictionary<QueueItem> = new Dictionary<QueueItem>()

  add(key: string, data: QueueItem) {
    this.#items.set(key, data)
  }

  has(key: string): boolean {
    return this.#items.has(key)
  }

  getItems(): Collection<QueueItem> {
    return new Collection<QueueItem>(Object.values(this.#items.data()))
  }
}

@@ -1,63 +0,0 @@
import { Storage, Collection, DateTime, Logger } from '@freearhey/core'
import { SITES_DIR, DATA_DIR } from '../constants'
import { GrabOptions } from '../commands/epg/grab'
import { ConfigLoader, Queue } from './'
import { SiteConfig } from 'epg-grabber'
import path from 'path'

interface QueueCreatorProps {
  logger: Logger
  options: GrabOptions
  channels: Collection
}

export class QueueCreator {
  configLoader: ConfigLoader
  logger: Logger
  sitesStorage: Storage
  dataStorage: Storage
  channels: Collection
  options: GrabOptions

  constructor({ channels, logger, options }: QueueCreatorProps) {
    this.channels = channels
    this.logger = logger
    this.sitesStorage = new Storage()
    this.dataStorage = new Storage(DATA_DIR)
    this.options = options
    this.configLoader = new ConfigLoader()
  }

  async create(): Promise<Queue> {
    let index = 0
    const queue = new Queue()
    for (const channel of this.channels.all()) {
      channel.index = index++
      if (!channel.site || !channel.site_id || !channel.name) continue

      const configPath = path.resolve(SITES_DIR, `${channel.site}/${channel.site}.config.js`)
      const config: SiteConfig = await this.configLoader.load(configPath)

      if (!channel.xmltv_id) {
        channel.xmltv_id = channel.site_id
      }

      const days = this.options.days || config.days || 1
      const currDate = new DateTime(process.env.CURR_DATE || new Date().toISOString())
      const dates = Array.from({ length: days }, (_, day) => currDate.add(day, 'd'))
      dates.forEach((date: DateTime) => {
        const dateString = date.toJSON()
        const key = `${channel.site}:${channel.lang}:${channel.xmltv_id}:${dateString}`
        if (queue.missing(key)) {
          queue.add(key, {
            channel,
            date: dateString,
            config
          })
        }
      })
    }

    return queue
  }
}
71
scripts/core/siteConfig.ts
Normal file
71
scripts/core/siteConfig.ts
Normal file
@@ -0,0 +1,71 @@
import * as epgGrabber from 'epg-grabber'
import _ from 'lodash'

const _default = {
  days: 1,
  delay: 0,
  output: 'guide.xml',
  request: {
    method: 'GET',
    maxContentLength: 5242880,
    timeout: 30000,
    withCredentials: true,
    jar: null,
    responseType: 'arraybuffer',
    cache: false,
    headers: null,
    data: null
  },
  maxConnections: 1,
  site: undefined,
  url: undefined,
  parser: undefined,
  channels: undefined,
  lang: 'en',
  debug: false,
  gzip: false,
  curl: false,
  logo: ''
}

export class SiteConfig {
  days: number
  lang: string
  delay: number
  debug: boolean
  gzip: boolean
  curl: boolean
  maxConnections: number
  output: string
  request: epgGrabber.Types.SiteConfigRequestConfig
  site: string
  channels?: string | string[]
  url: ((context: epgGrabber.Types.SiteConfigRequestContext) => string | Promise<string>) | string
  parser: (
    context: epgGrabber.Types.SiteConfigParserContext
  ) =>
    | epgGrabber.Types.SiteConfigParserResult[]
    | Promise<epgGrabber.Types.SiteConfigParserResult[]>
  logo: ((context: epgGrabber.Types.SiteConfigRequestContext) => string | Promise<string>) | string
  filepath: string

  constructor(config: epgGrabber.Types.SiteConfigObject) {
    this.site = config.site
    this.channels = config.channels
    this.url = config.url
    this.parser = config.parser
    this.filepath = config.filepath

    this.days = config.days || _default.days
    this.lang = config.lang || _default.lang
    this.delay = config.delay || _default.delay
    this.debug = config.debug || _default.debug
    this.maxConnections = config.maxConnections || _default.maxConnections
    this.gzip = config.gzip || _default.gzip
    this.curl = config.curl || _default.curl
    this.output = config.output || _default.output
    this.logo = config.logo || _default.logo

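    // lodash merge deep-merges the site's request overrides into the defaults.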
    this.request = _.merge(_default.request, config.request)
  }
}
106
scripts/core/utils.ts
Normal file
106
scripts/core/utils.ts
Normal file
@@ -0,0 +1,106 @@
import { restEndpointMethods } from '@octokit/plugin-rest-endpoint-methods'
import { paginateRest } from '@octokit/plugin-paginate-rest'
import { TESTING, OWNER, REPO, EOL } from '../constants'
import { Collection } from '@freearhey/core'
import { Channel } from '../models/channel'
import { AxiosProxyConfig } from 'axios'
import { Octokit } from '@octokit/core'
import { pathToFileURL } from 'url'
import { Issue } from '../models'
import { URL } from 'node:url'

export function generateChannelsXML(channels: Collection<Channel>): string {
  let output = `<?xml version="1.0" encoding="UTF-8"?>${EOL}<channels>${EOL}`

  channels.forEach((channel: Channel) => {
    const logo = channel.logo ? ` logo="${escapeString(channel.logo)}"` : ''
    const xmltv_id = channel.xmltv_id ? escapeString(channel.xmltv_id) : ''
    const lang = channel.lang || ''
    const site_id = channel.site_id ? escapeString(channel.site_id) : ''
    const site = channel.site || ''
    const displayName = channel.name ? escapeString(channel.name) : ''

    output += ` <channel site="${site}" site_id="${site_id}" lang="${lang}"${logo} xmltv_id="${xmltv_id}">${displayName}</channel>${EOL}`
  })

  output += `</channels>${EOL}`

  return output
}

export function escapeString(value: string, defaultValue = '') {
  if (!value) return defaultValue

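  // Strip code points that are not valid in XML 1.0 (control characters,
  // unpaired surrogates, non-characters) before escaping markup entities.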
  const regex = new RegExp(
    '((?:[\0-\x08\x0B\f\x0E-\x1F\uFFFD\uFFFE\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]))|([\\x7F-\\x84]|[\\x86-\\x9F]|[\\uFDD0-\\uFDEF]|(?:\\uD83F[\\uDFFE\\uDFFF])|(?:\\uD87F[\\uDF' +
      'FE\\uDFFF])|(?:\\uD8BF[\\uDFFE\\uDFFF])|(?:\\uD8FF[\\uDFFE\\uDFFF])|(?:\\uD93F[\\uDFFE\\uD' +
      'FFF])|(?:\\uD97F[\\uDFFE\\uDFFF])|(?:\\uD9BF[\\uDFFE\\uDFFF])|(?:\\uD9FF[\\uDFFE\\uDFFF])' +
      '|(?:\\uDA3F[\\uDFFE\\uDFFF])|(?:\\uDA7F[\\uDFFE\\uDFFF])|(?:\\uDABF[\\uDFFE\\uDFFF])|(?:\\' +
      'uDAFF[\\uDFFE\\uDFFF])|(?:\\uDB3F[\\uDFFE\\uDFFF])|(?:\\uDB7F[\\uDFFE\\uDFFF])|(?:\\uDBBF' +
      '[\\uDFFE\\uDFFF])|(?:\\uDBFF[\\uDFFE\\uDFFF])(?:[\\0-\\t\\x0B\\f\\x0E-\\u2027\\u202A-\\uD7FF\\' +
      'uE000-\\uFFFF]|[\\uD800-\\uDBFF][\\uDC00-\\uDFFF]|[\\uD800-\\uDBFF](?![\\uDC00-\\uDFFF])|' +
      '(?:[^\\uD800-\\uDBFF]|^)[\\uDC00-\\uDFFF]))',
    'g'
  )

  value = String(value || '').replace(regex, '')

  return value
    .replace(/&/g, '&amp;')
    .replace(/</g, '&lt;')
    .replace(/>/g, '&gt;')
    .replace(/"/g, '&quot;')
    .replace(/'/g, '&apos;')
    .replace(/\n|\r/g, ' ')
    .replace(/ +/g, ' ')
    .trim()
}

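// Converts a proxy URL into axios' AxiosProxyConfig; the port falls back to
// 8080 when the URL does not include one.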
export function parseProxy(string: string): AxiosProxyConfig {
  const parsed = new URL(string)

  const proxy: AxiosProxyConfig = {
    protocol: parsed.protocol.replace(':', ''),
    host: parsed.hostname,
    port: parsed.port ? parseInt(parsed.port) : 8080
  }

  if (parsed.username || parsed.password) {
    proxy.auth = { username: parsed.username, password: parsed.password }
  }

  return proxy
}

export async function loadJs(filepath: string) {
  const fileUrl = pathToFileURL(filepath).toString()

  return (await import(fileUrl)).default
}

export async function loadIssues(props?: { labels: string[] | string }) {
  const CustomOctokit = Octokit.plugin(paginateRest, restEndpointMethods)
  const octokit = new CustomOctokit()

  let labels = ''
  if (props && props.labels) {
    labels = Array.isArray(props.labels) ? props.labels.join(',') : props.labels
  }
  let issues: object[] = []
  if (TESTING) {
    issues = (await import('../../tests/__data__/input/sites_update/issues.mjs')).default
  } else {
    issues = await octokit.paginate(octokit.rest.issues.listForRepo, {
      owner: OWNER,
      repo: REPO,
      per_page: 100,
      labels,
      state: 'open',
      headers: {
        'X-GitHub-Api-Version': '2022-11-28'
      }
    })
  }

  return new Collection(issues).map(data => new Issue(data))
}
@@ -1,164 +1,23 @@
import { ChannelData, ChannelSearchableData } from '../types/channel'
import { Collection, Dictionary } from '@freearhey/core'
import { Stream, Feed, Logo, GuideChannel } from './'

export class Channel {
  id?: string
  name?: string
  altNames?: Collection
  network?: string
  owners?: Collection
  countryCode?: string
  subdivisionCode?: string
  cityName?: string
  categoryIds?: Collection
  isNSFW = false
  launched?: string
  closed?: string
  replacedBy?: string
  website?: string
  feeds?: Collection
  logos: Collection = new Collection()

  constructor(data?: ChannelData) {
    if (!data) return

    this.id = data.id
    this.name = data.name
    this.altNames = new Collection(data.alt_names)
    this.network = data.network || undefined
    this.owners = new Collection(data.owners)
    this.countryCode = data.country
    this.subdivisionCode = data.subdivision || undefined
    this.cityName = data.city || undefined
    this.categoryIds = new Collection(data.categories)
    this.isNSFW = data.is_nsfw
    this.launched = data.launched || undefined
    this.closed = data.closed || undefined
    this.replacedBy = data.replaced_by || undefined
    this.website = data.website || undefined
  }

  withFeeds(feedsGroupedByChannelId: Dictionary): this {
    if (this.id) this.feeds = new Collection(feedsGroupedByChannelId.get(this.id))

    return this
  }

  withLogos(logosGroupedByChannelId: Dictionary): this {
    if (this.id) this.logos = new Collection(logosGroupedByChannelId.get(this.id))

    return this
  }

  getFeeds(): Collection {
    if (!this.feeds) return new Collection()

    return this.feeds
  }

  getGuideChannels(): Collection {
    let channels = new Collection()

    this.getFeeds().forEach((feed: Feed) => {
      channels = channels.concat(feed.getGuideChannels())
    })

    return channels
  }

  getGuideChannelNames(): Collection {
    return this.getGuideChannels()
      .map((channel: GuideChannel) => channel.siteName)
      .uniq()
  }

  getStreams(): Collection {
    let streams = new Collection()

    this.getFeeds().forEach((feed: Feed) => {
      streams = streams.concat(feed.getStreams())
    })

    return streams
  }

  getStreamNames(): Collection {
    return this.getStreams()
      .map((stream: Stream) => stream.getName())
      .uniq()
  }

  getFeedFullNames(): Collection {
    return this.getFeeds()
      .map((feed: Feed) => feed.getFullName())
      .uniq()
  }

  getName(): string {
    return this.name || ''
  }

  getId(): string {
    return this.id || ''
  }

  getAltNames(): Collection {
    return this.altNames || new Collection()
  }

  getLogos(): Collection {
    function feed(logo: Logo): number {
      if (!logo.feed) return 1
      if (logo.feed.isMain) return 1

      return 0
    }

    function format(logo: Logo): number {
      const levelByFormat: Record<string, number> = {
        SVG: 0,
        PNG: 3,
        APNG: 1,
        WebP: 1,
        AVIF: 1,
        JPEG: 2,
        GIF: 1
      }

      return logo.format ? levelByFormat[logo.format] : 0
    }

    function size(logo: Logo): number {
      return Math.abs(512 - logo.width) + Math.abs(512 - logo.height)
    }

    return this.logos.orderBy([feed, format, size], ['desc', 'desc', 'asc'], false)
  }

  getLogo(): Logo | undefined {
    return this.getLogos().first()
  }

  hasLogo(): boolean {
    return this.getLogos().notEmpty()
  }

  getLogoUrl(): string {
    const logo = this.getLogo()
    if (!logo) return ''

    return logo.url || ''
  }

  getSearchable(): ChannelSearchableData {
    return {
      id: this.getId(),
      name: this.getName(),
      altNames: this.getAltNames().all(),
      guideNames: this.getGuideChannelNames().all(),
      streamNames: this.getStreamNames().all(),
      feedFullNames: this.getFeedFullNames().all()
    }
  }
}
import { ChannelGuideObject } from '../types/channel'
import * as epgGrabber from 'epg-grabber'
import { SITES_DIR } from '../constants'
import path from 'node:path'

export class Channel extends epgGrabber.Channel {
  getGuideObject(): ChannelGuideObject {
    const [channelId, feedId] = this.xmltv_id.split('@')

    return {
      channel: channelId || null,
      feed: feedId || null,
      site: this.site,
      site_id: this.site_id,
      site_name: this.name,
      lang: this.lang || 'en'
    }
  }

  getConfigPath(): string {
    return path.resolve(SITES_DIR, `${this.site}/${this.site}.config.js`)
  }
}

@@ -1,77 +0,0 @@
import { Collection } from '@freearhey/core'
import epgGrabber from 'epg-grabber'

export class ChannelList {
  channels: Collection = new Collection()

  constructor(data: { channels: epgGrabber.Channel[] }) {
    this.channels = new Collection(data.channels)
  }

  add(channel: epgGrabber.Channel): this {
    this.channels.add(channel)

    return this
  }

  get(siteId: string): epgGrabber.Channel | undefined {
    return this.channels.find((channel: epgGrabber.Channel) => channel.site_id == siteId)
  }

  sort(): this {
    this.channels = this.channels.orderBy([
      (channel: epgGrabber.Channel) => channel.lang || '_',
      (channel: epgGrabber.Channel) => (channel.xmltv_id ? channel.xmltv_id.toLowerCase() : '0'),
      (channel: epgGrabber.Channel) => channel.site_id
    ])

    return this
  }

  toString() {
    function escapeString(value: string, defaultValue = '') {
      if (!value) return defaultValue

      const regex = new RegExp(
        '((?:[\0-\x08\x0B\f\x0E-\x1F\uFFFD\uFFFE\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]))|([\\x7F-\\x84]|[\\x86-\\x9F]|[\\uFDD0-\\uFDEF]|(?:\\uD83F[\\uDFFE\\uDFFF])|(?:\\uD87F[\\uDF' +
          'FE\\uDFFF])|(?:\\uD8BF[\\uDFFE\\uDFFF])|(?:\\uD8FF[\\uDFFE\\uDFFF])|(?:\\uD93F[\\uDFFE\\uD' +
          'FFF])|(?:\\uD97F[\\uDFFE\\uDFFF])|(?:\\uD9BF[\\uDFFE\\uDFFF])|(?:\\uD9FF[\\uDFFE\\uDFFF])' +
          '|(?:\\uDA3F[\\uDFFE\\uDFFF])|(?:\\uDA7F[\\uDFFE\\uDFFF])|(?:\\uDABF[\\uDFFE\\uDFFF])|(?:\\' +
          'uDAFF[\\uDFFE\\uDFFF])|(?:\\uDB3F[\\uDFFE\\uDFFF])|(?:\\uDB7F[\\uDFFE\\uDFFF])|(?:\\uDBBF' +
          '[\\uDFFE\\uDFFF])|(?:\\uDBFF[\\uDFFE\\uDFFF])(?:[\\0-\\t\\x0B\\f\\x0E-\\u2027\\u202A-\\uD7FF\\' +
          'uE000-\\uFFFF]|[\\uD800-\\uDBFF][\\uDC00-\\uDFFF]|[\\uD800-\\uDBFF](?![\\uDC00-\\uDFFF])|' +
          '(?:[^\\uD800-\\uDBFF]|^)[\\uDC00-\\uDFFF]))',
        'g'
      )

      value = String(value || '').replace(regex, '')

      return value
        .replace(/&/g, '&amp;')
        .replace(/</g, '&lt;')
        .replace(/>/g, '&gt;')
        .replace(/"/g, '&quot;')
        .replace(/'/g, '&apos;')
        .replace(/\n|\r/g, ' ')
        .replace(/ +/g, ' ')
        .trim()
    }

    let output = '<?xml version="1.0" encoding="UTF-8"?>\r\n<channels>\r\n'

    this.channels.forEach((channel: epgGrabber.Channel) => {
      const logo = channel.logo ? ` logo="${channel.logo}"` : ''
      const xmltv_id = channel.xmltv_id ? escapeString(channel.xmltv_id) : ''
      const lang = channel.lang || ''
      const site_id = channel.site_id || ''
      const site = channel.site || ''
      const displayName = channel.name ? escapeString(channel.name) : ''

      output += ` <channel site="${site}" lang="${lang}" xmltv_id="${xmltv_id}" site_id="${site_id}"${logo}>${displayName}</channel>\r\n`
    })

    output += '</channels>\r\n'

    return output
  }
}
@@ -1,124 +0,0 @@
|
||||
import { Collection, Dictionary } from '@freearhey/core'
|
||||
import { FeedData } from '../types/feed'
|
||||
import { Logo, Channel } from '.'
|
||||
|
||||
export class Feed {
|
||||
channelId: string
|
||||
channel?: Channel
|
||||
id: string
|
||||
name: string
|
||||
isMain: boolean
|
||||
broadcastAreaCodes: Collection
|
||||
languageCodes: Collection
|
||||
timezoneIds: Collection
|
||||
videoFormat: string
|
||||
guideChannels?: Collection
|
||||
streams?: Collection
|
||||
logos: Collection = new Collection()
|
||||
|
||||
constructor(data: FeedData) {
|
||||
this.channelId = data.channel
|
||||
this.id = data.id
|
||||
this.name = data.name
|
||||
this.isMain = data.is_main
|
||||
this.broadcastAreaCodes = new Collection(data.broadcast_area)
|
||||
this.languageCodes = new Collection(data.languages)
|
||||
this.timezoneIds = new Collection(data.timezones)
|
||||
this.videoFormat = data.video_format
|
||||
}
|
||||
|
||||
withChannel(channelsKeyById: Dictionary): this {
|
||||
this.channel = channelsKeyById.get(this.channelId)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
withStreams(streamsGroupedById: Dictionary): this {
|
||||
this.streams = new Collection(streamsGroupedById.get(`${this.channelId}@${this.id}`))
|
||||
|
||||
if (this.isMain) {
|
||||
this.streams = this.streams.concat(new Collection(streamsGroupedById.get(this.channelId)))
|
||||
}
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
withGuideChannels(guideChannelsGroupedByStreamId: Dictionary): this {
|
||||
this.guideChannels = new Collection(
|
||||
guideChannelsGroupedByStreamId.get(`${this.channelId}@${this.id}`)
|
||||
)
|
||||
|
||||
if (this.isMain) {
|
||||
this.guideChannels = this.guideChannels.concat(
|
||||
new Collection(guideChannelsGroupedByStreamId.get(this.channelId))
|
||||
)
|
||||
}
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
withLogos(logosGroupedByStreamId: Dictionary): this {
|
||||
this.logos = new Collection(logosGroupedByStreamId.get(this.getStreamId()))
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
getGuideChannels(): Collection {
|
||||
if (!this.guideChannels) return new Collection()
|
||||
|
||||
return this.guideChannels
|
||||
}
|
||||
|
||||
getStreams(): Collection {
|
||||
if (!this.streams) return new Collection()
|
||||
|
||||
return this.streams
|
||||
}
|
||||
|
||||
getFullName(): string {
|
||||
if (!this.channel) return ''
|
||||
|
||||
return `${this.channel.name} ${this.name}`
|
||||
}
|
||||
|
||||
getStreamId(): string {
|
||||
return `${this.channelId}@${this.id}`
|
||||
}
|
||||
|
||||
getLogos(): Collection {
|
||||
function format(logo: Logo): number {
|
||||
const levelByFormat: Record<string, number> = {
|
||||
SVG: 0,
|
||||
PNG: 3,
|
||||
APNG: 1,
|
||||
WebP: 1,
|
||||
AVIF: 1,
|
||||
JPEG: 2,
|
||||
GIF: 1
|
||||
}
|
||||
|
||||
return logo.format ? levelByFormat[logo.format] : 0
|
||||
}
|
||||
|
||||
function size(logo: Logo): number {
|
||||
return Math.abs(512 - logo.width) + Math.abs(512 - logo.height)
|
||||
}
|
||||
|
||||
return this.logos.orderBy([format, size], ['desc', 'asc'], false)
|
||||
}
|
||||
|
||||
getLogo(): Logo | undefined {
|
||||
return this.getLogos().first()
|
||||
}
|
||||
|
||||
hasLogo(): boolean {
|
||||
return this.getLogos().notEmpty()
|
||||
}
|
||||
|
||||
getLogoUrl(): string {
|
||||
const logo = this.getLogo()
|
||||
if (!logo) return ''
|
||||
|
||||
return logo.url || ''
|
||||
}
|
||||
}
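
For orientation, a minimal sketch of how this (now removed) Feed model was wired together — the data values below are invented, and channelsKeyById / streamsGroupedById would come from the data processor:

const feed = new Feed({
  channel: 'CNN.us', // hypothetical ids, for illustration only
  id: 'East',
  name: 'East',
  is_main: true,
  broadcast_area: ['c/US'],
  languages: ['eng'],
  timezones: ['America/New_York'],
  video_format: '1080i'
})

feed.getStreamId() // 'CNN.us@East'
feed.withChannel(channelsKeyById).getFullName() // e.g. 'CNN East'
// Because is_main is true, withStreams() also picks up streams grouped
// under the bare channel id, not just under 'CNN.us@East'; getLogos()
// ranks candidates by format score, then by distance from 512x512.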

@@ -1,35 +1,59 @@
import { Collection, DateTime } from '@freearhey/core'
import { generateXMLTV } from 'epg-grabber'

interface GuideData {
  channels: Collection
  programs: Collection
  filepath: string
  gzip: boolean
}

export class Guide {
  channels: Collection
  programs: Collection
  filepath: string
  gzip: boolean

  constructor({ channels, programs, filepath, gzip }: GuideData) {
    this.channels = channels
    this.programs = programs
    this.filepath = filepath
    this.gzip = gzip || false
  }

  toString() {
    const currDate = new DateTime(process.env.CURR_DATE || new Date().toISOString(), {
      timezone: 'UTC'
    })

    return generateXMLTV({
      channels: this.channels.all(),
      programs: this.programs.all(),
      date: currDate.toJSON()
    })
  }
}

import { Collection, Logger } from '@freearhey/core'
import { Storage } from '@freearhey/storage-js'
import { EPGGrabber } from 'epg-grabber'
import { Channel, Program } from '.'
import utc from 'dayjs/plugin/utc'
import dayjs from 'dayjs'
import path from 'node:path'
import pako from 'pako'

dayjs.extend(utc)

interface GuideData {
  channels: Collection<Channel>
  programs: Collection<Program>
  filepath: string
  gzip: boolean
}

export class Guide {
  channels: Collection<Channel>
  programs: Collection<Program>
  filepath: string
  gzip: boolean

  constructor(data: GuideData) {
    this.channels = data.channels
    this.programs = data.programs
    this.filepath = data.filepath
    this.gzip = data.gzip || false
  }

  addChannel(channel: Channel) {
    this.channels.add(channel)
  }

  toString() {
    const currDate = dayjs.utc(process.env.CURR_DATE || new Date().toISOString())

    return EPGGrabber.generateXMLTV(this.channels.all(), this.programs.all(), currDate)
  }

  async save({ logger }: { logger: Logger }) {
    const dir = path.dirname(this.filepath)
    const storage = new Storage(dir)
    const xmlFilepath = this.filepath
    const xmlFilename = path.basename(xmlFilepath)
    logger.info(`  saving to "${xmlFilepath}"...`)
    const xmltv = this.toString()
    await storage.save(xmlFilename, xmltv)

    if (this.gzip) {
      const compressed = pako.gzip(xmltv)
      const gzFilepath = `${this.filepath}.gz`
      const gzFilename = path.basename(gzFilepath)
      logger.info(`  saving to "${gzFilepath}"...`)
      await storage.save(gzFilename, compressed)
    }
  }
}
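
A usage sketch for the new Guide — the filepath and logger below are placeholders, and the call assumes an async context:

const guide = new Guide({
  channels: new Collection<Channel>(),
  programs: new Collection<Program>(),
  filepath: 'guides/example.com.xml', // hypothetical output path
  gzip: true
})

// toString() renders XMLTV via EPGGrabber.generateXMLTV(); save() then
// writes guides/example.com.xml and, since gzip is true, a pako-compressed
// guides/example.com.xml.gz alongside it.
await guide.save({ logger })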

@@ -1,59 +0,0 @@
import { Dictionary } from '@freearhey/core'
import epgGrabber from 'epg-grabber'
import { Feed, Channel } from '.'

export class GuideChannel {
  channelId?: string
  channel?: Channel
  feedId?: string
  feed?: Feed
  xmltvId?: string
  languageCode?: string
  siteId?: string
  logoUrl?: string
  siteDomain?: string
  siteName?: string

  constructor(data: epgGrabber.Channel) {
    const [channelId, feedId] = data.xmltv_id ? data.xmltv_id.split('@') : [undefined, undefined]

    this.channelId = channelId
    this.feedId = feedId
    this.xmltvId = data.xmltv_id
    this.languageCode = data.lang
    this.siteId = data.site_id
    this.logoUrl = data.logo
    this.siteDomain = data.site
    this.siteName = data.name
  }

  withChannel(channelsKeyById: Dictionary): this {
    if (this.channelId) this.channel = channelsKeyById.get(this.channelId)

    return this
  }

  withFeed(feedsKeyByStreamId: Dictionary): this {
    if (this.feedId) this.feed = feedsKeyByStreamId.get(this.getStreamId())

    return this
  }

  getStreamId(): string {
    if (!this.channelId) return ''
    if (!this.feedId) return this.channelId

    return `${this.channelId}@${this.feedId}`
  }

  toJSON() {
    return {
      channel: this.channelId || null,
      feed: this.feedId || null,
      site: this.siteDomain || '',
      site_id: this.siteId || '',
      site_name: this.siteName || '',
      lang: this.languageCode || ''
    }
  }
}
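
For context, the removed GuideChannel derived its ids by splitting the xmltv_id on '@' (values below invented):

const guideChannel = new GuideChannel({
  xmltv_id: 'CNN.us@East', // hypothetical '<channel>@<feed>' id
  lang: 'en',
  site_id: '#cnn-east',
  site: 'example.com',
  name: 'CNN East'
} as epgGrabber.Channel)

guideChannel.channelId // 'CNN.us'
guideChannel.feedId // 'East'
guideChannel.getStreamId() // 'CNN.us@East'; a bare xmltv_id yields just the channel id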

@@ -1,9 +1,5 @@
export * from './channel'
export * from './feed'
export * from './guide'
export * from './guideChannel'
export * from './issue'
export * from './logo'
export * from './site'
export * from './stream'

export * from './channelList'
export * from './guide'
export * from './issue'
export * from './site'
export * from './channel'
export * from './program'

@@ -1,24 +1,47 @@
import { Dictionary } from '@freearhey/core'
import { OWNER, REPO } from '../constants'

interface IssueProps {
  number: number
  labels: string[]
  data: Dictionary
}

export class Issue {
  number: number
  labels: string[]
  data: Dictionary

  constructor({ number, labels, data }: IssueProps) {
    this.number = number
    this.labels = labels
    this.data = data
  }

  getURL() {
    return `https://github.com/${OWNER}/${REPO}/issues/${this.number}`
  }
}

import { EOL, OWNER, REPO } from '../constants'
import { Dictionary } from '@freearhey/core'

const FIELDS = new Dictionary({
  Site: 'site'
})

interface IssueData {
  number: number
  body: string
  labels: { name: string }[]
}

export class Issue {
  number: number
  labels: string[]
  data: Dictionary<string>

  constructor(issue: IssueData) {
    const fields = typeof issue.body === 'string' ? issue.body.split('###') : []

    this.data = new Dictionary<string>()
    fields.forEach((field: string) => {
      const parsed = field.split(/\r?\n/).filter(Boolean)
      let _label = parsed.shift()
      _label = _label ? _label.trim() : ''
      let _value = parsed.join(EOL)
      _value = _value ? _value.trim() : ''

      if (!_label || !_value) return

      const id: string | undefined = FIELDS.get(_label)
      const value: string = _value === '_No response_' || _value === 'None' ? '' : _value

      if (!id) return

      this.data.set(id, value)
    })

    this.labels = issue.labels.map(label => label.name)
    this.number = issue.number
  }

  getURL() {
    return `https://github.com/${OWNER}/${REPO}/issues/${this.number}`
  }
}
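
A sketch of the issue body format the new constructor parses — '###' headings become fields, but only labels present in FIELDS (currently just 'Site') are kept. The body below is invented:

const issue = new Issue({
  number: 12345, // hypothetical issue number
  body: '### Site\n\nexample.com\n\n### Notes\n\n_No response_',
  labels: [{ name: 'status:down' }]
})

issue.data.get('site') // 'example.com'
// '### Notes' is discarded (no FIELDS mapping), and '_No response_'
// would have been normalized to '' anyway.
issue.labels // ['status:down']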

@@ -1,41 +0,0 @@
import { Collection, type Dictionary } from '@freearhey/core'
import type { LogoData } from '../types/logo'
import { type Feed } from './feed'

export class Logo {
  channelId?: string
  feedId?: string
  feed?: Feed
  tags: Collection = new Collection()
  width = 0
  height = 0
  format?: string
  url?: string

  constructor(data?: LogoData) {
    if (!data) return

    this.channelId = data.channel
    this.feedId = data.feed || undefined
    this.tags = new Collection(data.tags)
    this.width = data.width
    this.height = data.height
    this.format = data.format || undefined
    this.url = data.url
  }

  withFeed(feedsKeyByStreamId: Dictionary): this {
    if (!this.feedId) return this

    this.feed = feedsKeyByStreamId.get(this.getStreamId())

    return this
  }

  getStreamId(): string {
    if (!this.channelId) return ''
    if (!this.feedId) return this.channelId

    return `${this.channelId}@${this.feedId}`
  }
}
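
The stream id a (removed) Logo resolved to depended on whether the source row named a feed — rows below invented for illustration:

new Logo({ channel: 'CNN.us', feed: 'East', tags: [], width: 512, height: 512, format: 'SVG', url: 'https://example.com/logo.svg' }).getStreamId() // 'CNN.us@East'
new Logo({ channel: 'CNN.us', feed: null, tags: [], width: 512, height: 512, format: 'PNG', url: 'https://example.com/logo.png' }).getStreamId() // 'CNN.us' (channel-level fallback)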

3
scripts/models/program.ts
Normal file
@@ -0,0 +1,3 @@
import * as epgGrabber from 'epg-grabber'

export class Program extends epgGrabber.Program {}

@@ -1,63 +1,63 @@
import { Collection } from '@freearhey/core'
import { Issue } from './'

enum StatusCode {
  DOWN = 'down',
  WARNING = 'warning',
  OK = 'ok'
}

interface Status {
  code: StatusCode
  emoji: string
}

interface SiteProps {
  domain: string
  totalChannels?: number
  markedChannels?: number
  issues: Collection
}

export class Site {
  domain: string
  totalChannels: number
  markedChannels: number
  issues: Collection

  constructor({ domain, totalChannels = 0, markedChannels = 0, issues }: SiteProps) {
    this.domain = domain
    this.totalChannels = totalChannels
    this.markedChannels = markedChannels
    this.issues = issues
  }

  getStatus(): Status {
    const issuesWithStatusDown = this.issues.filter((issue: Issue) =>
      issue.labels.find(label => label === 'status:down')
    )
    if (issuesWithStatusDown.notEmpty())
      return {
        code: StatusCode.DOWN,
        emoji: '🔴'
      }

    const issuesWithStatusWarning = this.issues.filter((issue: Issue) =>
      issue.labels.find(label => label === 'status:warning')
    )
    if (issuesWithStatusWarning.notEmpty())
      return {
        code: StatusCode.WARNING,
        emoji: '🟡'
      }

    return {
      code: StatusCode.OK,
      emoji: '🟢'
    }
  }

  getIssues(): Collection {
    return this.issues.map((issue: Issue) => issue.getURL())
  }
}

import { Collection } from '@freearhey/core'
import { Issue } from './'

enum StatusCode {
  DOWN = 'down',
  WARNING = 'warning',
  OK = 'ok'
}

export interface Status {
  code: StatusCode
  emoji: string
}

export interface SiteData {
  domain: string
  totalChannels?: number
  markedChannels?: number
  issues: Collection<Issue>
}

export class Site {
  domain: string
  totalChannels: number
  markedChannels: number
  issues: Collection<Issue>

  constructor(data: SiteData) {
    this.domain = data.domain
    this.totalChannels = data.totalChannels || 0
    this.markedChannels = data.markedChannels || 0
    this.issues = data.issues
  }

  getStatus(): Status {
    const issuesWithStatusDown = this.issues.filter((issue: Issue) =>
      issue.labels.find(label => label === 'status:down')
    )
    if (issuesWithStatusDown.isNotEmpty())
      return {
        code: StatusCode.DOWN,
        emoji: '🔴'
      }

    const issuesWithStatusWarning = this.issues.filter((issue: Issue) =>
      issue.labels.find(label => label === 'status:warning')
    )
    if (issuesWithStatusWarning.isNotEmpty())
      return {
        code: StatusCode.WARNING,
        emoji: '🟡'
      }

    return {
      code: StatusCode.OK,
      emoji: '🟢'
    }
  }

  getIssueUrls(): Collection<string> {
    return this.issues.map((issue: Issue) => issue.getURL())
  }
}
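
A sketch of how getStatus() resolves — issue objects below are invented; 'status:down' outranks 'status:warning', and anything else falls through to OK:

const site = new Site({
  domain: 'example.com', // hypothetical
  issues: new Collection<Issue>([issueLabeledDown]) // an Issue carrying the 'status:down' label
})

site.getStatus() // { code: StatusCode.DOWN, emoji: '🔴' }
// Only 'status:warning' labels -> WARNING 🟡; no status labels at all -> OK 🟢.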

@@ -1,58 +0,0 @@
import type { StreamData } from '../types/stream'
import { Feed, Channel } from './index'

export class Stream {
  name?: string
  url: string
  id?: string
  channelId?: string
  channel?: Channel
  feedId?: string
  feed?: Feed
  filepath?: string
  line?: number
  label?: string
  verticalResolution?: number
  isInterlaced?: boolean
  referrer?: string
  userAgent?: string
  groupTitle = 'Undefined'
  removed = false

  constructor(data: StreamData) {
    const id = data.channel && data.feed ? [data.channel, data.feed].join('@') : data.channel
    const { verticalResolution, isInterlaced } = parseQuality(data.quality)

    this.id = id || undefined
    this.channelId = data.channel || undefined
    this.feedId = data.feed || undefined
    this.name = data.name || undefined
    this.url = data.url
    this.referrer = data.referrer || undefined
    this.userAgent = data.user_agent || undefined
    this.verticalResolution = verticalResolution || undefined
    this.isInterlaced = isInterlaced || undefined
    this.label = data.label || undefined
  }

  getId(): string {
    return this.id || ''
  }

  getName(): string {
    return this.name || ''
  }
}

function parseQuality(quality: string | null): {
  verticalResolution: number | null
  isInterlaced: boolean | null
} {
  if (!quality) return { verticalResolution: null, isInterlaced: null }
  const [, verticalResolutionString] = quality.match(/^(\d+)/) || [null, undefined]
  const isInterlaced = /i$/i.test(quality)
  let verticalResolution = 0
  if (verticalResolutionString) verticalResolution = parseInt(verticalResolutionString)

  return { verticalResolution, isInterlaced }
}
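
parseQuality() reads a leading number as the vertical resolution and a trailing 'i' as interlacing:

parseQuality('1080i') // { verticalResolution: 1080, isInterlaced: true }
parseQuality('720p') // { verticalResolution: 720, isInterlaced: false }
parseQuality(null) // { verticalResolution: null, isInterlaced: null }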

35
scripts/types/channel.d.ts
vendored
@@ -1,27 +1,8 @@
import { Collection } from '@freearhey/core'

export interface ChannelData {
  id: string
  name: string
  alt_names: string[]
  network: string
  owners: Collection
  country: string
  subdivision: string
  city: string
  categories: Collection
  is_nsfw: boolean
  launched: string
  closed: string
  replaced_by: string
  website: string
}

export interface ChannelSearchableData {
  id: string
  name: string
  altNames: string[]
  guideNames: string[]
  streamNames: string[]
  feedFullNames: string[]
}

export interface ChannelGuideObject {
  channel: string | null
  feed: string | null
  site: string
  site_id: string
  site_name: string
  lang: string
}

20
scripts/types/dataLoader.d.ts
vendored
@@ -1,20 +0,0 @@
import { Storage } from '@freearhey/core'

export interface DataLoaderProps {
  storage: Storage
}

export interface DataLoaderData {
  countries: object | object[]
  regions: object | object[]
  subdivisions: object | object[]
  languages: object | object[]
  categories: object | object[]
  blocklist: object | object[]
  channels: object | object[]
  feeds: object | object[]
  timezones: object | object[]
  guides: object | object[]
  streams: object | object[]
  logos: object | object[]
}

16
scripts/types/dataProcessor.d.ts
vendored
@@ -1,16 +0,0 @@
import { Collection, Dictionary } from '@freearhey/core'

export interface DataProcessorData {
  guideChannelsGroupedByStreamId: Dictionary
  feedsGroupedByChannelId: Dictionary
  logosGroupedByChannelId: Dictionary
  logosGroupedByStreamId: Dictionary
  feedsKeyByStreamId: Dictionary
  streamsGroupedById: Dictionary
  channelsKeyById: Dictionary
  guideChannels: Collection
  channels: Collection
  streams: Collection
  feeds: Collection
  logos: Collection
}

12
scripts/types/feed.d.ts
vendored
@@ -1,12 +0,0 @@
import { Collection } from '@freearhey/core'

export interface FeedData {
  channel: string
  id: string
  name: string
  is_main: boolean
  broadcast_area: Collection
  languages: Collection
  timezones: Collection
  video_format: string
}

8
scripts/types/guide.d.ts
vendored
@@ -1,8 +0,0 @@
export interface GuideData {
  channel: string
  feed: string
  site: string
  site_id: string
  site_name: string
  lang: string
}

17
scripts/types/htmlTable.d.ts
vendored
Normal file
@@ -0,0 +1,17 @@
import { Collection } from '@freearhey/core'

export interface HTMLTableColumn {
  name: string
  nowrap?: boolean
  align?: string
  colspan?: number
}

export interface HTMLTableDataItem {
  value: string
  nowrap?: boolean
  align?: string
  colspan?: number
}

export type HTMLTableRow = Collection<HTMLTableDataItem>
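
Since a row is just a Collection of cells, a hypothetical two-cell row could be built as:

const row: HTMLTableRow = new Collection<HTMLTableDataItem>([
  { value: 'example.com', nowrap: true },
  { value: '🟢', align: 'center' }
])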

9
scripts/types/logo.d.ts
vendored
@@ -1,9 +0,0 @@
export interface LogoData {
  channel: string
  feed: string | null
  tags: string[]
  width: number
  height: number
  format: string | null
  url: string
}

10
scripts/types/queue.d.ts
vendored
Normal file
@@ -0,0 +1,10 @@
import { SiteConfig } from '../core/siteConfig'
import { Channel } from '../models/channel'
import { Dayjs } from 'dayjs'

export interface QueueItem {
  channel: Channel
  date: Dayjs
  siteConfig: SiteConfig
  error: string | null
}

10
scripts/types/stream.d.ts
vendored
@@ -1,10 +0,0 @@
export interface StreamData {
  channel: string | null
  feed: string | null
  name?: string
  url: string
  referrer: string | null
  user_agent: string | null
  quality: string | null
  label: string | null
}