mirror of
https://github.com/iptv-org/epg
synced 2025-12-14 09:26:41 -05:00
stricter ESLint configuration, linebreak on stylistic per deprecation by ESLint, fixed changes. add attibutes to prevent blockade.
This commit is contained in:
2
.gitattributes
vendored
Normal file
2
.gitattributes
vendored
Normal file
@@ -0,0 +1,2 @@
|
||||
# Enforce the usage of CRLF in GitHub Actions per ESLint configuration.
|
||||
* text eol=crlf
|
||||
@@ -1,55 +1,57 @@
|
||||
import typescriptEslint from '@typescript-eslint/eslint-plugin'
|
||||
import globals from 'globals'
|
||||
import tsParser from '@typescript-eslint/parser'
|
||||
import path from 'node:path'
|
||||
import { fileURLToPath } from 'node:url'
|
||||
import js from '@eslint/js'
|
||||
import { FlatCompat } from '@eslint/eslintrc'
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url)
|
||||
const __dirname = path.dirname(__filename)
|
||||
const compat = new FlatCompat({
|
||||
baseDirectory: __dirname,
|
||||
recommendedConfig: js.configs.recommended,
|
||||
allConfig: js.configs.all
|
||||
})
|
||||
|
||||
export default [
|
||||
...compat.extends('eslint:recommended', 'plugin:@typescript-eslint/recommended', 'prettier'),
|
||||
{
|
||||
plugins: {
|
||||
'@typescript-eslint': typescriptEslint
|
||||
},
|
||||
|
||||
languageOptions: {
|
||||
globals: {
|
||||
...globals.node,
|
||||
...globals.jest
|
||||
},
|
||||
|
||||
parser: tsParser,
|
||||
ecmaVersion: 'latest',
|
||||
sourceType: 'module'
|
||||
},
|
||||
|
||||
rules: {
|
||||
'@typescript-eslint/no-require-imports': 'off',
|
||||
'@typescript-eslint/no-var-requires': 'off',
|
||||
'no-case-declarations': 'off',
|
||||
'linebreak-style': ['error', process.env.CI ? 'unix' : 'windows'],
|
||||
|
||||
quotes: [
|
||||
'error',
|
||||
'single',
|
||||
{
|
||||
avoidEscape: true
|
||||
}
|
||||
],
|
||||
|
||||
semi: ['error', 'never']
|
||||
}
|
||||
},
|
||||
{
|
||||
ignores: ['tests/__data__/']
|
||||
}
|
||||
]
|
||||
import typescriptEslint from '@typescript-eslint/eslint-plugin'
|
||||
import stylistic from '@stylistic/eslint-plugin'
|
||||
import globals from 'globals'
|
||||
import tsParser from '@typescript-eslint/parser'
|
||||
import path from 'node:path'
|
||||
import { fileURLToPath } from 'node:url'
|
||||
import js from '@eslint/js'
|
||||
import { FlatCompat } from '@eslint/eslintrc'
|
||||
|
||||
const __filename = fileURLToPath(import.meta.url)
|
||||
const __dirname = path.dirname(__filename)
|
||||
const compat = new FlatCompat({
|
||||
baseDirectory: __dirname,
|
||||
recommendedConfig: js.configs.recommended,
|
||||
allConfig: js.configs.all
|
||||
})
|
||||
|
||||
export default [
|
||||
...compat.extends('eslint:recommended', 'plugin:@typescript-eslint/strict', 'plugin:@typescript-eslint/stylistic', 'prettier'),
|
||||
{
|
||||
plugins: {
|
||||
'@typescript-eslint': typescriptEslint,
|
||||
'@stylistic': stylistic
|
||||
},
|
||||
|
||||
languageOptions: {
|
||||
globals: {
|
||||
...globals.node,
|
||||
...globals.jest
|
||||
},
|
||||
|
||||
parser: tsParser,
|
||||
ecmaVersion: 'latest',
|
||||
sourceType: 'module'
|
||||
},
|
||||
|
||||
rules: {
|
||||
'@typescript-eslint/no-require-imports': 'off',
|
||||
'@typescript-eslint/no-var-requires': 'off',
|
||||
'no-case-declarations': 'off',
|
||||
'@stylistic/linebreak-style': ['error', 'windows'],
|
||||
|
||||
quotes: [
|
||||
'error',
|
||||
'single',
|
||||
{
|
||||
avoidEscape: true
|
||||
}
|
||||
],
|
||||
|
||||
semi: ['error', 'never']
|
||||
}
|
||||
},
|
||||
{
|
||||
ignores: ['tests/__data__/']
|
||||
}
|
||||
]
|
||||
|
||||
45
package-lock.json
generated
45
package-lock.json
generated
@@ -18,6 +18,7 @@
|
||||
"@octokit/core": "^7.0.3",
|
||||
"@octokit/plugin-paginate-rest": "^13.1.1",
|
||||
"@octokit/plugin-rest-endpoint-methods": "^16.0.0",
|
||||
"@stylistic/eslint-plugin": "^5.2.2",
|
||||
"@swc/core": "^1.13.2",
|
||||
"@swc/jest": "^0.2.39",
|
||||
"@types/cli-progress": "^3.11.6",
|
||||
@@ -3146,6 +3147,50 @@
|
||||
"@sinonjs/commons": "^3.0.1"
|
||||
}
|
||||
},
|
||||
"node_modules/@stylistic/eslint-plugin": {
|
||||
"version": "5.2.2",
|
||||
"resolved": "https://registry.npmjs.org/@stylistic/eslint-plugin/-/eslint-plugin-5.2.2.tgz",
|
||||
"integrity": "sha512-bE2DUjruqXlHYP3Q2Gpqiuj2bHq7/88FnuaS0FjeGGLCy+X6a07bGVuwtiOYnPSLHR6jmx5Bwdv+j7l8H+G97A==",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
"@eslint-community/eslint-utils": "^4.7.0",
|
||||
"@typescript-eslint/types": "^8.37.0",
|
||||
"eslint-visitor-keys": "^4.2.1",
|
||||
"espree": "^10.4.0",
|
||||
"estraverse": "^5.3.0",
|
||||
"picomatch": "^4.0.3"
|
||||
},
|
||||
"engines": {
|
||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"eslint": ">=9.0.0"
|
||||
}
|
||||
},
|
||||
"node_modules/@stylistic/eslint-plugin/node_modules/eslint-visitor-keys": {
|
||||
"version": "4.2.1",
|
||||
"resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-4.2.1.tgz",
|
||||
"integrity": "sha512-Uhdk5sfqcee/9H/rCOJikYz67o0a2Tw2hGRPOG2Y1R2dg7brRe1uG0yaNQDHu+TO/uQPF/5eCapvYSmHUjt7JQ==",
|
||||
"license": "Apache-2.0",
|
||||
"engines": {
|
||||
"node": "^18.18.0 || ^20.9.0 || >=21.1.0"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://opencollective.com/eslint"
|
||||
}
|
||||
},
|
||||
"node_modules/@stylistic/eslint-plugin/node_modules/picomatch": {
|
||||
"version": "4.0.3",
|
||||
"resolved": "https://registry.npmjs.org/picomatch/-/picomatch-4.0.3.tgz",
|
||||
"integrity": "sha512-5gTmgEY/sqK6gFXLIsQNH19lWb4ebPDLA4SdLP7dsWkIXHWlG66oPuVvXSGFPppYZz8ZDZq0dYYrbHfBCVUb1Q==",
|
||||
"license": "MIT",
|
||||
"engines": {
|
||||
"node": ">=12"
|
||||
},
|
||||
"funding": {
|
||||
"url": "https://github.com/sponsors/jonschlinkert"
|
||||
}
|
||||
},
|
||||
"node_modules/@swc/core": {
|
||||
"version": "1.13.2",
|
||||
"resolved": "https://registry.npmjs.org/@swc/core/-/core-1.13.2.tgz",
|
||||
|
||||
@@ -46,6 +46,7 @@
|
||||
"@octokit/core": "^7.0.3",
|
||||
"@octokit/plugin-paginate-rest": "^13.1.1",
|
||||
"@octokit/plugin-rest-endpoint-methods": "^16.0.0",
|
||||
"@stylistic/eslint-plugin": "^5.2.2",
|
||||
"@swc/core": "^1.13.2",
|
||||
"@swc/jest": "^0.2.39",
|
||||
"@types/cli-progress": "^3.11.6",
|
||||
|
||||
@@ -1,216 +1,216 @@
|
||||
import { Storage, Collection, Logger, Dictionary } from '@freearhey/core'
|
||||
import type { DataProcessorData } from '../../types/dataProcessor'
|
||||
import type { DataLoaderData } from '../../types/dataLoader'
|
||||
import { ChannelSearchableData } from '../../types/channel'
|
||||
import { Channel, ChannelList, Feed } from '../../models'
|
||||
import { DataProcessor, DataLoader } from '../../core'
|
||||
import { select, input } from '@inquirer/prompts'
|
||||
import { ChannelsParser } from '../../core'
|
||||
import { DATA_DIR } from '../../constants'
|
||||
import nodeCleanup from 'node-cleanup'
|
||||
import sjs from '@freearhey/search-js'
|
||||
import epgGrabber from 'epg-grabber'
|
||||
import { Command } from 'commander'
|
||||
import readline from 'readline'
|
||||
|
||||
type ChoiceValue = { type: string; value?: Feed | Channel }
|
||||
type Choice = { name: string; short?: string; value: ChoiceValue; default?: boolean }
|
||||
|
||||
if (process.platform === 'win32') {
|
||||
readline
|
||||
.createInterface({
|
||||
input: process.stdin,
|
||||
output: process.stdout
|
||||
})
|
||||
.on('SIGINT', function () {
|
||||
process.emit('SIGINT')
|
||||
})
|
||||
}
|
||||
|
||||
const program = new Command()
|
||||
|
||||
program.argument('<filepath>', 'Path to *.channels.xml file to edit').parse(process.argv)
|
||||
|
||||
const filepath = program.args[0]
|
||||
const logger = new Logger()
|
||||
const storage = new Storage()
|
||||
let channelList = new ChannelList({ channels: [] })
|
||||
|
||||
main(filepath)
|
||||
nodeCleanup(() => {
|
||||
save(filepath, channelList)
|
||||
})
|
||||
|
||||
export default async function main(filepath: string) {
|
||||
if (!(await storage.exists(filepath))) {
|
||||
throw new Error(`File "${filepath}" does not exists`)
|
||||
}
|
||||
|
||||
logger.info('loading data from api...')
|
||||
const processor = new DataProcessor()
|
||||
const dataStorage = new Storage(DATA_DIR)
|
||||
const loader = new DataLoader({ storage: dataStorage })
|
||||
const data: DataLoaderData = await loader.load()
|
||||
const { channels, channelsKeyById, feedsGroupedByChannelId }: DataProcessorData =
|
||||
processor.process(data)
|
||||
|
||||
logger.info('loading channels...')
|
||||
const parser = new ChannelsParser({ storage })
|
||||
channelList = await parser.parse(filepath)
|
||||
const parsedChannelsWithoutId = channelList.channels.filter(
|
||||
(channel: epgGrabber.Channel) => !channel.xmltv_id
|
||||
)
|
||||
|
||||
logger.info(
|
||||
`found ${channelList.channels.count()} channels (including ${parsedChannelsWithoutId.count()} without ID)`
|
||||
)
|
||||
|
||||
logger.info('creating search index...')
|
||||
const items = channels.map((channel: Channel) => channel.getSearchable()).all()
|
||||
const searchIndex = sjs.createIndex(items, {
|
||||
searchable: ['name', 'altNames', 'guideNames', 'streamNames', 'feedFullNames']
|
||||
})
|
||||
|
||||
logger.info('starting...\n')
|
||||
|
||||
for (const channel of parsedChannelsWithoutId.all()) {
|
||||
try {
|
||||
channel.xmltv_id = await selectChannel(
|
||||
channel,
|
||||
searchIndex,
|
||||
feedsGroupedByChannelId,
|
||||
channelsKeyById
|
||||
)
|
||||
} catch (err) {
|
||||
logger.info(err.message)
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
parsedChannelsWithoutId.forEach((channel: epgGrabber.Channel) => {
|
||||
if (channel.xmltv_id === '-') {
|
||||
channel.xmltv_id = ''
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
async function selectChannel(
|
||||
channel: epgGrabber.Channel,
|
||||
searchIndex,
|
||||
feedsGroupedByChannelId: Dictionary,
|
||||
channelsKeyById: Dictionary
|
||||
): Promise<string> {
|
||||
const query = escapeRegex(channel.name)
|
||||
const similarChannels = searchIndex
|
||||
.search(query)
|
||||
.map((item: ChannelSearchableData) => channelsKeyById.get(item.id))
|
||||
|
||||
const selected: ChoiceValue = await select({
|
||||
message: `Select channel ID for "${channel.name}" (${channel.site_id}):`,
|
||||
choices: getChannelChoises(new Collection(similarChannels)),
|
||||
pageSize: 10
|
||||
})
|
||||
|
||||
switch (selected.type) {
|
||||
case 'skip':
|
||||
return '-'
|
||||
case 'type': {
|
||||
const typedChannelId = await input({ message: ' Channel ID:' })
|
||||
if (!typedChannelId) return ''
|
||||
const selectedFeedId = await selectFeed(typedChannelId, feedsGroupedByChannelId)
|
||||
if (selectedFeedId === '-') return typedChannelId
|
||||
return [typedChannelId, selectedFeedId].join('@')
|
||||
}
|
||||
case 'channel': {
|
||||
const selectedChannel = selected.value
|
||||
if (!selectedChannel) return ''
|
||||
const selectedFeedId = await selectFeed(selectedChannel.id || '', feedsGroupedByChannelId)
|
||||
if (selectedFeedId === '-') return selectedChannel.id || ''
|
||||
return [selectedChannel.id, selectedFeedId].join('@')
|
||||
}
|
||||
}
|
||||
|
||||
return ''
|
||||
}
|
||||
|
||||
async function selectFeed(channelId: string, feedsGroupedByChannelId: Dictionary): Promise<string> {
|
||||
const channelFeeds = feedsGroupedByChannelId.has(channelId)
|
||||
? new Collection(feedsGroupedByChannelId.get(channelId))
|
||||
: new Collection()
|
||||
const choices = getFeedChoises(channelFeeds)
|
||||
|
||||
const selected: ChoiceValue = await select({
|
||||
message: `Select feed ID for "${channelId}":`,
|
||||
choices,
|
||||
pageSize: 10
|
||||
})
|
||||
|
||||
switch (selected.type) {
|
||||
case 'skip':
|
||||
return '-'
|
||||
case 'type':
|
||||
return await input({ message: ' Feed ID:', default: 'SD' })
|
||||
case 'feed':
|
||||
const selectedFeed = selected.value
|
||||
if (!selectedFeed) return ''
|
||||
return selectedFeed.id || ''
|
||||
}
|
||||
|
||||
return ''
|
||||
}
|
||||
|
||||
function getChannelChoises(channels: Collection): Choice[] {
|
||||
const choises: Choice[] = []
|
||||
|
||||
channels.forEach((channel: Channel) => {
|
||||
const names = new Collection([channel.name, ...channel.getAltNames().all()]).uniq().join(', ')
|
||||
|
||||
choises.push({
|
||||
value: {
|
||||
type: 'channel',
|
||||
value: channel
|
||||
},
|
||||
name: `${channel.id} (${names})`,
|
||||
short: `${channel.id}`
|
||||
})
|
||||
})
|
||||
|
||||
choises.push({ name: 'Type...', value: { type: 'type' } })
|
||||
choises.push({ name: 'Skip', value: { type: 'skip' } })
|
||||
|
||||
return choises
|
||||
}
|
||||
|
||||
function getFeedChoises(feeds: Collection): Choice[] {
|
||||
const choises: Choice[] = []
|
||||
|
||||
feeds.forEach((feed: Feed) => {
|
||||
let name = `${feed.id} (${feed.name})`
|
||||
if (feed.isMain) name += ' [main]'
|
||||
|
||||
choises.push({
|
||||
value: {
|
||||
type: 'feed',
|
||||
value: feed
|
||||
},
|
||||
default: feed.isMain,
|
||||
name,
|
||||
short: feed.id
|
||||
})
|
||||
})
|
||||
|
||||
choises.push({ name: 'Type...', value: { type: 'type' } })
|
||||
choises.push({ name: 'Skip', value: { type: 'skip' } })
|
||||
|
||||
return choises
|
||||
}
|
||||
|
||||
function save(filepath: string, channelList: ChannelList) {
|
||||
if (!storage.existsSync(filepath)) return
|
||||
storage.saveSync(filepath, channelList.toString())
|
||||
logger.info(`\nFile '${filepath}' successfully saved`)
|
||||
}
|
||||
|
||||
function escapeRegex(string: string) {
|
||||
return string.replace(/[/\-\\^$*+?.()|[\]{}]/g, '\\$&')
|
||||
}
|
||||
import { Storage, Collection, Logger, Dictionary } from '@freearhey/core'
|
||||
import type { DataProcessorData } from '../../types/dataProcessor'
|
||||
import type { DataLoaderData } from '../../types/dataLoader'
|
||||
import { ChannelSearchableData } from '../../types/channel'
|
||||
import { Channel, ChannelList, Feed } from '../../models'
|
||||
import { DataProcessor, DataLoader } from '../../core'
|
||||
import { select, input } from '@inquirer/prompts'
|
||||
import { ChannelsParser } from '../../core'
|
||||
import { DATA_DIR } from '../../constants'
|
||||
import nodeCleanup from 'node-cleanup'
|
||||
import sjs from '@freearhey/search-js'
|
||||
import epgGrabber from 'epg-grabber'
|
||||
import { Command } from 'commander'
|
||||
import readline from 'readline'
|
||||
|
||||
interface ChoiceValue { type: string; value?: Feed | Channel }
|
||||
interface Choice { name: string; short?: string; value: ChoiceValue; default?: boolean }
|
||||
|
||||
if (process.platform === 'win32') {
|
||||
readline
|
||||
.createInterface({
|
||||
input: process.stdin,
|
||||
output: process.stdout
|
||||
})
|
||||
.on('SIGINT', function () {
|
||||
process.emit('SIGINT')
|
||||
})
|
||||
}
|
||||
|
||||
const program = new Command()
|
||||
|
||||
program.argument('<filepath>', 'Path to *.channels.xml file to edit').parse(process.argv)
|
||||
|
||||
const filepath = program.args[0]
|
||||
const logger = new Logger()
|
||||
const storage = new Storage()
|
||||
let channelList = new ChannelList({ channels: [] })
|
||||
|
||||
main(filepath)
|
||||
nodeCleanup(() => {
|
||||
save(filepath, channelList)
|
||||
})
|
||||
|
||||
export default async function main(filepath: string) {
|
||||
if (!(await storage.exists(filepath))) {
|
||||
throw new Error(`File "${filepath}" does not exists`)
|
||||
}
|
||||
|
||||
logger.info('loading data from api...')
|
||||
const processor = new DataProcessor()
|
||||
const dataStorage = new Storage(DATA_DIR)
|
||||
const loader = new DataLoader({ storage: dataStorage })
|
||||
const data: DataLoaderData = await loader.load()
|
||||
const { channels, channelsKeyById, feedsGroupedByChannelId }: DataProcessorData =
|
||||
processor.process(data)
|
||||
|
||||
logger.info('loading channels...')
|
||||
const parser = new ChannelsParser({ storage })
|
||||
channelList = await parser.parse(filepath)
|
||||
const parsedChannelsWithoutId = channelList.channels.filter(
|
||||
(channel: epgGrabber.Channel) => !channel.xmltv_id
|
||||
)
|
||||
|
||||
logger.info(
|
||||
`found ${channelList.channels.count()} channels (including ${parsedChannelsWithoutId.count()} without ID)`
|
||||
)
|
||||
|
||||
logger.info('creating search index...')
|
||||
const items = channels.map((channel: Channel) => channel.getSearchable()).all()
|
||||
const searchIndex = sjs.createIndex(items, {
|
||||
searchable: ['name', 'altNames', 'guideNames', 'streamNames', 'feedFullNames']
|
||||
})
|
||||
|
||||
logger.info('starting...\n')
|
||||
|
||||
for (const channel of parsedChannelsWithoutId.all()) {
|
||||
try {
|
||||
channel.xmltv_id = await selectChannel(
|
||||
channel,
|
||||
searchIndex,
|
||||
feedsGroupedByChannelId,
|
||||
channelsKeyById
|
||||
)
|
||||
} catch (err) {
|
||||
logger.info(err.message)
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
parsedChannelsWithoutId.forEach((channel: epgGrabber.Channel) => {
|
||||
if (channel.xmltv_id === '-') {
|
||||
channel.xmltv_id = ''
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
async function selectChannel(
|
||||
channel: epgGrabber.Channel,
|
||||
searchIndex,
|
||||
feedsGroupedByChannelId: Dictionary,
|
||||
channelsKeyById: Dictionary
|
||||
): Promise<string> {
|
||||
const query = escapeRegex(channel.name)
|
||||
const similarChannels = searchIndex
|
||||
.search(query)
|
||||
.map((item: ChannelSearchableData) => channelsKeyById.get(item.id))
|
||||
|
||||
const selected: ChoiceValue = await select({
|
||||
message: `Select channel ID for "${channel.name}" (${channel.site_id}):`,
|
||||
choices: getChannelChoises(new Collection(similarChannels)),
|
||||
pageSize: 10
|
||||
})
|
||||
|
||||
switch (selected.type) {
|
||||
case 'skip':
|
||||
return '-'
|
||||
case 'type': {
|
||||
const typedChannelId = await input({ message: ' Channel ID:' })
|
||||
if (!typedChannelId) return ''
|
||||
const selectedFeedId = await selectFeed(typedChannelId, feedsGroupedByChannelId)
|
||||
if (selectedFeedId === '-') return typedChannelId
|
||||
return [typedChannelId, selectedFeedId].join('@')
|
||||
}
|
||||
case 'channel': {
|
||||
const selectedChannel = selected.value
|
||||
if (!selectedChannel) return ''
|
||||
const selectedFeedId = await selectFeed(selectedChannel.id || '', feedsGroupedByChannelId)
|
||||
if (selectedFeedId === '-') return selectedChannel.id || ''
|
||||
return [selectedChannel.id, selectedFeedId].join('@')
|
||||
}
|
||||
}
|
||||
|
||||
return ''
|
||||
}
|
||||
|
||||
async function selectFeed(channelId: string, feedsGroupedByChannelId: Dictionary): Promise<string> {
|
||||
const channelFeeds = feedsGroupedByChannelId.has(channelId)
|
||||
? new Collection(feedsGroupedByChannelId.get(channelId))
|
||||
: new Collection()
|
||||
const choices = getFeedChoises(channelFeeds)
|
||||
|
||||
const selected: ChoiceValue = await select({
|
||||
message: `Select feed ID for "${channelId}":`,
|
||||
choices,
|
||||
pageSize: 10
|
||||
})
|
||||
|
||||
switch (selected.type) {
|
||||
case 'skip':
|
||||
return '-'
|
||||
case 'type':
|
||||
return await input({ message: ' Feed ID:', default: 'SD' })
|
||||
case 'feed':
|
||||
const selectedFeed = selected.value
|
||||
if (!selectedFeed) return ''
|
||||
return selectedFeed.id || ''
|
||||
}
|
||||
|
||||
return ''
|
||||
}
|
||||
|
||||
function getChannelChoises(channels: Collection): Choice[] {
|
||||
const choises: Choice[] = []
|
||||
|
||||
channels.forEach((channel: Channel) => {
|
||||
const names = new Collection([channel.name, ...channel.getAltNames().all()]).uniq().join(', ')
|
||||
|
||||
choises.push({
|
||||
value: {
|
||||
type: 'channel',
|
||||
value: channel
|
||||
},
|
||||
name: `${channel.id} (${names})`,
|
||||
short: `${channel.id}`
|
||||
})
|
||||
})
|
||||
|
||||
choises.push({ name: 'Type...', value: { type: 'type' } })
|
||||
choises.push({ name: 'Skip', value: { type: 'skip' } })
|
||||
|
||||
return choises
|
||||
}
|
||||
|
||||
function getFeedChoises(feeds: Collection): Choice[] {
|
||||
const choises: Choice[] = []
|
||||
|
||||
feeds.forEach((feed: Feed) => {
|
||||
let name = `${feed.id} (${feed.name})`
|
||||
if (feed.isMain) name += ' [main]'
|
||||
|
||||
choises.push({
|
||||
value: {
|
||||
type: 'feed',
|
||||
value: feed
|
||||
},
|
||||
default: feed.isMain,
|
||||
name,
|
||||
short: feed.id
|
||||
})
|
||||
})
|
||||
|
||||
choises.push({ name: 'Type...', value: { type: 'type' } })
|
||||
choises.push({ name: 'Skip', value: { type: 'skip' } })
|
||||
|
||||
return choises
|
||||
}
|
||||
|
||||
function save(filepath: string, channelList: ChannelList) {
|
||||
if (!storage.existsSync(filepath)) return
|
||||
storage.saveSync(filepath, channelList.toString())
|
||||
logger.info(`\nFile '${filepath}' successfully saved`)
|
||||
}
|
||||
|
||||
function escapeRegex(string: string) {
|
||||
return string.replace(/[/\-\\^$*+?.()|[\]{}]/g, '\\$&')
|
||||
}
|
||||
|
||||
@@ -1,88 +1,86 @@
|
||||
import { Logger, File, Storage } from '@freearhey/core'
|
||||
import { ChannelsParser } from '../../core'
|
||||
import { ChannelList } from '../../models'
|
||||
import { pathToFileURL } from 'node:url'
|
||||
import epgGrabber from 'epg-grabber'
|
||||
import { Command } from 'commander'
|
||||
|
||||
const program = new Command()
|
||||
program
|
||||
.requiredOption('-c, --config <config>', 'Config file')
|
||||
.option('-s, --set [args...]', 'Set custom arguments')
|
||||
.option('-o, --output <output>', 'Output file')
|
||||
.parse(process.argv)
|
||||
|
||||
type ParseOptions = {
|
||||
config: string
|
||||
set?: string
|
||||
output?: string
|
||||
clean?: boolean
|
||||
}
|
||||
|
||||
const options: ParseOptions = program.opts()
|
||||
|
||||
async function main() {
|
||||
function isPromise(promise: object[] | Promise<object[]>) {
|
||||
return (
|
||||
!!promise &&
|
||||
typeof promise === 'object' &&
|
||||
typeof (promise as Promise<object[]>).then === 'function'
|
||||
)
|
||||
}
|
||||
|
||||
const storage = new Storage()
|
||||
const logger = new Logger()
|
||||
const parser = new ChannelsParser({ storage })
|
||||
const file = new File(options.config)
|
||||
const dir = file.dirname()
|
||||
const config = (await import(pathToFileURL(options.config).toString())).default
|
||||
const outputFilepath = options.output || `${dir}/${config.site}.channels.xml`
|
||||
|
||||
let channelList = new ChannelList({ channels: [] })
|
||||
if (await storage.exists(outputFilepath)) {
|
||||
channelList = await parser.parse(outputFilepath)
|
||||
}
|
||||
|
||||
const args: {
|
||||
[key: string]: string
|
||||
} = {}
|
||||
|
||||
if (Array.isArray(options.set)) {
|
||||
options.set.forEach((arg: string) => {
|
||||
const [key, value] = arg.split(':')
|
||||
args[key] = value
|
||||
})
|
||||
}
|
||||
|
||||
let parsedChannels = config.channels(args)
|
||||
if (isPromise(parsedChannels)) {
|
||||
parsedChannels = await parsedChannels
|
||||
}
|
||||
parsedChannels = parsedChannels.map((channel: epgGrabber.Channel) => {
|
||||
channel.site = config.site
|
||||
|
||||
return channel
|
||||
})
|
||||
|
||||
const newChannelList = new ChannelList({ channels: [] })
|
||||
parsedChannels.forEach((channel: epgGrabber.Channel) => {
|
||||
if (!channel.site_id) return
|
||||
|
||||
const found: epgGrabber.Channel | undefined = channelList.get(channel.site_id)
|
||||
|
||||
if (found) {
|
||||
channel.xmltv_id = found.xmltv_id
|
||||
channel.lang = found.lang
|
||||
}
|
||||
|
||||
newChannelList.add(channel)
|
||||
})
|
||||
|
||||
newChannelList.sort()
|
||||
|
||||
await storage.save(outputFilepath, newChannelList.toString())
|
||||
|
||||
logger.info(`File '${outputFilepath}' successfully saved`)
|
||||
}
|
||||
|
||||
main()
|
||||
import { Logger, File, Storage } from '@freearhey/core'
|
||||
import { ChannelsParser } from '../../core'
|
||||
import { ChannelList } from '../../models'
|
||||
import { pathToFileURL } from 'node:url'
|
||||
import epgGrabber from 'epg-grabber'
|
||||
import { Command } from 'commander'
|
||||
|
||||
const program = new Command()
|
||||
program
|
||||
.requiredOption('-c, --config <config>', 'Config file')
|
||||
.option('-s, --set [args...]', 'Set custom arguments')
|
||||
.option('-o, --output <output>', 'Output file')
|
||||
.parse(process.argv)
|
||||
|
||||
interface ParseOptions {
|
||||
config: string
|
||||
set?: string
|
||||
output?: string
|
||||
clean?: boolean
|
||||
}
|
||||
|
||||
const options: ParseOptions = program.opts()
|
||||
|
||||
async function main() {
|
||||
function isPromise(promise: object[] | Promise<object[]>) {
|
||||
return (
|
||||
!!promise &&
|
||||
typeof promise === 'object' &&
|
||||
typeof (promise as Promise<object[]>).then === 'function'
|
||||
)
|
||||
}
|
||||
|
||||
const storage = new Storage()
|
||||
const logger = new Logger()
|
||||
const parser = new ChannelsParser({ storage })
|
||||
const file = new File(options.config)
|
||||
const dir = file.dirname()
|
||||
const config = (await import(pathToFileURL(options.config).toString())).default
|
||||
const outputFilepath = options.output || `${dir}/${config.site}.channels.xml`
|
||||
|
||||
let channelList = new ChannelList({ channels: [] })
|
||||
if (await storage.exists(outputFilepath)) {
|
||||
channelList = await parser.parse(outputFilepath)
|
||||
}
|
||||
|
||||
const args: Record<string, string> = {}
|
||||
|
||||
if (Array.isArray(options.set)) {
|
||||
options.set.forEach((arg: string) => {
|
||||
const [key, value] = arg.split(':')
|
||||
args[key] = value
|
||||
})
|
||||
}
|
||||
|
||||
let parsedChannels = config.channels(args)
|
||||
if (isPromise(parsedChannels)) {
|
||||
parsedChannels = await parsedChannels
|
||||
}
|
||||
parsedChannels = parsedChannels.map((channel: epgGrabber.Channel) => {
|
||||
channel.site = config.site
|
||||
|
||||
return channel
|
||||
})
|
||||
|
||||
const newChannelList = new ChannelList({ channels: [] })
|
||||
parsedChannels.forEach((channel: epgGrabber.Channel) => {
|
||||
if (!channel.site_id) return
|
||||
|
||||
const found: epgGrabber.Channel | undefined = channelList.get(channel.site_id)
|
||||
|
||||
if (found) {
|
||||
channel.xmltv_id = found.xmltv_id
|
||||
channel.lang = found.lang
|
||||
}
|
||||
|
||||
newChannelList.add(channel)
|
||||
})
|
||||
|
||||
newChannelList.sort()
|
||||
|
||||
await storage.save(outputFilepath, newChannelList.toString())
|
||||
|
||||
logger.info(`File '${outputFilepath}' successfully saved`)
|
||||
}
|
||||
|
||||
main()
|
||||
|
||||
@@ -1,92 +1,92 @@
|
||||
import { ChannelsParser, DataLoader, DataProcessor } from '../../core'
|
||||
import { DataProcessorData } from '../../types/dataProcessor'
|
||||
import { Storage, Dictionary, File } from '@freearhey/core'
|
||||
import { DataLoaderData } from '../../types/dataLoader'
|
||||
import { ChannelList } from '../../models'
|
||||
import { DATA_DIR } from '../../constants'
|
||||
import epgGrabber from 'epg-grabber'
|
||||
import { program } from 'commander'
|
||||
import chalk from 'chalk'
|
||||
import langs from 'langs'
|
||||
|
||||
program.argument('[filepath...]', 'Path to *.channels.xml files to validate').parse(process.argv)
|
||||
|
||||
type ValidationError = {
|
||||
type: 'duplicate' | 'wrong_channel_id' | 'wrong_feed_id' | 'wrong_lang'
|
||||
name: string
|
||||
lang?: string
|
||||
xmltv_id?: string
|
||||
site_id?: string
|
||||
logo?: string
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const processor = new DataProcessor()
|
||||
const dataStorage = new Storage(DATA_DIR)
|
||||
const loader = new DataLoader({ storage: dataStorage })
|
||||
const data: DataLoaderData = await loader.load()
|
||||
const { channelsKeyById, feedsKeyByStreamId }: DataProcessorData = processor.process(data)
|
||||
const parser = new ChannelsParser({
|
||||
storage: new Storage()
|
||||
})
|
||||
|
||||
let totalFiles = 0
|
||||
let totalErrors = 0
|
||||
|
||||
const storage = new Storage()
|
||||
const files = program.args.length ? program.args : await storage.list('sites/**/*.channels.xml')
|
||||
for (const filepath of files) {
|
||||
const file = new File(filepath)
|
||||
if (file.extension() !== 'xml') continue
|
||||
|
||||
const channelList: ChannelList = await parser.parse(filepath)
|
||||
|
||||
const bufferBySiteId = new Dictionary()
|
||||
const errors: ValidationError[] = []
|
||||
channelList.channels.forEach((channel: epgGrabber.Channel) => {
|
||||
const bufferId: string = channel.site_id
|
||||
if (bufferBySiteId.missing(bufferId)) {
|
||||
bufferBySiteId.set(bufferId, true)
|
||||
} else {
|
||||
errors.push({ type: 'duplicate', ...channel })
|
||||
totalErrors++
|
||||
}
|
||||
|
||||
if (!langs.where('1', channel.lang ?? '')) {
|
||||
errors.push({ type: 'wrong_lang', ...channel })
|
||||
totalErrors++
|
||||
}
|
||||
|
||||
if (!channel.xmltv_id) return
|
||||
const [channelId, feedId] = channel.xmltv_id.split('@')
|
||||
|
||||
const foundChannel = channelsKeyById.get(channelId)
|
||||
if (!foundChannel) {
|
||||
errors.push({ type: 'wrong_channel_id', ...channel })
|
||||
totalErrors++
|
||||
}
|
||||
|
||||
if (feedId) {
|
||||
const foundFeed = feedsKeyByStreamId.get(channel.xmltv_id)
|
||||
if (!foundFeed) {
|
||||
errors.push({ type: 'wrong_feed_id', ...channel })
|
||||
totalErrors++
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
if (errors.length) {
|
||||
console.log(chalk.underline(filepath))
|
||||
console.table(errors, ['type', 'lang', 'xmltv_id', 'site_id', 'name'])
|
||||
console.log()
|
||||
totalFiles++
|
||||
}
|
||||
}
|
||||
|
||||
if (totalErrors > 0) {
|
||||
console.log(chalk.red(`${totalErrors} error(s) in ${totalFiles} file(s)`))
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
main()
|
||||
import { ChannelsParser, DataLoader, DataProcessor } from '../../core'
|
||||
import { DataProcessorData } from '../../types/dataProcessor'
|
||||
import { Storage, Dictionary, File } from '@freearhey/core'
|
||||
import { DataLoaderData } from '../../types/dataLoader'
|
||||
import { ChannelList } from '../../models'
|
||||
import { DATA_DIR } from '../../constants'
|
||||
import epgGrabber from 'epg-grabber'
|
||||
import { program } from 'commander'
|
||||
import chalk from 'chalk'
|
||||
import langs from 'langs'
|
||||
|
||||
program.argument('[filepath...]', 'Path to *.channels.xml files to validate').parse(process.argv)
|
||||
|
||||
interface ValidationError {
|
||||
type: 'duplicate' | 'wrong_channel_id' | 'wrong_feed_id' | 'wrong_lang'
|
||||
name: string
|
||||
lang?: string
|
||||
xmltv_id?: string
|
||||
site_id?: string
|
||||
logo?: string
|
||||
}
|
||||
|
||||
async function main() {
|
||||
const processor = new DataProcessor()
|
||||
const dataStorage = new Storage(DATA_DIR)
|
||||
const loader = new DataLoader({ storage: dataStorage })
|
||||
const data: DataLoaderData = await loader.load()
|
||||
const { channelsKeyById, feedsKeyByStreamId }: DataProcessorData = processor.process(data)
|
||||
const parser = new ChannelsParser({
|
||||
storage: new Storage()
|
||||
})
|
||||
|
||||
let totalFiles = 0
|
||||
let totalErrors = 0
|
||||
|
||||
const storage = new Storage()
|
||||
const files = program.args.length ? program.args : await storage.list('sites/**/*.channels.xml')
|
||||
for (const filepath of files) {
|
||||
const file = new File(filepath)
|
||||
if (file.extension() !== 'xml') continue
|
||||
|
||||
const channelList: ChannelList = await parser.parse(filepath)
|
||||
|
||||
const bufferBySiteId = new Dictionary()
|
||||
const errors: ValidationError[] = []
|
||||
channelList.channels.forEach((channel: epgGrabber.Channel) => {
|
||||
const bufferId: string = channel.site_id
|
||||
if (bufferBySiteId.missing(bufferId)) {
|
||||
bufferBySiteId.set(bufferId, true)
|
||||
} else {
|
||||
errors.push({ type: 'duplicate', ...channel })
|
||||
totalErrors++
|
||||
}
|
||||
|
||||
if (!langs.where('1', channel.lang ?? '')) {
|
||||
errors.push({ type: 'wrong_lang', ...channel })
|
||||
totalErrors++
|
||||
}
|
||||
|
||||
if (!channel.xmltv_id) return
|
||||
const [channelId, feedId] = channel.xmltv_id.split('@')
|
||||
|
||||
const foundChannel = channelsKeyById.get(channelId)
|
||||
if (!foundChannel) {
|
||||
errors.push({ type: 'wrong_channel_id', ...channel })
|
||||
totalErrors++
|
||||
}
|
||||
|
||||
if (feedId) {
|
||||
const foundFeed = feedsKeyByStreamId.get(channel.xmltv_id)
|
||||
if (!foundFeed) {
|
||||
errors.push({ type: 'wrong_feed_id', ...channel })
|
||||
totalErrors++
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
if (errors.length) {
|
||||
console.log(chalk.underline(filepath))
|
||||
console.table(errors, ['type', 'lang', 'xmltv_id', 'site_id', 'name'])
|
||||
console.log()
|
||||
totalFiles++
|
||||
}
|
||||
}
|
||||
|
||||
if (totalErrors > 0) {
|
||||
console.log(chalk.red(`${totalErrors} error(s) in ${totalFiles} file(s)`))
|
||||
process.exit(1)
|
||||
}
|
||||
}
|
||||
|
||||
main()
|
||||
|
||||
@@ -1,133 +1,133 @@
|
||||
import { Logger, Timer, Storage, Collection } from '@freearhey/core'
|
||||
import { QueueCreator, Job, ChannelsParser } from '../../core'
|
||||
import { Option, program } from 'commander'
|
||||
import { SITES_DIR } from '../../constants'
|
||||
import { Channel } from 'epg-grabber'
|
||||
import path from 'path'
|
||||
import { ChannelList } from '../../models'
|
||||
|
||||
program
|
||||
.addOption(new Option('-s, --site <name>', 'Name of the site to parse'))
|
||||
.addOption(
|
||||
new Option(
|
||||
'-c, --channels <path>',
|
||||
'Path to *.channels.xml file (required if the "--site" attribute is not specified)'
|
||||
)
|
||||
)
|
||||
.addOption(new Option('-o, --output <path>', 'Path to output file').default('guide.xml'))
|
||||
.addOption(new Option('-l, --lang <codes>', 'Filter channels by languages (ISO 639-1 codes)'))
|
||||
.addOption(
|
||||
new Option('-t, --timeout <milliseconds>', 'Override the default timeout for each request').env(
|
||||
'TIMEOUT'
|
||||
)
|
||||
)
|
||||
.addOption(
|
||||
new Option('-d, --delay <milliseconds>', 'Override the default delay between request').env(
|
||||
'DELAY'
|
||||
)
|
||||
)
|
||||
.addOption(new Option('-x, --proxy <url>', 'Use the specified proxy').env('PROXY'))
|
||||
.addOption(
|
||||
new Option(
|
||||
'--days <days>',
|
||||
'Override the number of days for which the program will be loaded (defaults to the value from the site config)'
|
||||
)
|
||||
.argParser(value => parseInt(value))
|
||||
.env('DAYS')
|
||||
)
|
||||
.addOption(
|
||||
new Option('--maxConnections <number>', 'Limit on the number of concurrent requests')
|
||||
.default(1)
|
||||
.env('MAX_CONNECTIONS')
|
||||
)
|
||||
.addOption(
|
||||
new Option('--gzip', 'Create a compressed version of the guide as well')
|
||||
.default(false)
|
||||
.env('GZIP')
|
||||
)
|
||||
.addOption(new Option('--curl', 'Display each request as CURL').default(false).env('CURL'))
|
||||
.parse()
|
||||
|
||||
export type GrabOptions = {
|
||||
site?: string
|
||||
channels?: string
|
||||
output: string
|
||||
gzip: boolean
|
||||
curl: boolean
|
||||
maxConnections: number
|
||||
timeout?: string
|
||||
delay?: string
|
||||
lang?: string
|
||||
days?: number
|
||||
proxy?: string
|
||||
}
|
||||
|
||||
const options: GrabOptions = program.opts()
|
||||
|
||||
async function main() {
|
||||
if (!options.site && !options.channels)
|
||||
throw new Error('One of the arguments must be presented: `--site` or `--channels`')
|
||||
|
||||
const logger = new Logger()
|
||||
|
||||
logger.start('starting...')
|
||||
|
||||
logger.info('config:')
|
||||
logger.tree(options)
|
||||
|
||||
logger.info('loading channels...')
|
||||
const storage = new Storage()
|
||||
const parser = new ChannelsParser({ storage })
|
||||
|
||||
let files: string[] = []
|
||||
if (options.site) {
|
||||
let pattern = path.join(SITES_DIR, options.site, '*.channels.xml')
|
||||
pattern = pattern.replace(/\\/g, '/')
|
||||
files = await storage.list(pattern)
|
||||
} else if (options.channels) {
|
||||
files = await storage.list(options.channels)
|
||||
}
|
||||
|
||||
let channels = new Collection()
|
||||
for (const filepath of files) {
|
||||
const channelList: ChannelList = await parser.parse(filepath)
|
||||
|
||||
channels = channels.concat(channelList.channels)
|
||||
}
|
||||
|
||||
if (options.lang) {
|
||||
channels = channels.filter((channel: Channel) => {
|
||||
if (!options.lang || !channel.lang) return true
|
||||
|
||||
return options.lang.includes(channel.lang)
|
||||
})
|
||||
}
|
||||
|
||||
logger.info(` found ${channels.count()} channel(s)`)
|
||||
|
||||
logger.info('run:')
|
||||
runJob({ logger, channels })
|
||||
}
|
||||
|
||||
main()
|
||||
|
||||
async function runJob({ logger, channels }: { logger: Logger; channels: Collection }) {
|
||||
const timer = new Timer()
|
||||
timer.start()
|
||||
|
||||
const queueCreator = new QueueCreator({
|
||||
channels,
|
||||
logger,
|
||||
options
|
||||
})
|
||||
const queue = await queueCreator.create()
|
||||
const job = new Job({
|
||||
queue,
|
||||
logger,
|
||||
options
|
||||
})
|
||||
|
||||
await job.run()
|
||||
|
||||
logger.success(` done in ${timer.format('HH[h] mm[m] ss[s]')}`)
|
||||
}
|
||||
import { Logger, Timer, Storage, Collection } from '@freearhey/core'
|
||||
import { QueueCreator, Job, ChannelsParser } from '../../core'
|
||||
import { Option, program } from 'commander'
|
||||
import { SITES_DIR } from '../../constants'
|
||||
import { Channel } from 'epg-grabber'
|
||||
import path from 'path'
|
||||
import { ChannelList } from '../../models'
|
||||
|
||||
program
|
||||
.addOption(new Option('-s, --site <name>', 'Name of the site to parse'))
|
||||
.addOption(
|
||||
new Option(
|
||||
'-c, --channels <path>',
|
||||
'Path to *.channels.xml file (required if the "--site" attribute is not specified)'
|
||||
)
|
||||
)
|
||||
.addOption(new Option('-o, --output <path>', 'Path to output file').default('guide.xml'))
|
||||
.addOption(new Option('-l, --lang <codes>', 'Filter channels by languages (ISO 639-1 codes)'))
|
||||
.addOption(
|
||||
new Option('-t, --timeout <milliseconds>', 'Override the default timeout for each request').env(
|
||||
'TIMEOUT'
|
||||
)
|
||||
)
|
||||
.addOption(
|
||||
new Option('-d, --delay <milliseconds>', 'Override the default delay between request').env(
|
||||
'DELAY'
|
||||
)
|
||||
)
|
||||
.addOption(new Option('-x, --proxy <url>', 'Use the specified proxy').env('PROXY'))
|
||||
.addOption(
|
||||
new Option(
|
||||
'--days <days>',
|
||||
'Override the number of days for which the program will be loaded (defaults to the value from the site config)'
|
||||
)
|
||||
.argParser(value => parseInt(value))
|
||||
.env('DAYS')
|
||||
)
|
||||
.addOption(
|
||||
new Option('--maxConnections <number>', 'Limit on the number of concurrent requests')
|
||||
.default(1)
|
||||
.env('MAX_CONNECTIONS')
|
||||
)
|
||||
.addOption(
|
||||
new Option('--gzip', 'Create a compressed version of the guide as well')
|
||||
.default(false)
|
||||
.env('GZIP')
|
||||
)
|
||||
.addOption(new Option('--curl', 'Display each request as CURL').default(false).env('CURL'))
|
||||
.parse()
|
||||
|
||||
/**
 * Parsed CLI/environment options for the "grab" command.
 * Shared with Grabber/Job/GuideManager via import.
 */
export interface GrabOptions {
  // Site to grab (mutually optional with `channels`; one must be set).
  site?: string
  // Explicit path/glob of *.channels.xml files to load.
  channels?: string
  // Output path template (default 'guide.xml').
  output: string
  // Also write a gzip-compressed copy of each guide.
  gzip: boolean
  // Print each request as a CURL command.
  curl: boolean
  // Upper bound on concurrent requests.
  maxConnections: number
  // Per-request timeout override, in milliseconds (string from CLI/env).
  timeout?: string
  // Delay between requests override, in milliseconds (string from CLI/env).
  delay?: string
  // Language codes used to filter channels.
  lang?: string
  // Number of days of programs to load.
  days?: number
  // Proxy URL (HTTP(S) or SOCKS).
  proxy?: string
}

// Options as parsed by commander from argv/environment.
const options: GrabOptions = program.opts()
|
||||
|
||||
async function main() {
|
||||
if (!options.site && !options.channels)
|
||||
throw new Error('One of the arguments must be presented: `--site` or `--channels`')
|
||||
|
||||
const logger = new Logger()
|
||||
|
||||
logger.start('starting...')
|
||||
|
||||
logger.info('config:')
|
||||
logger.tree(options)
|
||||
|
||||
logger.info('loading channels...')
|
||||
const storage = new Storage()
|
||||
const parser = new ChannelsParser({ storage })
|
||||
|
||||
let files: string[] = []
|
||||
if (options.site) {
|
||||
let pattern = path.join(SITES_DIR, options.site, '*.channels.xml')
|
||||
pattern = pattern.replace(/\\/g, '/')
|
||||
files = await storage.list(pattern)
|
||||
} else if (options.channels) {
|
||||
files = await storage.list(options.channels)
|
||||
}
|
||||
|
||||
let channels = new Collection()
|
||||
for (const filepath of files) {
|
||||
const channelList: ChannelList = await parser.parse(filepath)
|
||||
|
||||
channels = channels.concat(channelList.channels)
|
||||
}
|
||||
|
||||
if (options.lang) {
|
||||
channels = channels.filter((channel: Channel) => {
|
||||
if (!options.lang || !channel.lang) return true
|
||||
|
||||
return options.lang.includes(channel.lang)
|
||||
})
|
||||
}
|
||||
|
||||
logger.info(` found ${channels.count()} channel(s)`)
|
||||
|
||||
logger.info('run:')
|
||||
runJob({ logger, channels })
|
||||
}
|
||||
|
||||
main()
|
||||
|
||||
async function runJob({ logger, channels }: { logger: Logger; channels: Collection }) {
|
||||
const timer = new Timer()
|
||||
timer.start()
|
||||
|
||||
const queueCreator = new QueueCreator({
|
||||
channels,
|
||||
logger,
|
||||
options
|
||||
})
|
||||
const queue = await queueCreator.create()
|
||||
const job = new Job({
|
||||
queue,
|
||||
logger,
|
||||
options
|
||||
})
|
||||
|
||||
await job.run()
|
||||
|
||||
logger.success(` done in ${timer.format('HH[h] mm[m] ss[s]')}`)
|
||||
}
|
||||
|
||||
@@ -1,22 +1,22 @@
|
||||
import { parseChannels } from 'epg-grabber'
|
||||
import { Storage } from '@freearhey/core'
|
||||
import { ChannelList } from '../models'
|
||||
|
||||
type ChannelsParserProps = {
|
||||
storage: Storage
|
||||
}
|
||||
|
||||
export class ChannelsParser {
|
||||
storage: Storage
|
||||
|
||||
constructor({ storage }: ChannelsParserProps) {
|
||||
this.storage = storage
|
||||
}
|
||||
|
||||
async parse(filepath: string): Promise<ChannelList> {
|
||||
const content = await this.storage.load(filepath)
|
||||
const parsed = parseChannels(content)
|
||||
|
||||
return new ChannelList({ channels: parsed })
|
||||
}
|
||||
}
|
||||
import { parseChannels } from 'epg-grabber'
|
||||
import { Storage } from '@freearhey/core'
|
||||
import { ChannelList } from '../models'
|
||||
|
||||
interface ChannelsParserProps {
|
||||
storage: Storage
|
||||
}
|
||||
|
||||
export class ChannelsParser {
|
||||
storage: Storage
|
||||
|
||||
constructor({ storage }: ChannelsParserProps) {
|
||||
this.storage = storage
|
||||
}
|
||||
|
||||
async parse(filepath: string): Promise<ChannelList> {
|
||||
const content = await this.storage.load(filepath)
|
||||
const parsed = parseChannels(content)
|
||||
|
||||
return new ChannelList({ channels: parsed })
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,56 +1,55 @@
|
||||
import { Channel, Feed, GuideChannel, Logo, Stream } from '../models'
|
||||
import { DataLoaderData } from '../types/dataLoader'
|
||||
import { Collection } from '@freearhey/core'
|
||||
|
||||
export class DataProcessor {
|
||||
constructor() {}
|
||||
|
||||
process(data: DataLoaderData) {
|
||||
let channels = new Collection(data.channels).map(data => new Channel(data))
|
||||
const channelsKeyById = channels.keyBy((channel: Channel) => channel.id)
|
||||
|
||||
const guideChannels = new Collection(data.guides).map(data => new GuideChannel(data))
|
||||
const guideChannelsGroupedByStreamId = guideChannels.groupBy((channel: GuideChannel) =>
|
||||
channel.getStreamId()
|
||||
)
|
||||
|
||||
const streams = new Collection(data.streams).map(data => new Stream(data))
|
||||
const streamsGroupedById = streams.groupBy((stream: Stream) => stream.getId())
|
||||
|
||||
let feeds = new Collection(data.feeds).map(data =>
|
||||
new Feed(data)
|
||||
.withGuideChannels(guideChannelsGroupedByStreamId)
|
||||
.withStreams(streamsGroupedById)
|
||||
.withChannel(channelsKeyById)
|
||||
)
|
||||
const feedsKeyByStreamId = feeds.keyBy((feed: Feed) => feed.getStreamId())
|
||||
|
||||
const logos = new Collection(data.logos).map(data =>
|
||||
new Logo(data).withFeed(feedsKeyByStreamId)
|
||||
)
|
||||
const logosGroupedByChannelId = logos.groupBy((logo: Logo) => logo.channelId)
|
||||
const logosGroupedByStreamId = logos.groupBy((logo: Logo) => logo.getStreamId())
|
||||
|
||||
feeds = feeds.map((feed: Feed) => feed.withLogos(logosGroupedByStreamId))
|
||||
const feedsGroupedByChannelId = feeds.groupBy((feed: Feed) => feed.channelId)
|
||||
|
||||
channels = channels.map((channel: Channel) =>
|
||||
channel.withFeeds(feedsGroupedByChannelId).withLogos(logosGroupedByChannelId)
|
||||
)
|
||||
|
||||
return {
|
||||
guideChannelsGroupedByStreamId,
|
||||
feedsGroupedByChannelId,
|
||||
logosGroupedByChannelId,
|
||||
logosGroupedByStreamId,
|
||||
streamsGroupedById,
|
||||
feedsKeyByStreamId,
|
||||
channelsKeyById,
|
||||
guideChannels,
|
||||
channels,
|
||||
streams,
|
||||
feeds,
|
||||
logos
|
||||
}
|
||||
}
|
||||
}
|
||||
import { Channel, Feed, GuideChannel, Logo, Stream } from '../models'
|
||||
import { DataLoaderData } from '../types/dataLoader'
|
||||
import { Collection } from '@freearhey/core'
|
||||
|
||||
/**
 * Transforms raw DataLoader output into interlinked model instances
 * (channels, feeds, logos, streams, guide channels) plus pre-built
 * keyed/grouped lookup collections used by the commands.
 */
export class DataProcessor {

  /**
   * Builds model objects from the loaded data and wires up the
   * cross-references between them. The steps below are order-sensitive:
   * each index must exist before the models that consume it are built.
   *
   * @param data - raw collections produced by DataLoader.load()
   * @returns model collections together with their keyed/grouped indexes
   */
  process(data: DataLoaderData) {
    // Channels, indexed by id for O(1) lookups.
    let channels = new Collection(data.channels).map(data => new Channel(data))
    const channelsKeyById = channels.keyBy((channel: Channel) => channel.id)

    // Guide channels, grouped by the stream id they describe.
    const guideChannels = new Collection(data.guides).map(data => new GuideChannel(data))
    const guideChannelsGroupedByStreamId = guideChannels.groupBy((channel: GuideChannel) =>
      channel.getStreamId()
    )

    const streams = new Collection(data.streams).map(data => new Stream(data))
    const streamsGroupedById = streams.groupBy((stream: Stream) => stream.getId())

    // Feeds are linked to their guide channels, streams and parent channel.
    let feeds = new Collection(data.feeds).map(data =>
      new Feed(data)
        .withGuideChannels(guideChannelsGroupedByStreamId)
        .withStreams(streamsGroupedById)
        .withChannel(channelsKeyById)
    )
    const feedsKeyByStreamId = feeds.keyBy((feed: Feed) => feed.getStreamId())

    // Logos reference their feed; indexed both by channel id and stream id.
    const logos = new Collection(data.logos).map(data =>
      new Logo(data).withFeed(feedsKeyByStreamId)
    )
    const logosGroupedByChannelId = logos.groupBy((logo: Logo) => logo.channelId)
    const logosGroupedByStreamId = logos.groupBy((logo: Logo) => logo.getStreamId())

    // Back-fill logos onto feeds, then group the feeds by their channel.
    feeds = feeds.map((feed: Feed) => feed.withLogos(logosGroupedByStreamId))
    const feedsGroupedByChannelId = feeds.groupBy((feed: Feed) => feed.channelId)

    // Finally attach feeds and logos to their channels.
    channels = channels.map((channel: Channel) =>
      channel.withFeeds(feedsGroupedByChannelId).withLogos(logosGroupedByChannelId)
    )

    return {
      guideChannelsGroupedByStreamId,
      feedsGroupedByChannelId,
      logosGroupedByChannelId,
      logosGroupedByStreamId,
      streamsGroupedById,
      feedsKeyByStreamId,
      channelsKeyById,
      guideChannels,
      channels,
      streams,
      feeds,
      logos
    }
  }
}
|
||||
|
||||
@@ -1,105 +1,105 @@
|
||||
import { EPGGrabber, GrabCallbackData, EPGGrabberMock, SiteConfig, Channel } from 'epg-grabber'
|
||||
import { Logger, Collection } from '@freearhey/core'
|
||||
import { Queue, ProxyParser } from './'
|
||||
import { GrabOptions } from '../commands/epg/grab'
|
||||
import { TaskQueue, PromisyClass } from 'cwait'
|
||||
import { SocksProxyAgent } from 'socks-proxy-agent'
|
||||
|
||||
type GrabberProps = {
|
||||
logger: Logger
|
||||
queue: Queue
|
||||
options: GrabOptions
|
||||
}
|
||||
|
||||
export class Grabber {
|
||||
logger: Logger
|
||||
queue: Queue
|
||||
options: GrabOptions
|
||||
grabber: EPGGrabber | EPGGrabberMock
|
||||
|
||||
constructor({ logger, queue, options }: GrabberProps) {
|
||||
this.logger = logger
|
||||
this.queue = queue
|
||||
this.options = options
|
||||
this.grabber = process.env.NODE_ENV === 'test' ? new EPGGrabberMock() : new EPGGrabber()
|
||||
}
|
||||
|
||||
async grab(): Promise<{ channels: Collection; programs: Collection }> {
|
||||
const proxyParser = new ProxyParser()
|
||||
const taskQueue = new TaskQueue(Promise as PromisyClass, this.options.maxConnections)
|
||||
|
||||
const total = this.queue.size()
|
||||
|
||||
const channels = new Collection()
|
||||
let programs = new Collection()
|
||||
let i = 1
|
||||
|
||||
await Promise.all(
|
||||
this.queue.items().map(
|
||||
taskQueue.wrap(
|
||||
async (queueItem: { channel: Channel; config: SiteConfig; date: string }) => {
|
||||
const { channel, config, date } = queueItem
|
||||
|
||||
channels.add(channel)
|
||||
|
||||
if (this.options.timeout !== undefined) {
|
||||
const timeout = parseInt(this.options.timeout)
|
||||
config.request = { ...config.request, ...{ timeout } }
|
||||
}
|
||||
|
||||
if (this.options.delay !== undefined) {
|
||||
const delay = parseInt(this.options.delay)
|
||||
config.delay = delay
|
||||
}
|
||||
|
||||
if (this.options.proxy !== undefined) {
|
||||
const proxy = proxyParser.parse(this.options.proxy)
|
||||
|
||||
if (
|
||||
proxy.protocol &&
|
||||
['socks', 'socks5', 'socks5h', 'socks4', 'socks4a'].includes(String(proxy.protocol))
|
||||
) {
|
||||
const socksProxyAgent = new SocksProxyAgent(this.options.proxy)
|
||||
|
||||
config.request = {
|
||||
...config.request,
|
||||
...{ httpAgent: socksProxyAgent, httpsAgent: socksProxyAgent }
|
||||
}
|
||||
} else {
|
||||
config.request = { ...config.request, ...{ proxy } }
|
||||
}
|
||||
}
|
||||
|
||||
if (this.options.curl === true) {
|
||||
config.curl = true
|
||||
}
|
||||
|
||||
const _programs = await this.grabber.grab(
|
||||
channel,
|
||||
date,
|
||||
config,
|
||||
(data: GrabCallbackData, error: Error | null) => {
|
||||
const { programs, date } = data
|
||||
|
||||
this.logger.info(
|
||||
` [${i}/${total}] ${channel.site} (${channel.lang}) - ${
|
||||
channel.xmltv_id
|
||||
} - ${date.format('MMM D, YYYY')} (${programs.length} programs)`
|
||||
)
|
||||
if (i < total) i++
|
||||
|
||||
if (error) {
|
||||
this.logger.info(` ERR: ${error.message}`)
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
programs = programs.concat(new Collection(_programs))
|
||||
}
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
return { channels, programs }
|
||||
}
|
||||
}
|
||||
import { EPGGrabber, GrabCallbackData, EPGGrabberMock, SiteConfig, Channel } from 'epg-grabber'
|
||||
import { Logger, Collection } from '@freearhey/core'
|
||||
import { Queue, ProxyParser } from './'
|
||||
import { GrabOptions } from '../commands/epg/grab'
|
||||
import { TaskQueue, PromisyClass } from 'cwait'
|
||||
import { SocksProxyAgent } from 'socks-proxy-agent'
|
||||
|
||||
interface GrabberProps {
|
||||
logger: Logger
|
||||
queue: Queue
|
||||
options: GrabOptions
|
||||
}
|
||||
|
||||
export class Grabber {
|
||||
logger: Logger
|
||||
queue: Queue
|
||||
options: GrabOptions
|
||||
grabber: EPGGrabber | EPGGrabberMock
|
||||
|
||||
constructor({ logger, queue, options }: GrabberProps) {
|
||||
this.logger = logger
|
||||
this.queue = queue
|
||||
this.options = options
|
||||
this.grabber = process.env.NODE_ENV === 'test' ? new EPGGrabberMock() : new EPGGrabber()
|
||||
}
|
||||
|
||||
async grab(): Promise<{ channels: Collection; programs: Collection }> {
|
||||
const proxyParser = new ProxyParser()
|
||||
const taskQueue = new TaskQueue(Promise as PromisyClass, this.options.maxConnections)
|
||||
|
||||
const total = this.queue.size()
|
||||
|
||||
const channels = new Collection()
|
||||
let programs = new Collection()
|
||||
let i = 1
|
||||
|
||||
await Promise.all(
|
||||
this.queue.items().map(
|
||||
taskQueue.wrap(
|
||||
async (queueItem: { channel: Channel; config: SiteConfig; date: string }) => {
|
||||
const { channel, config, date } = queueItem
|
||||
|
||||
channels.add(channel)
|
||||
|
||||
if (this.options.timeout !== undefined) {
|
||||
const timeout = parseInt(this.options.timeout)
|
||||
config.request = { ...config.request, ...{ timeout } }
|
||||
}
|
||||
|
||||
if (this.options.delay !== undefined) {
|
||||
const delay = parseInt(this.options.delay)
|
||||
config.delay = delay
|
||||
}
|
||||
|
||||
if (this.options.proxy !== undefined) {
|
||||
const proxy = proxyParser.parse(this.options.proxy)
|
||||
|
||||
if (
|
||||
proxy.protocol &&
|
||||
['socks', 'socks5', 'socks5h', 'socks4', 'socks4a'].includes(String(proxy.protocol))
|
||||
) {
|
||||
const socksProxyAgent = new SocksProxyAgent(this.options.proxy)
|
||||
|
||||
config.request = {
|
||||
...config.request,
|
||||
...{ httpAgent: socksProxyAgent, httpsAgent: socksProxyAgent }
|
||||
}
|
||||
} else {
|
||||
config.request = { ...config.request, ...{ proxy } }
|
||||
}
|
||||
}
|
||||
|
||||
if (this.options.curl === true) {
|
||||
config.curl = true
|
||||
}
|
||||
|
||||
const _programs = await this.grabber.grab(
|
||||
channel,
|
||||
date,
|
||||
config,
|
||||
(data: GrabCallbackData, error: Error | null) => {
|
||||
const { programs, date } = data
|
||||
|
||||
this.logger.info(
|
||||
` [${i}/${total}] ${channel.site} (${channel.lang}) - ${
|
||||
channel.xmltv_id
|
||||
} - ${date.format('MMM D, YYYY')} (${programs.length} programs)`
|
||||
)
|
||||
if (i < total) i++
|
||||
|
||||
if (error) {
|
||||
this.logger.info(` ERR: ${error.message}`)
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
programs = programs.concat(new Collection(_programs))
|
||||
}
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
return { channels, programs }
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,111 +1,111 @@
|
||||
import { Collection, Logger, Zip, Storage, StringTemplate } from '@freearhey/core'
|
||||
import epgGrabber from 'epg-grabber'
|
||||
import { OptionValues } from 'commander'
|
||||
import { Channel, Feed, Guide } from '../models'
|
||||
import path from 'path'
|
||||
import { DataLoader, DataProcessor } from '.'
|
||||
import { DataLoaderData } from '../types/dataLoader'
|
||||
import { DataProcessorData } from '../types/dataProcessor'
|
||||
import { DATA_DIR } from '../constants'
|
||||
|
||||
type GuideManagerProps = {
|
||||
options: OptionValues
|
||||
logger: Logger
|
||||
channels: Collection
|
||||
programs: Collection
|
||||
}
|
||||
|
||||
export class GuideManager {
|
||||
options: OptionValues
|
||||
logger: Logger
|
||||
channels: Collection
|
||||
programs: Collection
|
||||
|
||||
constructor({ channels, programs, logger, options }: GuideManagerProps) {
|
||||
this.options = options
|
||||
this.logger = logger
|
||||
this.channels = channels
|
||||
this.programs = programs
|
||||
}
|
||||
|
||||
async createGuides() {
|
||||
const pathTemplate = new StringTemplate(this.options.output)
|
||||
|
||||
const processor = new DataProcessor()
|
||||
const dataStorage = new Storage(DATA_DIR)
|
||||
const loader = new DataLoader({ storage: dataStorage })
|
||||
const data: DataLoaderData = await loader.load()
|
||||
const { feedsKeyByStreamId, channelsKeyById }: DataProcessorData = processor.process(data)
|
||||
|
||||
const groupedChannels = this.channels
|
||||
.map((channel: epgGrabber.Channel) => {
|
||||
if (channel.xmltv_id && !channel.icon) {
|
||||
const foundFeed: Feed = feedsKeyByStreamId.get(channel.xmltv_id)
|
||||
if (foundFeed && foundFeed.hasLogo()) {
|
||||
channel.icon = foundFeed.getLogoUrl()
|
||||
} else {
|
||||
const [channelId] = channel.xmltv_id.split('@')
|
||||
const foundChannel: Channel = channelsKeyById.get(channelId)
|
||||
if (foundChannel && foundChannel.hasLogo()) {
|
||||
channel.icon = foundChannel.getLogoUrl()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return channel
|
||||
})
|
||||
.orderBy([
|
||||
(channel: epgGrabber.Channel) => channel.index,
|
||||
(channel: epgGrabber.Channel) => channel.xmltv_id
|
||||
])
|
||||
.uniqBy(
|
||||
(channel: epgGrabber.Channel) => `${channel.xmltv_id}:${channel.site}:${channel.lang}`
|
||||
)
|
||||
.groupBy((channel: epgGrabber.Channel) => {
|
||||
return pathTemplate.format({ lang: channel.lang || 'en', site: channel.site || '' })
|
||||
})
|
||||
|
||||
const groupedPrograms = this.programs
|
||||
.orderBy([
|
||||
(program: epgGrabber.Program) => program.channel,
|
||||
(program: epgGrabber.Program) => program.start
|
||||
])
|
||||
.groupBy((program: epgGrabber.Program) => {
|
||||
const lang =
|
||||
program.titles && program.titles.length && program.titles[0].lang
|
||||
? program.titles[0].lang
|
||||
: 'en'
|
||||
|
||||
return pathTemplate.format({ lang, site: program.site || '' })
|
||||
})
|
||||
|
||||
for (const groupKey of groupedPrograms.keys()) {
|
||||
const guide = new Guide({
|
||||
filepath: groupKey,
|
||||
gzip: this.options.gzip,
|
||||
channels: new Collection(groupedChannels.get(groupKey)),
|
||||
programs: new Collection(groupedPrograms.get(groupKey))
|
||||
})
|
||||
|
||||
await this.save(guide)
|
||||
}
|
||||
}
|
||||
|
||||
async save(guide: Guide) {
|
||||
const storage = new Storage(path.dirname(guide.filepath))
|
||||
const xmlFilepath = guide.filepath
|
||||
const xmlFilename = path.basename(xmlFilepath)
|
||||
this.logger.info(` saving to "${xmlFilepath}"...`)
|
||||
const xmltv = guide.toString()
|
||||
await storage.save(xmlFilename, xmltv)
|
||||
|
||||
if (guide.gzip) {
|
||||
const zip = new Zip()
|
||||
const compressed = zip.compress(xmltv)
|
||||
const gzFilepath = `${guide.filepath}.gz`
|
||||
const gzFilename = path.basename(gzFilepath)
|
||||
this.logger.info(` saving to "${gzFilepath}"...`)
|
||||
await storage.save(gzFilename, compressed)
|
||||
}
|
||||
}
|
||||
}
|
||||
import { Collection, Logger, Zip, Storage, StringTemplate } from '@freearhey/core'
|
||||
import epgGrabber from 'epg-grabber'
|
||||
import { OptionValues } from 'commander'
|
||||
import { Channel, Feed, Guide } from '../models'
|
||||
import path from 'path'
|
||||
import { DataLoader, DataProcessor } from '.'
|
||||
import { DataLoaderData } from '../types/dataLoader'
|
||||
import { DataProcessorData } from '../types/dataProcessor'
|
||||
import { DATA_DIR } from '../constants'
|
||||
|
||||
/** Dependencies required by GuideManager. */
interface GuideManagerProps {
  options: OptionValues
  logger: Logger
  channels: Collection
  programs: Collection
}

/**
 * Groups the grabbed channels and programs into output guides according
 * to the output path template and writes each guide to disk as XMLTV
 * (optionally with a gzip-compressed copy).
 */
export class GuideManager {
  options: OptionValues
  logger: Logger
  channels: Collection
  programs: Collection

  constructor({ channels, programs, logger, options }: GuideManagerProps) {
    this.options = options
    this.logger = logger
    this.channels = channels
    this.programs = programs
  }

  /**
   * Splits channels and programs into groups keyed by the formatted output
   * path (lang/site placeholders) and saves one guide per group.
   */
  async createGuides() {
    const pathTemplate = new StringTemplate(this.options.output)

    const processor = new DataProcessor()
    const dataStorage = new Storage(DATA_DIR)
    const loader = new DataLoader({ storage: dataStorage })
    const data: DataLoaderData = await loader.load()
    const { feedsKeyByStreamId, channelsKeyById }: DataProcessorData = processor.process(data)

    const groupedChannels = this.channels
      .map((channel: epgGrabber.Channel) => {
        // Fill in a missing icon from the feed's logo, falling back to
        // the channel's own logo from the database.
        if (channel.xmltv_id && !channel.icon) {
          const foundFeed: Feed = feedsKeyByStreamId.get(channel.xmltv_id)
          if (foundFeed && foundFeed.hasLogo()) {
            channel.icon = foundFeed.getLogoUrl()
          } else {
            const [channelId] = channel.xmltv_id.split('@')
            const foundChannel: Channel = channelsKeyById.get(channelId)
            if (foundChannel && foundChannel.hasLogo()) {
              channel.icon = foundChannel.getLogoUrl()
            }
          }
        }

        return channel
      })
      .orderBy([
        (channel: epgGrabber.Channel) => channel.index,
        (channel: epgGrabber.Channel) => channel.xmltv_id
      ])
      // Deduplicate: keep one entry per (xmltv_id, site, lang).
      .uniqBy(
        (channel: epgGrabber.Channel) => `${channel.xmltv_id}:${channel.site}:${channel.lang}`
      )
      .groupBy((channel: epgGrabber.Channel) => {
        return pathTemplate.format({ lang: channel.lang || 'en', site: channel.site || '' })
      })

    const groupedPrograms = this.programs
      .orderBy([
        (program: epgGrabber.Program) => program.channel,
        (program: epgGrabber.Program) => program.start
      ])
      .groupBy((program: epgGrabber.Program) => {
        // A program's language comes from its first title, defaulting to 'en'.
        const lang =
          program.titles && program.titles.length && program.titles[0].lang
            ? program.titles[0].lang
            : 'en'

        return pathTemplate.format({ lang, site: program.site || '' })
      })

    // One guide per program group; channel groups share the same keys.
    for (const groupKey of groupedPrograms.keys()) {
      const guide = new Guide({
        filepath: groupKey,
        gzip: this.options.gzip,
        channels: new Collection(groupedChannels.get(groupKey)),
        programs: new Collection(groupedPrograms.get(groupKey))
      })

      await this.save(guide)
    }
  }

  /**
   * Writes the guide to its filepath; when guide.gzip is set, also writes
   * a gzip-compressed copy alongside it.
   */
  async save(guide: Guide) {
    const storage = new Storage(path.dirname(guide.filepath))
    const xmlFilepath = guide.filepath
    const xmlFilename = path.basename(xmlFilepath)
    this.logger.info(` saving to "${xmlFilepath}"...`)
    const xmltv = guide.toString()
    await storage.save(xmlFilename, xmltv)

    if (guide.gzip) {
      const zip = new Zip()
      const compressed = zip.compress(xmltv)
      const gzFilepath = `${guide.filepath}.gz`
      const gzFilename = path.basename(gzFilepath)
      this.logger.info(` saving to "${gzFilepath}"...`)
      await storage.save(gzFilename, compressed)
    }
  }
}
|
||||
|
||||
@@ -1,55 +1,55 @@
|
||||
type Column = {
|
||||
name: string
|
||||
nowrap?: boolean
|
||||
align?: string
|
||||
colspan?: number
|
||||
}
|
||||
|
||||
type DataItem = {
|
||||
value: string
|
||||
nowrap?: boolean
|
||||
align?: string
|
||||
colspan?: number
|
||||
}[]
|
||||
|
||||
export class HTMLTable {
|
||||
data: DataItem[]
|
||||
columns: Column[]
|
||||
|
||||
constructor(data: DataItem[], columns: Column[]) {
|
||||
this.data = data
|
||||
this.columns = columns
|
||||
}
|
||||
|
||||
toString() {
|
||||
let output = '<table>\r\n'
|
||||
|
||||
output += ' <thead>\r\n <tr>'
|
||||
for (const column of this.columns) {
|
||||
const nowrap = column.nowrap ? ' nowrap' : ''
|
||||
const align = column.align ? ` align="${column.align}"` : ''
|
||||
const colspan = column.colspan ? ` colspan="${column.colspan}"` : ''
|
||||
|
||||
output += `<th${align}${nowrap}${colspan}>${column.name}</th>`
|
||||
}
|
||||
output += '</tr>\r\n </thead>\r\n'
|
||||
|
||||
output += ' <tbody>\r\n'
|
||||
for (const row of this.data) {
|
||||
output += ' <tr>'
|
||||
for (const item of row) {
|
||||
const nowrap = item.nowrap ? ' nowrap' : ''
|
||||
const align = item.align ? ` align="${item.align}"` : ''
|
||||
const colspan = item.colspan ? ` colspan="${item.colspan}"` : ''
|
||||
|
||||
output += `<td${align}${nowrap}${colspan}>${item.value}</td>`
|
||||
}
|
||||
output += '</tr>\r\n'
|
||||
}
|
||||
output += ' </tbody>\r\n'
|
||||
|
||||
output += '</table>'
|
||||
|
||||
return output
|
||||
}
|
||||
}
|
||||
/** Header cell description for HTMLTable. */
interface Column {
  // Text rendered inside the <th>.
  name: string
  // Emit the bare `nowrap` attribute on the <th>.
  nowrap?: boolean
  // Value for the `align` attribute, if any.
  align?: string
  // Value for the `colspan` attribute, if any.
  colspan?: number
}

/** One body row: a list of data cells rendered as <td> elements. */
type DataItem = {
  // Text rendered inside the <td>.
  value: string
  // Emit the bare `nowrap` attribute on the <td>.
  nowrap?: boolean
  // Value for the `align` attribute, if any.
  align?: string
  // Value for the `colspan` attribute, if any.
  colspan?: number
}[]
|
||||
|
||||
export class HTMLTable {
|
||||
data: DataItem[]
|
||||
columns: Column[]
|
||||
|
||||
constructor(data: DataItem[], columns: Column[]) {
|
||||
this.data = data
|
||||
this.columns = columns
|
||||
}
|
||||
|
||||
toString() {
|
||||
let output = '<table>\r\n'
|
||||
|
||||
output += ' <thead>\r\n <tr>'
|
||||
for (const column of this.columns) {
|
||||
const nowrap = column.nowrap ? ' nowrap' : ''
|
||||
const align = column.align ? ` align="${column.align}"` : ''
|
||||
const colspan = column.colspan ? ` colspan="${column.colspan}"` : ''
|
||||
|
||||
output += `<th${align}${nowrap}${colspan}>${column.name}</th>`
|
||||
}
|
||||
output += '</tr>\r\n </thead>\r\n'
|
||||
|
||||
output += ' <tbody>\r\n'
|
||||
for (const row of this.data) {
|
||||
output += ' <tr>'
|
||||
for (const item of row) {
|
||||
const nowrap = item.nowrap ? ' nowrap' : ''
|
||||
const align = item.align ? ` align="${item.align}"` : ''
|
||||
const colspan = item.colspan ? ` colspan="${item.colspan}"` : ''
|
||||
|
||||
output += `<td${align}${nowrap}${colspan}>${item.value}</td>`
|
||||
}
|
||||
output += '</tr>\r\n'
|
||||
}
|
||||
output += ' </tbody>\r\n'
|
||||
|
||||
output += '</table>'
|
||||
|
||||
return output
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,34 +1,34 @@
|
||||
import { Logger } from '@freearhey/core'
|
||||
import { Queue, Grabber, GuideManager } from '.'
|
||||
import { GrabOptions } from '../commands/epg/grab'
|
||||
|
||||
/** Constructor dependencies for Job. */
type JobProps = {
  // Grab command options forwarded to the Grabber and GuideManager.
  options: GrabOptions
  logger: Logger
  // Queue of channel/date items to be grabbed.
  queue: Queue
}
|
||||
|
||||
/**
 * Runs a single grab job: fetches channels and programs with a Grabber,
 * then hands the results to a GuideManager to create the guide files.
 */
export class Job {
  options: GrabOptions
  logger: Logger
  grabber: Grabber

  constructor({ queue, logger, options }: JobProps) {
    this.options = options
    this.logger = logger
    this.grabber = new Grabber({ logger, queue, options })
  }

  /** Grabs all queued items and writes the resulting guides. */
  async run() {
    const { channels, programs } = await this.grabber.grab()

    const manager = new GuideManager({
      channels,
      programs,
      options: this.options,
      logger: this.logger
    })

    await manager.createGuides()
  }
}
|
||||
import { Logger } from '@freearhey/core'
|
||||
import { Queue, Grabber, GuideManager } from '.'
|
||||
import { GrabOptions } from '../commands/epg/grab'
|
||||
|
||||
/** Constructor dependencies for Job. */
interface JobProps {
  // Grab command options forwarded to the Grabber and GuideManager.
  options: GrabOptions
  logger: Logger
  // Queue of channel/date items to be grabbed.
  queue: Queue
}
|
||||
|
||||
export class Job {
|
||||
options: GrabOptions
|
||||
logger: Logger
|
||||
grabber: Grabber
|
||||
|
||||
constructor({ queue, logger, options }: JobProps) {
|
||||
this.options = options
|
||||
this.logger = logger
|
||||
this.grabber = new Grabber({ logger, queue, options })
|
||||
}
|
||||
|
||||
async run() {
|
||||
const { channels, programs } = await this.grabber.grab()
|
||||
|
||||
const manager = new GuideManager({
|
||||
channels,
|
||||
programs,
|
||||
options: this.options,
|
||||
logger: this.logger
|
||||
})
|
||||
|
||||
await manager.createGuides()
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,31 +1,31 @@
|
||||
import { URL } from 'node:url'
|
||||
|
||||
/** Normalized pieces of a proxy URL produced by ProxyParser.parse(). */
type ProxyParserResult = {
  // URL scheme without the trailing ':' (e.g. 'http'), or null when empty.
  protocol: string | null
  // Present only when the URL carries a username or password.
  auth?: {
    username?: string
    password?: string
  }
  // Hostname portion of the URL.
  host: string
  // Numeric port, or null when the URL has no explicit port.
  port: number | null
}
|
||||
|
||||
export class ProxyParser {
|
||||
parse(_url: string): ProxyParserResult {
|
||||
const parsed = new URL(_url)
|
||||
|
||||
const result: ProxyParserResult = {
|
||||
protocol: parsed.protocol.replace(':', '') || null,
|
||||
host: parsed.hostname,
|
||||
port: parsed.port ? parseInt(parsed.port) : null
|
||||
}
|
||||
|
||||
if (parsed.username || parsed.password) {
|
||||
result.auth = {}
|
||||
if (parsed.username) result.auth.username = parsed.username
|
||||
if (parsed.password) result.auth.password = parsed.password
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
}
|
||||
import { URL } from 'node:url'
|
||||
|
||||
/** Normalized pieces of a proxy URL produced by ProxyParser.parse(). */
interface ProxyParserResult {
  // URL scheme without the trailing ':' (e.g. 'http'), or null when empty.
  protocol: string | null
  // Present only when the URL carries a username or password.
  auth?: {
    username?: string
    password?: string
  }
  // Hostname portion of the URL.
  host: string
  // Numeric port, or null when the URL has no explicit port.
  port: number | null
}
|
||||
|
||||
export class ProxyParser {
|
||||
parse(_url: string): ProxyParserResult {
|
||||
const parsed = new URL(_url)
|
||||
|
||||
const result: ProxyParserResult = {
|
||||
protocol: parsed.protocol.replace(':', '') || null,
|
||||
host: parsed.hostname,
|
||||
port: parsed.port ? parseInt(parsed.port) : null
|
||||
}
|
||||
|
||||
if (parsed.username || parsed.password) {
|
||||
result.auth = {}
|
||||
if (parsed.username) result.auth.username = parsed.username
|
||||
if (parsed.password) result.auth.password = parsed.password
|
||||
}
|
||||
|
||||
return result
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,45 +1,45 @@
|
||||
import { Dictionary } from '@freearhey/core'
|
||||
import { SiteConfig, Channel } from 'epg-grabber'
|
||||
|
||||
/** One grab task: a channel/date pair plus the site config to use. */
export type QueueItem = {
  channel: Channel
  // Date to grab, as produced by DateTime.toJSON().
  date: string
  config: SiteConfig
  // Starts as null; set by Queue.add().
  error: string | null
}
|
||||
|
||||
/** Keyed store of grab tasks (one per channel/date pair) backed by a Dictionary. */
export class Queue {
  _data: Dictionary

  constructor() {
    this._data = new Dictionary()
  }

  /** True when no item is stored under the given key. */
  missing(key: string): boolean {
    return this._data.missing(key)
  }

  /** Stores a queue item under `key`; its error field starts as null. */
  add(
    key: string,
    { channel, config, date }: { channel: Channel; date: string | null; config: SiteConfig }
  ) {
    this._data.set(key, {
      channel,
      date,
      config,
      error: null
    })
  }

  /** Number of stored items. */
  size(): number {
    return Object.values(this._data.data()).length
  }

  /** All stored items as a flat array. */
  items(): QueueItem[] {
    return Object.values(this._data.data()) as QueueItem[]
  }

  /** True when the queue holds no items. */
  isEmpty(): boolean {
    return this.size() === 0
  }
}
|
||||
import { Dictionary } from '@freearhey/core'
|
||||
import { SiteConfig, Channel } from 'epg-grabber'
|
||||
|
||||
/** One grab task: a channel/date pair plus the site config to use. */
export interface QueueItem {
  channel: Channel
  // Date to grab, as produced by DateTime.toJSON().
  date: string
  config: SiteConfig
  // Starts as null; set by Queue.add().
  error: string | null
}
|
||||
|
||||
export class Queue {
|
||||
_data: Dictionary
|
||||
|
||||
constructor() {
|
||||
this._data = new Dictionary()
|
||||
}
|
||||
|
||||
missing(key: string): boolean {
|
||||
return this._data.missing(key)
|
||||
}
|
||||
|
||||
add(
|
||||
key: string,
|
||||
{ channel, config, date }: { channel: Channel; date: string | null; config: SiteConfig }
|
||||
) {
|
||||
this._data.set(key, {
|
||||
channel,
|
||||
date,
|
||||
config,
|
||||
error: null
|
||||
})
|
||||
}
|
||||
|
||||
size(): number {
|
||||
return Object.values(this._data.data()).length
|
||||
}
|
||||
|
||||
items(): QueueItem[] {
|
||||
return Object.values(this._data.data()) as QueueItem[]
|
||||
}
|
||||
|
||||
isEmpty(): boolean {
|
||||
return this.size() === 0
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,63 +1,63 @@
|
||||
import { Storage, Collection, DateTime, Logger } from '@freearhey/core'
|
||||
import { SITES_DIR, DATA_DIR } from '../constants'
|
||||
import { GrabOptions } from '../commands/epg/grab'
|
||||
import { ConfigLoader, Queue } from './'
|
||||
import { SiteConfig } from 'epg-grabber'
|
||||
import path from 'path'
|
||||
|
||||
/** Constructor dependencies for QueueCreator. */
type QueueCreatorProps = {
  logger: Logger
  // Grab command options (e.g. number of days to queue).
  options: GrabOptions
  // Channels to turn into queue items.
  channels: Collection
}
|
||||
|
||||
/**
 * Builds the grab Queue: one item per (channel, date) combination for every
 * channel that has a site, site_id and name.
 */
export class QueueCreator {
  configLoader: ConfigLoader
  logger: Logger
  sitesStorage: Storage
  dataStorage: Storage
  channels: Collection
  options: GrabOptions

  constructor({ channels, logger, options }: QueueCreatorProps) {
    this.channels = channels
    this.logger = logger
    this.sitesStorage = new Storage()
    this.dataStorage = new Storage(DATA_DIR)
    this.options = options
    this.configLoader = new ConfigLoader()
  }

  /**
   * Creates the queue. For each usable channel the site config is loaded,
   * a missing xmltv_id falls back to site_id, and one item per requested
   * day is added, keyed by site:lang:xmltv_id:date so duplicates are skipped.
   */
  async create(): Promise<Queue> {
    let index = 0
    const queue = new Queue()
    for (const channel of this.channels.all()) {
      // Every channel gets an index, even those skipped by the guard below.
      channel.index = index++
      if (!channel.site || !channel.site_id || !channel.name) continue

      const configPath = path.resolve(SITES_DIR, `${channel.site}/${channel.site}.config.js`)
      const config: SiteConfig = await this.configLoader.load(configPath)

      if (!channel.xmltv_id) {
        channel.xmltv_id = channel.site_id
      }

      // CLI option wins over the site config; default is a single day.
      const days = this.options.days || config.days || 1
      const currDate = new DateTime(process.env.CURR_DATE || new Date().toISOString())
      const dates = Array.from({ length: days }, (_, day) => currDate.add(day, 'd'))
      dates.forEach((date: DateTime) => {
        const dateString = date.toJSON()
        const key = `${channel.site}:${channel.lang}:${channel.xmltv_id}:${dateString}`
        if (queue.missing(key)) {
          queue.add(key, {
            channel,
            date: dateString,
            config
          })
        }
      })
    }

    return queue
  }
}
|
||||
import { Storage, Collection, DateTime, Logger } from '@freearhey/core'
|
||||
import { SITES_DIR, DATA_DIR } from '../constants'
|
||||
import { GrabOptions } from '../commands/epg/grab'
|
||||
import { ConfigLoader, Queue } from './'
|
||||
import { SiteConfig } from 'epg-grabber'
|
||||
import path from 'path'
|
||||
|
||||
/** Constructor dependencies for QueueCreator. */
interface QueueCreatorProps {
  logger: Logger
  // Grab command options (e.g. number of days to queue).
  options: GrabOptions
  // Channels to turn into queue items.
  channels: Collection
}
|
||||
|
||||
export class QueueCreator {
|
||||
configLoader: ConfigLoader
|
||||
logger: Logger
|
||||
sitesStorage: Storage
|
||||
dataStorage: Storage
|
||||
channels: Collection
|
||||
options: GrabOptions
|
||||
|
||||
constructor({ channels, logger, options }: QueueCreatorProps) {
|
||||
this.channels = channels
|
||||
this.logger = logger
|
||||
this.sitesStorage = new Storage()
|
||||
this.dataStorage = new Storage(DATA_DIR)
|
||||
this.options = options
|
||||
this.configLoader = new ConfigLoader()
|
||||
}
|
||||
|
||||
async create(): Promise<Queue> {
|
||||
let index = 0
|
||||
const queue = new Queue()
|
||||
for (const channel of this.channels.all()) {
|
||||
channel.index = index++
|
||||
if (!channel.site || !channel.site_id || !channel.name) continue
|
||||
|
||||
const configPath = path.resolve(SITES_DIR, `${channel.site}/${channel.site}.config.js`)
|
||||
const config: SiteConfig = await this.configLoader.load(configPath)
|
||||
|
||||
if (!channel.xmltv_id) {
|
||||
channel.xmltv_id = channel.site_id
|
||||
}
|
||||
|
||||
const days = this.options.days || config.days || 1
|
||||
const currDate = new DateTime(process.env.CURR_DATE || new Date().toISOString())
|
||||
const dates = Array.from({ length: days }, (_, day) => currDate.add(day, 'd'))
|
||||
dates.forEach((date: DateTime) => {
|
||||
const dateString = date.toJSON()
|
||||
const key = `${channel.site}:${channel.lang}:${channel.xmltv_id}:${dateString}`
|
||||
if (queue.missing(key)) {
|
||||
queue.add(key, {
|
||||
channel,
|
||||
date: dateString,
|
||||
config
|
||||
})
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
return queue
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,77 +1,77 @@
|
||||
/**
|
||||
* Sorts an array by the result of running each element through an iteratee function.
|
||||
* Creates a shallow copy of the array before sorting to avoid mutating the original.
|
||||
*
|
||||
* @param {Array} arr - The array to sort
|
||||
* @param {Function} fn - The iteratee function to compute sort values
|
||||
* @returns {Array} A new sorted array
|
||||
*
|
||||
* @example
|
||||
* const users = [{name: 'john', age: 30}, {name: 'jane', age: 25}];
|
||||
* sortBy(users, x => x.age); // [{name: 'jane', age: 25}, {name: 'john', age: 30}]
|
||||
*/
|
||||
export const sortBy = <T>(arr: T[], fn: (item: T) => number | string): T[] =>
|
||||
[...arr].sort((a, b) => (fn(a) > fn(b) ? 1 : -1))
|
||||
|
||||
/**
 * Sorts an array by multiple criteria with customizable sort orders.
 * Supports ascending (default) and descending order for each criterion.
 *
 * @param {Array} arr - The array to sort
 * @param {Array<Function>} fns - Array of iteratee functions to compute sort values
 * @param {Array<string>} orders - Array of sort orders ('asc' or 'desc'), defaults to all 'asc'
 * @returns {Array} A new sorted array
 *
 * @example
 * const users = [{name: 'john', age: 30}, {name: 'jane', age: 25}, {name: 'bob', age: 30}];
 * orderBy(users, [x => x.age, x => x.name], ['desc', 'asc']);
 * // [{name: 'bob', age: 30}, {name: 'john', age: 30}, {name: 'jane', age: 25}]
 */
export const orderBy = (
  arr: Array<unknown>,
  fns: Array<(item: unknown) => string | number>,
  orders: Array<string> = []
): Array<unknown> =>
  [...arr].sort((a, b) =>
    // Fold over the criteria: `acc` holds the first non-zero comparison and
    // short-circuits later criteria via ||. Each step yields 1 when the pair
    // is out of order for that criterion, 0 when equal (so the next criterion
    // decides), and -1 otherwise.
    fns.reduce(
      (acc, fn, i) =>
        acc ||
        ((orders[i] === 'desc' ? fn(b) > fn(a) : fn(a) > fn(b)) ? 1 : fn(a) === fn(b) ? 0 : -1),
      0
    )
  )
|
||||
|
||||
/**
|
||||
* Creates a duplicate-free version of an array using an iteratee function to generate
|
||||
* the criterion by which uniqueness is computed. Only the first occurrence of each
|
||||
* element is kept.
|
||||
*
|
||||
* @param {Array} arr - The array to inspect
|
||||
* @param {Function} fn - The iteratee function to compute uniqueness criterion
|
||||
* @returns {Array} A new duplicate-free array
|
||||
*
|
||||
* @example
|
||||
* const users = [{id: 1, name: 'john'}, {id: 2, name: 'jane'}, {id: 1, name: 'john'}];
|
||||
* uniqBy(users, x => x.id); // [{id: 1, name: 'john'}, {id: 2, name: 'jane'}]
|
||||
*/
|
||||
export const uniqBy = <T>(arr: T[], fn: (item: T) => unknown): T[] =>
|
||||
arr.filter((item, index) => arr.findIndex(x => fn(x) === fn(item)) === index)
|
||||
|
||||
/**
 * Converts a string to start case (capitalizes the first letter of each word).
 * Handles camelCase, snake_case, kebab-case, and regular spaces.
 *
 * @param {string} str - The string to convert
 * @returns {string} The start case string
 *
 * @example
 * startCase('hello_world'); // "Hello World"
 * startCase('helloWorld'); // "Hello World"
 * startCase('hello-world'); // "Hello World"
 * startCase('hello world'); // "Hello World"
 */
export const startCase = (str: string): string =>
  str
    .replace(/([a-z])([A-Z])/g, '$1 $2') // Split camelCase
    .replace(/[_-]/g, ' ') // Replace underscores and hyphens with spaces
    // \b and \w are ASCII-only, so words starting with non-Latin letters
    // are left uncapitalized.
    .replace(/\b\w/g, c => c.toUpperCase()) // Capitalize first letter of each word
|
||||
/**
|
||||
* Sorts an array by the result of running each element through an iteratee function.
|
||||
* Creates a shallow copy of the array before sorting to avoid mutating the original.
|
||||
*
|
||||
* @param {Array} arr - The array to sort
|
||||
* @param {Function} fn - The iteratee function to compute sort values
|
||||
* @returns {Array} A new sorted array
|
||||
*
|
||||
* @example
|
||||
* const users = [{name: 'john', age: 30}, {name: 'jane', age: 25}];
|
||||
* sortBy(users, x => x.age); // [{name: 'jane', age: 25}, {name: 'john', age: 30}]
|
||||
*/
|
||||
export const sortBy = <T>(arr: T[], fn: (item: T) => number | string): T[] =>
|
||||
[...arr].sort((a, b) => (fn(a) > fn(b) ? 1 : -1))
|
||||
|
||||
/**
|
||||
* Sorts an array by multiple criteria with customizable sort orders.
|
||||
* Supports ascending (default) and descending order for each criterion.
|
||||
*
|
||||
* @param {Array} arr - The array to sort
|
||||
* @param {Array<Function>} fns - Array of iteratee functions to compute sort values
|
||||
* @param {Array<string>} orders - Array of sort orders ('asc' or 'desc'), defaults to all 'asc'
|
||||
* @returns {Array} A new sorted array
|
||||
*
|
||||
* @example
|
||||
* const users = [{name: 'john', age: 30}, {name: 'jane', age: 25}, {name: 'bob', age: 30}];
|
||||
* orderBy(users, [x => x.age, x => x.name], ['desc', 'asc']);
|
||||
* // [{name: 'bob', age: 30}, {name: 'john', age: 30}, {name: 'jane', age: 25}]
|
||||
*/
|
||||
export const orderBy = (
|
||||
arr: unknown[],
|
||||
fns: ((item: unknown) => string | number)[],
|
||||
orders: string[] = []
|
||||
): unknown[] =>
|
||||
[...arr].sort((a, b) =>
|
||||
fns.reduce(
|
||||
(acc, fn, i) =>
|
||||
acc ||
|
||||
((orders[i] === 'desc' ? fn(b) > fn(a) : fn(a) > fn(b)) ? 1 : fn(a) === fn(b) ? 0 : -1),
|
||||
0
|
||||
)
|
||||
)
|
||||
|
||||
/**
|
||||
* Creates a duplicate-free version of an array using an iteratee function to generate
|
||||
* the criterion by which uniqueness is computed. Only the first occurrence of each
|
||||
* element is kept.
|
||||
*
|
||||
* @param {Array} arr - The array to inspect
|
||||
* @param {Function} fn - The iteratee function to compute uniqueness criterion
|
||||
* @returns {Array} A new duplicate-free array
|
||||
*
|
||||
* @example
|
||||
* const users = [{id: 1, name: 'john'}, {id: 2, name: 'jane'}, {id: 1, name: 'john'}];
|
||||
* uniqBy(users, x => x.id); // [{id: 1, name: 'john'}, {id: 2, name: 'jane'}]
|
||||
*/
|
||||
export const uniqBy = <T>(arr: T[], fn: (item: T) => unknown): T[] =>
|
||||
arr.filter((item, index) => arr.findIndex(x => fn(x) === fn(item)) === index)
|
||||
|
||||
/**
|
||||
* Converts a string to start case (capitalizes the first letter of each word).
|
||||
* Handles camelCase, snake_case, kebab-case, and regular spaces.
|
||||
*
|
||||
* @param {string} str - The string to convert
|
||||
* @returns {string} The start case string
|
||||
*
|
||||
* @example
|
||||
* startCase('hello_world'); // "Hello World"
|
||||
* startCase('helloWorld'); // "Hello World"
|
||||
* startCase('hello-world'); // "Hello World"
|
||||
* startCase('hello world'); // "Hello World"
|
||||
*/
|
||||
export const startCase = (str: string): string =>
|
||||
str
|
||||
.replace(/([a-z])([A-Z])/g, '$1 $2') // Split camelCase
|
||||
.replace(/[_-]/g, ' ') // Replace underscores and hyphens with spaces
|
||||
.replace(/\b\w/g, c => c.toUpperCase()) // Capitalize first letter of each word
|
||||
|
||||
@@ -1,164 +1,164 @@
|
||||
import { ChannelData, ChannelSearchableData } from '../types/channel'
|
||||
import { Collection, Dictionary } from '@freearhey/core'
|
||||
import { Stream, Feed, Logo, GuideChannel } from './'
|
||||
|
||||
/**
 * Domain model for a TV channel from the channels database, with feeds and
 * logos attached after construction via withFeeds()/withLogos().
 */
export class Channel {
  id?: string
  name?: string
  altNames?: Collection
  network?: string
  owners?: Collection
  countryCode?: string
  subdivisionCode?: string
  cityName?: string
  categoryIds?: Collection
  isNSFW: boolean = false
  launched?: string
  closed?: string
  replacedBy?: string
  website?: string
  feeds?: Collection
  logos: Collection = new Collection()

  /**
   * Builds a Channel from raw data; with no argument all fields keep their
   * defaults. Falsy optional fields are normalized to undefined.
   */
  constructor(data?: ChannelData) {
    if (!data) return

    this.id = data.id
    this.name = data.name
    this.altNames = new Collection(data.alt_names)
    this.network = data.network || undefined
    this.owners = new Collection(data.owners)
    this.countryCode = data.country
    this.subdivisionCode = data.subdivision || undefined
    this.cityName = data.city || undefined
    this.categoryIds = new Collection(data.categories)
    this.isNSFW = data.is_nsfw
    this.launched = data.launched || undefined
    this.closed = data.closed || undefined
    this.replacedBy = data.replaced_by || undefined
    this.website = data.website || undefined
  }

  /** Attaches this channel's feeds, looked up by channel id. */
  withFeeds(feedsGroupedByChannelId: Dictionary): this {
    if (this.id) this.feeds = new Collection(feedsGroupedByChannelId.get(this.id))

    return this
  }

  /** Attaches this channel's logos, looked up by channel id. */
  withLogos(logosGroupedByChannelId: Dictionary): this {
    if (this.id) this.logos = new Collection(logosGroupedByChannelId.get(this.id))

    return this
  }

  /** Feeds attached via withFeeds(), or an empty collection. */
  getFeeds(): Collection {
    if (!this.feeds) return new Collection()

    return this.feeds
  }

  /** All guide channels aggregated across this channel's feeds. */
  getGuideChannels(): Collection {
    let channels = new Collection()

    this.getFeeds().forEach((feed: Feed) => {
      channels = channels.concat(feed.getGuideChannels())
    })

    return channels
  }

  /** Unique site names of this channel's guide channels. */
  getGuideChannelNames(): Collection {
    return this.getGuideChannels()
      .map((channel: GuideChannel) => channel.siteName)
      .uniq()
  }

  /** All streams aggregated across this channel's feeds. */
  getStreams(): Collection {
    let streams = new Collection()

    this.getFeeds().forEach((feed: Feed) => {
      streams = streams.concat(feed.getStreams())
    })

    return streams
  }

  /** Unique names of this channel's streams. */
  getStreamNames(): Collection {
    return this.getStreams()
      .map((stream: Stream) => stream.getName())
      .uniq()
  }

  /** Unique full names of this channel's feeds. */
  getFeedFullNames(): Collection {
    return this.getFeeds()
      .map((feed: Feed) => feed.getFullName())
      .uniq()
  }

  getName(): string {
    return this.name || ''
  }

  getId(): string {
    return this.id || ''
  }

  getAltNames(): Collection {
    return this.altNames || new Collection()
  }

  /**
   * Logos ordered by preference: feed-less/main-feed logos first, then by
   * format level (PNG > JPEG > others > SVG), then by size closest to 512x512.
   */
  getLogos(): Collection {
    // 1 when the logo belongs to no feed or to the main feed, 0 otherwise.
    function feed(logo: Logo): number {
      if (!logo.feed) return 1
      if (logo.feed.isMain) return 1

      return 0
    }

    // Numeric preference per image format; higher ranks earlier (desc).
    function format(logo: Logo): number {
      const levelByFormat: { [key: string]: number } = {
        SVG: 0,
        PNG: 3,
        APNG: 1,
        WebP: 1,
        AVIF: 1,
        JPEG: 2,
        GIF: 1
      }

      return logo.format ? levelByFormat[logo.format] : 0
    }

    // Distance from the preferred 512x512 size; smaller ranks earlier (asc).
    function size(logo: Logo): number {
      return Math.abs(512 - logo.width) + Math.abs(512 - logo.height)
    }

    return this.logos.orderBy([feed, format, size], ['desc', 'desc', 'asc'], false)
  }

  /** Best-ranked logo, or undefined when the channel has none. */
  getLogo(): Logo | undefined {
    return this.getLogos().first()
  }

  hasLogo(): boolean {
    return this.getLogos().notEmpty()
  }

  /** URL of the best-ranked logo, or '' when there is none. */
  getLogoUrl(): string {
    const logo = this.getLogo()
    if (!logo) return ''

    return logo.url || ''
  }

  /** Flat, search-index-friendly representation of this channel. */
  getSearchable(): ChannelSearchableData {
    return {
      id: this.getId(),
      name: this.getName(),
      altNames: this.getAltNames().all(),
      guideNames: this.getGuideChannelNames().all(),
      streamNames: this.getStreamNames().all(),
      feedFullNames: this.getFeedFullNames().all()
    }
  }
}
|
||||
import { ChannelData, ChannelSearchableData } from '../types/channel'
|
||||
import { Collection, Dictionary } from '@freearhey/core'
|
||||
import { Stream, Feed, Logo, GuideChannel } from './'
|
||||
|
||||
/**
 * Domain model for a TV channel from the channels database, with feeds and
 * logos attached after construction via withFeeds()/withLogos().
 */
export class Channel {
  id?: string
  name?: string
  altNames?: Collection
  network?: string
  owners?: Collection
  countryCode?: string
  subdivisionCode?: string
  cityName?: string
  categoryIds?: Collection
  isNSFW = false
  launched?: string
  closed?: string
  replacedBy?: string
  website?: string
  feeds?: Collection
  logos: Collection = new Collection()

  /**
   * Builds a Channel from raw data; with no argument all fields keep their
   * defaults. Falsy optional fields are normalized to undefined.
   */
  constructor(data?: ChannelData) {
    if (!data) return

    this.id = data.id
    this.name = data.name
    this.altNames = new Collection(data.alt_names)
    this.network = data.network || undefined
    this.owners = new Collection(data.owners)
    this.countryCode = data.country
    this.subdivisionCode = data.subdivision || undefined
    this.cityName = data.city || undefined
    this.categoryIds = new Collection(data.categories)
    this.isNSFW = data.is_nsfw
    this.launched = data.launched || undefined
    this.closed = data.closed || undefined
    this.replacedBy = data.replaced_by || undefined
    this.website = data.website || undefined
  }

  /** Attaches this channel's feeds, looked up by channel id. */
  withFeeds(feedsGroupedByChannelId: Dictionary): this {
    if (this.id) this.feeds = new Collection(feedsGroupedByChannelId.get(this.id))

    return this
  }

  /** Attaches this channel's logos, looked up by channel id. */
  withLogos(logosGroupedByChannelId: Dictionary): this {
    if (this.id) this.logos = new Collection(logosGroupedByChannelId.get(this.id))

    return this
  }

  /** Feeds attached via withFeeds(), or an empty collection. */
  getFeeds(): Collection {
    if (!this.feeds) return new Collection()

    return this.feeds
  }

  /** All guide channels aggregated across this channel's feeds. */
  getGuideChannels(): Collection {
    let channels = new Collection()

    this.getFeeds().forEach((feed: Feed) => {
      channels = channels.concat(feed.getGuideChannels())
    })

    return channels
  }

  /** Unique site names of this channel's guide channels. */
  getGuideChannelNames(): Collection {
    return this.getGuideChannels()
      .map((channel: GuideChannel) => channel.siteName)
      .uniq()
  }

  /** All streams aggregated across this channel's feeds. */
  getStreams(): Collection {
    let streams = new Collection()

    this.getFeeds().forEach((feed: Feed) => {
      streams = streams.concat(feed.getStreams())
    })

    return streams
  }

  /** Unique names of this channel's streams. */
  getStreamNames(): Collection {
    return this.getStreams()
      .map((stream: Stream) => stream.getName())
      .uniq()
  }

  /** Unique full names of this channel's feeds. */
  getFeedFullNames(): Collection {
    return this.getFeeds()
      .map((feed: Feed) => feed.getFullName())
      .uniq()
  }

  getName(): string {
    return this.name || ''
  }

  getId(): string {
    return this.id || ''
  }

  getAltNames(): Collection {
    return this.altNames || new Collection()
  }

  /**
   * Logos ordered by preference: feed-less/main-feed logos first, then by
   * format level (PNG > JPEG > others > SVG), then by size closest to 512x512.
   */
  getLogos(): Collection {
    // 1 when the logo belongs to no feed or to the main feed, 0 otherwise.
    function feed(logo: Logo): number {
      if (!logo.feed) return 1
      if (logo.feed.isMain) return 1

      return 0
    }

    // Numeric preference per image format; higher ranks earlier (desc).
    function format(logo: Logo): number {
      const levelByFormat: Record<string, number> = {
        SVG: 0,
        PNG: 3,
        APNG: 1,
        WebP: 1,
        AVIF: 1,
        JPEG: 2,
        GIF: 1
      }

      return logo.format ? levelByFormat[logo.format] : 0
    }

    // Distance from the preferred 512x512 size; smaller ranks earlier (asc).
    function size(logo: Logo): number {
      return Math.abs(512 - logo.width) + Math.abs(512 - logo.height)
    }

    return this.logos.orderBy([feed, format, size], ['desc', 'desc', 'asc'], false)
  }

  /** Best-ranked logo, or undefined when the channel has none. */
  getLogo(): Logo | undefined {
    return this.getLogos().first()
  }

  hasLogo(): boolean {
    return this.getLogos().notEmpty()
  }

  /** URL of the best-ranked logo, or '' when there is none. */
  getLogoUrl(): string {
    const logo = this.getLogo()
    if (!logo) return ''

    return logo.url || ''
  }

  /** Flat, search-index-friendly representation of this channel. */
  getSearchable(): ChannelSearchableData {
    return {
      id: this.getId(),
      name: this.getName(),
      altNames: this.getAltNames().all(),
      guideNames: this.getGuideChannelNames().all(),
      streamNames: this.getStreamNames().all(),
      feedFullNames: this.getFeedFullNames().all()
    }
  }
}
|
||||
|
||||
@@ -1,77 +1,77 @@
|
||||
import { Collection } from '@freearhey/core'
|
||||
import epgGrabber from 'epg-grabber'
|
||||
|
||||
export class ChannelList {
|
||||
channels: Collection = new Collection()
|
||||
|
||||
constructor(data: { channels: epgGrabber.Channel[] }) {
|
||||
this.channels = new Collection(data.channels)
|
||||
}
|
||||
|
||||
add(channel: epgGrabber.Channel): this {
|
||||
this.channels.add(channel)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
get(siteId: string): epgGrabber.Channel | undefined {
|
||||
return this.channels.find((channel: epgGrabber.Channel) => channel.site_id == siteId)
|
||||
}
|
||||
|
||||
sort(): this {
|
||||
this.channels = this.channels.orderBy([
|
||||
(channel: epgGrabber.Channel) => channel.lang || '_',
|
||||
(channel: epgGrabber.Channel) => (channel.xmltv_id ? channel.xmltv_id.toLowerCase() : '0'),
|
||||
(channel: epgGrabber.Channel) => channel.site_id
|
||||
])
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
toString() {
|
||||
function escapeString(value: string, defaultValue: string = '') {
|
||||
if (!value) return defaultValue
|
||||
|
||||
const regex = new RegExp(
|
||||
'((?:[\0-\x08\x0B\f\x0E-\x1F\uFFFD\uFFFE\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]))|([\\x7F-\\x84]|[\\x86-\\x9F]|[\\uFDD0-\\uFDEF]|(?:\\uD83F[\\uDFFE\\uDFFF])|(?:\\uD87F[\\uDF' +
|
||||
'FE\\uDFFF])|(?:\\uD8BF[\\uDFFE\\uDFFF])|(?:\\uD8FF[\\uDFFE\\uDFFF])|(?:\\uD93F[\\uDFFE\\uD' +
|
||||
'FFF])|(?:\\uD97F[\\uDFFE\\uDFFF])|(?:\\uD9BF[\\uDFFE\\uDFFF])|(?:\\uD9FF[\\uDFFE\\uDFFF])' +
|
||||
'|(?:\\uDA3F[\\uDFFE\\uDFFF])|(?:\\uDA7F[\\uDFFE\\uDFFF])|(?:\\uDABF[\\uDFFE\\uDFFF])|(?:\\' +
|
||||
'uDAFF[\\uDFFE\\uDFFF])|(?:\\uDB3F[\\uDFFE\\uDFFF])|(?:\\uDB7F[\\uDFFE\\uDFFF])|(?:\\uDBBF' +
|
||||
'[\\uDFFE\\uDFFF])|(?:\\uDBFF[\\uDFFE\\uDFFF])(?:[\\0-\\t\\x0B\\f\\x0E-\\u2027\\u202A-\\uD7FF\\' +
|
||||
'uE000-\\uFFFF]|[\\uD800-\\uDBFF][\\uDC00-\\uDFFF]|[\\uD800-\\uDBFF](?![\\uDC00-\\uDFFF])|' +
|
||||
'(?:[^\\uD800-\\uDBFF]|^)[\\uDC00-\\uDFFF]))',
|
||||
'g'
|
||||
)
|
||||
|
||||
value = String(value || '').replace(regex, '')
|
||||
|
||||
return value
|
||||
.replace(/&/g, '&')
|
||||
.replace(/</g, '<')
|
||||
.replace(/>/g, '>')
|
||||
.replace(/"/g, '"')
|
||||
.replace(/'/g, ''')
|
||||
.replace(/\n|\r/g, ' ')
|
||||
.replace(/ +/g, ' ')
|
||||
.trim()
|
||||
}
|
||||
|
||||
let output = '<?xml version="1.0" encoding="UTF-8"?>\r\n<channels>\r\n'
|
||||
|
||||
this.channels.forEach((channel: epgGrabber.Channel) => {
|
||||
const logo = channel.logo ? ` logo="${channel.logo}"` : ''
|
||||
const xmltv_id = channel.xmltv_id ? escapeString(channel.xmltv_id) : ''
|
||||
const lang = channel.lang || ''
|
||||
const site_id = channel.site_id || ''
|
||||
const site = channel.site || ''
|
||||
const displayName = channel.name ? escapeString(channel.name) : ''
|
||||
|
||||
output += ` <channel site="${site}" lang="${lang}" xmltv_id="${xmltv_id}" site_id="${site_id}"${logo}>${displayName}</channel>\r\n`
|
||||
})
|
||||
|
||||
output += '</channels>\r\n'
|
||||
|
||||
return output
|
||||
}
|
||||
}
|
||||
import { Collection } from '@freearhey/core'
|
||||
import epgGrabber from 'epg-grabber'
|
||||
|
||||
export class ChannelList {
|
||||
channels: Collection = new Collection()
|
||||
|
||||
constructor(data: { channels: epgGrabber.Channel[] }) {
|
||||
this.channels = new Collection(data.channels)
|
||||
}
|
||||
|
||||
add(channel: epgGrabber.Channel): this {
|
||||
this.channels.add(channel)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
get(siteId: string): epgGrabber.Channel | undefined {
|
||||
return this.channels.find((channel: epgGrabber.Channel) => channel.site_id == siteId)
|
||||
}
|
||||
|
||||
sort(): this {
|
||||
this.channels = this.channels.orderBy([
|
||||
(channel: epgGrabber.Channel) => channel.lang || '_',
|
||||
(channel: epgGrabber.Channel) => (channel.xmltv_id ? channel.xmltv_id.toLowerCase() : '0'),
|
||||
(channel: epgGrabber.Channel) => channel.site_id
|
||||
])
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
toString() {
  /**
   * Escapes a value for safe embedding in XML text or attribute content:
   * strips XML-illegal/discouraged code points, escapes the five predefined
   * XML entities, and collapses whitespace.
   */
  function escapeString(value: string, defaultValue = '') {
    if (!value) return defaultValue

    // Matches code points that are invalid or discouraged in XML 1.0.
    // NOTE(review): the concatenation looks like it may be missing a '|'
    // before the final '(?:[\\0-…' alternative — verify against upstream.
    const regex = new RegExp(
      '((?:[\0-\x08\x0B\f\x0E-\x1F\uFFFD\uFFFE\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]))|([\\x7F-\\x84]|[\\x86-\\x9F]|[\\uFDD0-\\uFDEF]|(?:\\uD83F[\\uDFFE\\uDFFF])|(?:\\uD87F[\\uDF' +
        'FE\\uDFFF])|(?:\\uD8BF[\\uDFFE\\uDFFF])|(?:\\uD8FF[\\uDFFE\\uDFFF])|(?:\\uD93F[\\uDFFE\\uD' +
        'FFF])|(?:\\uD97F[\\uDFFE\\uDFFF])|(?:\\uD9BF[\\uDFFE\\uDFFF])|(?:\\uD9FF[\\uDFFE\\uDFFF])' +
        '|(?:\\uDA3F[\\uDFFE\\uDFFF])|(?:\\uDA7F[\\uDFFE\\uDFFF])|(?:\\uDABF[\\uDFFE\\uDFFF])|(?:\\' +
        'uDAFF[\\uDFFE\\uDFFF])|(?:\\uDB3F[\\uDFFE\\uDFFF])|(?:\\uDB7F[\\uDFFE\\uDFFF])|(?:\\uDBBF' +
        '[\\uDFFE\\uDFFF])|(?:\\uDBFF[\\uDFFE\\uDFFF])(?:[\\0-\\t\\x0B\\f\\x0E-\\u2027\\u202A-\\uD7FF\\' +
        'uE000-\\uFFFF]|[\\uD800-\\uDBFF][\\uDC00-\\uDFFF]|[\\uD800-\\uDBFF](?![\\uDC00-\\uDFFF])|' +
        '(?:[^\\uD800-\\uDBFF]|^)[\\uDC00-\\uDFFF]))',
      'g'
    )

    value = String(value || '').replace(regex, '')

    return value
      .replace(/&/g, '&amp;')
      .replace(/</g, '&lt;')
      .replace(/>/g, '&gt;')
      .replace(/"/g, '&quot;')
      .replace(/'/g, '&apos;')
      .replace(/\n|\r/g, ' ')
      .replace(/ +/g, ' ')
      .trim()
  }

  let output = '<?xml version="1.0" encoding="UTF-8"?>\r\n<channels>\r\n'

  this.channels.forEach((channel: epgGrabber.Channel) => {
    // Escape EVERY attribute value, not only xmltv_id/name: logo URLs in
    // particular often contain '&', which would otherwise emit invalid XML.
    const logo = channel.logo ? ` logo="${escapeString(channel.logo)}"` : ''
    const xmltv_id = channel.xmltv_id ? escapeString(channel.xmltv_id) : ''
    const lang = channel.lang ? escapeString(channel.lang) : ''
    const site_id = channel.site_id ? escapeString(channel.site_id) : ''
    const site = channel.site ? escapeString(channel.site) : ''
    const displayName = channel.name ? escapeString(channel.name) : ''

    output += ` <channel site="${site}" lang="${lang}" xmltv_id="${xmltv_id}" site_id="${site_id}"${logo}>${displayName}</channel>\r\n`
  })

  output += '</channels>\r\n'

  return output
}
|
||||
}
|
||||
|
||||
@@ -1,124 +1,124 @@
|
||||
import { Collection, Dictionary } from '@freearhey/core'
|
||||
import { FeedData } from '../types/feed'
|
||||
import { Logo, Channel } from '.'
|
||||
|
||||
export class Feed {
|
||||
channelId: string
|
||||
channel?: Channel
|
||||
id: string
|
||||
name: string
|
||||
isMain: boolean
|
||||
broadcastAreaCodes: Collection
|
||||
languageCodes: Collection
|
||||
timezoneIds: Collection
|
||||
videoFormat: string
|
||||
guideChannels?: Collection
|
||||
streams?: Collection
|
||||
logos: Collection = new Collection()
|
||||
|
||||
constructor(data: FeedData) {
|
||||
this.channelId = data.channel
|
||||
this.id = data.id
|
||||
this.name = data.name
|
||||
this.isMain = data.is_main
|
||||
this.broadcastAreaCodes = new Collection(data.broadcast_area)
|
||||
this.languageCodes = new Collection(data.languages)
|
||||
this.timezoneIds = new Collection(data.timezones)
|
||||
this.videoFormat = data.video_format
|
||||
}
|
||||
|
||||
withChannel(channelsKeyById: Dictionary): this {
|
||||
this.channel = channelsKeyById.get(this.channelId)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
withStreams(streamsGroupedById: Dictionary): this {
|
||||
this.streams = new Collection(streamsGroupedById.get(`${this.channelId}@${this.id}`))
|
||||
|
||||
if (this.isMain) {
|
||||
this.streams = this.streams.concat(new Collection(streamsGroupedById.get(this.channelId)))
|
||||
}
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
withGuideChannels(guideChannelsGroupedByStreamId: Dictionary): this {
|
||||
this.guideChannels = new Collection(
|
||||
guideChannelsGroupedByStreamId.get(`${this.channelId}@${this.id}`)
|
||||
)
|
||||
|
||||
if (this.isMain) {
|
||||
this.guideChannels = this.guideChannels.concat(
|
||||
new Collection(guideChannelsGroupedByStreamId.get(this.channelId))
|
||||
)
|
||||
}
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
withLogos(logosGroupedByStreamId: Dictionary): this {
|
||||
this.logos = new Collection(logosGroupedByStreamId.get(this.getStreamId()))
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
getGuideChannels(): Collection {
|
||||
if (!this.guideChannels) return new Collection()
|
||||
|
||||
return this.guideChannels
|
||||
}
|
||||
|
||||
getStreams(): Collection {
|
||||
if (!this.streams) return new Collection()
|
||||
|
||||
return this.streams
|
||||
}
|
||||
|
||||
getFullName(): string {
|
||||
if (!this.channel) return ''
|
||||
|
||||
return `${this.channel.name} ${this.name}`
|
||||
}
|
||||
|
||||
getStreamId(): string {
|
||||
return `${this.channelId}@${this.id}`
|
||||
}
|
||||
|
||||
getLogos(): Collection {
|
||||
function format(logo: Logo): number {
|
||||
const levelByFormat: { [key: string]: number } = {
|
||||
SVG: 0,
|
||||
PNG: 3,
|
||||
APNG: 1,
|
||||
WebP: 1,
|
||||
AVIF: 1,
|
||||
JPEG: 2,
|
||||
GIF: 1
|
||||
}
|
||||
|
||||
return logo.format ? levelByFormat[logo.format] : 0
|
||||
}
|
||||
|
||||
function size(logo: Logo): number {
|
||||
return Math.abs(512 - logo.width) + Math.abs(512 - logo.height)
|
||||
}
|
||||
|
||||
return this.logos.orderBy([format, size], ['desc', 'asc'], false)
|
||||
}
|
||||
|
||||
getLogo(): Logo | undefined {
|
||||
return this.getLogos().first()
|
||||
}
|
||||
|
||||
hasLogo(): boolean {
|
||||
return this.getLogos().notEmpty()
|
||||
}
|
||||
|
||||
getLogoUrl(): string {
|
||||
const logo = this.getLogo()
|
||||
if (!logo) return ''
|
||||
|
||||
return logo.url || ''
|
||||
}
|
||||
}
|
||||
import { Collection, Dictionary } from '@freearhey/core'
|
||||
import { FeedData } from '../types/feed'
|
||||
import { Logo, Channel } from '.'
|
||||
|
||||
export class Feed {
|
||||
channelId: string
|
||||
channel?: Channel
|
||||
id: string
|
||||
name: string
|
||||
isMain: boolean
|
||||
broadcastAreaCodes: Collection
|
||||
languageCodes: Collection
|
||||
timezoneIds: Collection
|
||||
videoFormat: string
|
||||
guideChannels?: Collection
|
||||
streams?: Collection
|
||||
logos: Collection = new Collection()
|
||||
|
||||
constructor(data: FeedData) {
|
||||
this.channelId = data.channel
|
||||
this.id = data.id
|
||||
this.name = data.name
|
||||
this.isMain = data.is_main
|
||||
this.broadcastAreaCodes = new Collection(data.broadcast_area)
|
||||
this.languageCodes = new Collection(data.languages)
|
||||
this.timezoneIds = new Collection(data.timezones)
|
||||
this.videoFormat = data.video_format
|
||||
}
|
||||
|
||||
withChannel(channelsKeyById: Dictionary): this {
|
||||
this.channel = channelsKeyById.get(this.channelId)
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
withStreams(streamsGroupedById: Dictionary): this {
|
||||
this.streams = new Collection(streamsGroupedById.get(`${this.channelId}@${this.id}`))
|
||||
|
||||
if (this.isMain) {
|
||||
this.streams = this.streams.concat(new Collection(streamsGroupedById.get(this.channelId)))
|
||||
}
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
withGuideChannels(guideChannelsGroupedByStreamId: Dictionary): this {
|
||||
this.guideChannels = new Collection(
|
||||
guideChannelsGroupedByStreamId.get(`${this.channelId}@${this.id}`)
|
||||
)
|
||||
|
||||
if (this.isMain) {
|
||||
this.guideChannels = this.guideChannels.concat(
|
||||
new Collection(guideChannelsGroupedByStreamId.get(this.channelId))
|
||||
)
|
||||
}
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
withLogos(logosGroupedByStreamId: Dictionary): this {
|
||||
this.logos = new Collection(logosGroupedByStreamId.get(this.getStreamId()))
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
getGuideChannels(): Collection {
|
||||
if (!this.guideChannels) return new Collection()
|
||||
|
||||
return this.guideChannels
|
||||
}
|
||||
|
||||
getStreams(): Collection {
|
||||
if (!this.streams) return new Collection()
|
||||
|
||||
return this.streams
|
||||
}
|
||||
|
||||
getFullName(): string {
|
||||
if (!this.channel) return ''
|
||||
|
||||
return `${this.channel.name} ${this.name}`
|
||||
}
|
||||
|
||||
getStreamId(): string {
|
||||
return `${this.channelId}@${this.id}`
|
||||
}
|
||||
|
||||
getLogos(): Collection {
|
||||
function format(logo: Logo): number {
|
||||
const levelByFormat: Record<string, number> = {
|
||||
SVG: 0,
|
||||
PNG: 3,
|
||||
APNG: 1,
|
||||
WebP: 1,
|
||||
AVIF: 1,
|
||||
JPEG: 2,
|
||||
GIF: 1
|
||||
}
|
||||
|
||||
return logo.format ? levelByFormat[logo.format] : 0
|
||||
}
|
||||
|
||||
function size(logo: Logo): number {
|
||||
return Math.abs(512 - logo.width) + Math.abs(512 - logo.height)
|
||||
}
|
||||
|
||||
return this.logos.orderBy([format, size], ['desc', 'asc'], false)
|
||||
}
|
||||
|
||||
getLogo(): Logo | undefined {
|
||||
return this.getLogos().first()
|
||||
}
|
||||
|
||||
hasLogo(): boolean {
|
||||
return this.getLogos().notEmpty()
|
||||
}
|
||||
|
||||
getLogoUrl(): string {
|
||||
const logo = this.getLogo()
|
||||
if (!logo) return ''
|
||||
|
||||
return logo.url || ''
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,35 +1,35 @@
|
||||
import { Collection, DateTime } from '@freearhey/core'
|
||||
import { generateXMLTV } from 'epg-grabber'
|
||||
|
||||
type GuideData = {
|
||||
channels: Collection
|
||||
programs: Collection
|
||||
filepath: string
|
||||
gzip: boolean
|
||||
}
|
||||
|
||||
export class Guide {
|
||||
channels: Collection
|
||||
programs: Collection
|
||||
filepath: string
|
||||
gzip: boolean
|
||||
|
||||
constructor({ channels, programs, filepath, gzip }: GuideData) {
|
||||
this.channels = channels
|
||||
this.programs = programs
|
||||
this.filepath = filepath
|
||||
this.gzip = gzip || false
|
||||
}
|
||||
|
||||
toString() {
|
||||
const currDate = new DateTime(process.env.CURR_DATE || new Date().toISOString(), {
|
||||
timezone: 'UTC'
|
||||
})
|
||||
|
||||
return generateXMLTV({
|
||||
channels: this.channels.all(),
|
||||
programs: this.programs.all(),
|
||||
date: currDate.toJSON()
|
||||
})
|
||||
}
|
||||
}
|
||||
import { Collection, DateTime } from '@freearhey/core'
|
||||
import { generateXMLTV } from 'epg-grabber'
|
||||
|
||||
interface GuideData {
|
||||
channels: Collection
|
||||
programs: Collection
|
||||
filepath: string
|
||||
gzip: boolean
|
||||
}
|
||||
|
||||
export class Guide {
|
||||
channels: Collection
|
||||
programs: Collection
|
||||
filepath: string
|
||||
gzip: boolean
|
||||
|
||||
constructor({ channels, programs, filepath, gzip }: GuideData) {
|
||||
this.channels = channels
|
||||
this.programs = programs
|
||||
this.filepath = filepath
|
||||
this.gzip = gzip || false
|
||||
}
|
||||
|
||||
toString() {
|
||||
const currDate = new DateTime(process.env.CURR_DATE || new Date().toISOString(), {
|
||||
timezone: 'UTC'
|
||||
})
|
||||
|
||||
return generateXMLTV({
|
||||
channels: this.channels.all(),
|
||||
programs: this.programs.all(),
|
||||
date: currDate.toJSON()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,24 +1,24 @@
|
||||
import { Dictionary } from '@freearhey/core'
|
||||
import { OWNER, REPO } from '../constants'
|
||||
|
||||
type IssueProps = {
|
||||
number: number
|
||||
labels: string[]
|
||||
data: Dictionary
|
||||
}
|
||||
|
||||
export class Issue {
|
||||
number: number
|
||||
labels: string[]
|
||||
data: Dictionary
|
||||
|
||||
constructor({ number, labels, data }: IssueProps) {
|
||||
this.number = number
|
||||
this.labels = labels
|
||||
this.data = data
|
||||
}
|
||||
|
||||
getURL() {
|
||||
return `https://github.com/${OWNER}/${REPO}/issues/${this.number}`
|
||||
}
|
||||
}
|
||||
import { Dictionary } from '@freearhey/core'
|
||||
import { OWNER, REPO } from '../constants'
|
||||
|
||||
interface IssueProps {
|
||||
number: number
|
||||
labels: string[]
|
||||
data: Dictionary
|
||||
}
|
||||
|
||||
export class Issue {
|
||||
number: number
|
||||
labels: string[]
|
||||
data: Dictionary
|
||||
|
||||
constructor({ number, labels, data }: IssueProps) {
|
||||
this.number = number
|
||||
this.labels = labels
|
||||
this.data = data
|
||||
}
|
||||
|
||||
getURL() {
|
||||
return `https://github.com/${OWNER}/${REPO}/issues/${this.number}`
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,41 +1,41 @@
|
||||
import { Collection, type Dictionary } from '@freearhey/core'
|
||||
import type { LogoData } from '../types/logo'
|
||||
import { type Feed } from './feed'
|
||||
|
||||
export class Logo {
|
||||
channelId?: string
|
||||
feedId?: string
|
||||
feed?: Feed
|
||||
tags: Collection = new Collection()
|
||||
width: number = 0
|
||||
height: number = 0
|
||||
format?: string
|
||||
url?: string
|
||||
|
||||
constructor(data?: LogoData) {
|
||||
if (!data) return
|
||||
|
||||
this.channelId = data.channel
|
||||
this.feedId = data.feed || undefined
|
||||
this.tags = new Collection(data.tags)
|
||||
this.width = data.width
|
||||
this.height = data.height
|
||||
this.format = data.format || undefined
|
||||
this.url = data.url
|
||||
}
|
||||
|
||||
withFeed(feedsKeyByStreamId: Dictionary): this {
|
||||
if (!this.feedId) return this
|
||||
|
||||
this.feed = feedsKeyByStreamId.get(this.getStreamId())
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
getStreamId(): string {
|
||||
if (!this.channelId) return ''
|
||||
if (!this.feedId) return this.channelId
|
||||
|
||||
return `${this.channelId}@${this.feedId}`
|
||||
}
|
||||
}
|
||||
import { Collection, type Dictionary } from '@freearhey/core'
|
||||
import type { LogoData } from '../types/logo'
|
||||
import { type Feed } from './feed'
|
||||
|
||||
export class Logo {
|
||||
channelId?: string
|
||||
feedId?: string
|
||||
feed?: Feed
|
||||
tags: Collection = new Collection()
|
||||
width = 0
|
||||
height = 0
|
||||
format?: string
|
||||
url?: string
|
||||
|
||||
constructor(data?: LogoData) {
|
||||
if (!data) return
|
||||
|
||||
this.channelId = data.channel
|
||||
this.feedId = data.feed || undefined
|
||||
this.tags = new Collection(data.tags)
|
||||
this.width = data.width
|
||||
this.height = data.height
|
||||
this.format = data.format || undefined
|
||||
this.url = data.url
|
||||
}
|
||||
|
||||
withFeed(feedsKeyByStreamId: Dictionary): this {
|
||||
if (!this.feedId) return this
|
||||
|
||||
this.feed = feedsKeyByStreamId.get(this.getStreamId())
|
||||
|
||||
return this
|
||||
}
|
||||
|
||||
getStreamId(): string {
|
||||
if (!this.channelId) return ''
|
||||
if (!this.feedId) return this.channelId
|
||||
|
||||
return `${this.channelId}@${this.feedId}`
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,63 +1,63 @@
|
||||
import { Collection } from '@freearhey/core'
|
||||
import { Issue } from './'
|
||||
|
||||
enum StatusCode {
|
||||
DOWN = 'down',
|
||||
WARNING = 'warning',
|
||||
OK = 'ok'
|
||||
}
|
||||
|
||||
type Status = {
|
||||
code: StatusCode
|
||||
emoji: string
|
||||
}
|
||||
|
||||
type SiteProps = {
|
||||
domain: string
|
||||
totalChannels?: number
|
||||
markedChannels?: number
|
||||
issues: Collection
|
||||
}
|
||||
|
||||
export class Site {
|
||||
domain: string
|
||||
totalChannels: number
|
||||
markedChannels: number
|
||||
issues: Collection
|
||||
|
||||
constructor({ domain, totalChannels = 0, markedChannels = 0, issues }: SiteProps) {
|
||||
this.domain = domain
|
||||
this.totalChannels = totalChannels
|
||||
this.markedChannels = markedChannels
|
||||
this.issues = issues
|
||||
}
|
||||
|
||||
getStatus(): Status {
|
||||
const issuesWithStatusDown = this.issues.filter((issue: Issue) =>
|
||||
issue.labels.find(label => label === 'status:down')
|
||||
)
|
||||
if (issuesWithStatusDown.notEmpty())
|
||||
return {
|
||||
code: StatusCode.DOWN,
|
||||
emoji: '🔴'
|
||||
}
|
||||
|
||||
const issuesWithStatusWarning = this.issues.filter((issue: Issue) =>
|
||||
issue.labels.find(label => label === 'status:warning')
|
||||
)
|
||||
if (issuesWithStatusWarning.notEmpty())
|
||||
return {
|
||||
code: StatusCode.WARNING,
|
||||
emoji: '🟡'
|
||||
}
|
||||
|
||||
return {
|
||||
code: StatusCode.OK,
|
||||
emoji: '🟢'
|
||||
}
|
||||
}
|
||||
|
||||
getIssues(): Collection {
|
||||
return this.issues.map((issue: Issue) => issue.getURL())
|
||||
}
|
||||
}
|
||||
import { Collection } from '@freearhey/core'
|
||||
import { Issue } from './'
|
||||
|
||||
enum StatusCode {
|
||||
DOWN = 'down',
|
||||
WARNING = 'warning',
|
||||
OK = 'ok'
|
||||
}
|
||||
|
||||
interface Status {
|
||||
code: StatusCode
|
||||
emoji: string
|
||||
}
|
||||
|
||||
interface SiteProps {
|
||||
domain: string
|
||||
totalChannels?: number
|
||||
markedChannels?: number
|
||||
issues: Collection
|
||||
}
|
||||
|
||||
export class Site {
|
||||
domain: string
|
||||
totalChannels: number
|
||||
markedChannels: number
|
||||
issues: Collection
|
||||
|
||||
constructor({ domain, totalChannels = 0, markedChannels = 0, issues }: SiteProps) {
|
||||
this.domain = domain
|
||||
this.totalChannels = totalChannels
|
||||
this.markedChannels = markedChannels
|
||||
this.issues = issues
|
||||
}
|
||||
|
||||
getStatus(): Status {
|
||||
const issuesWithStatusDown = this.issues.filter((issue: Issue) =>
|
||||
issue.labels.find(label => label === 'status:down')
|
||||
)
|
||||
if (issuesWithStatusDown.notEmpty())
|
||||
return {
|
||||
code: StatusCode.DOWN,
|
||||
emoji: '🔴'
|
||||
}
|
||||
|
||||
const issuesWithStatusWarning = this.issues.filter((issue: Issue) =>
|
||||
issue.labels.find(label => label === 'status:warning')
|
||||
)
|
||||
if (issuesWithStatusWarning.notEmpty())
|
||||
return {
|
||||
code: StatusCode.WARNING,
|
||||
emoji: '🟡'
|
||||
}
|
||||
|
||||
return {
|
||||
code: StatusCode.OK,
|
||||
emoji: '🟢'
|
||||
}
|
||||
}
|
||||
|
||||
getIssues(): Collection {
|
||||
return this.issues.map((issue: Issue) => issue.getURL())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,58 +1,58 @@
|
||||
import type { StreamData } from '../types/stream'
|
||||
import { Feed, Channel } from './index'
|
||||
|
||||
export class Stream {
|
||||
name?: string
|
||||
url: string
|
||||
id?: string
|
||||
channelId?: string
|
||||
channel?: Channel
|
||||
feedId?: string
|
||||
feed?: Feed
|
||||
filepath?: string
|
||||
line?: number
|
||||
label?: string
|
||||
verticalResolution?: number
|
||||
isInterlaced?: boolean
|
||||
referrer?: string
|
||||
userAgent?: string
|
||||
groupTitle: string = 'Undefined'
|
||||
removed: boolean = false
|
||||
|
||||
constructor(data: StreamData) {
|
||||
const id = data.channel && data.feed ? [data.channel, data.feed].join('@') : data.channel
|
||||
const { verticalResolution, isInterlaced } = parseQuality(data.quality)
|
||||
|
||||
this.id = id || undefined
|
||||
this.channelId = data.channel || undefined
|
||||
this.feedId = data.feed || undefined
|
||||
this.name = data.name || undefined
|
||||
this.url = data.url
|
||||
this.referrer = data.referrer || undefined
|
||||
this.userAgent = data.user_agent || undefined
|
||||
this.verticalResolution = verticalResolution || undefined
|
||||
this.isInterlaced = isInterlaced || undefined
|
||||
this.label = data.label || undefined
|
||||
}
|
||||
|
||||
getId(): string {
|
||||
return this.id || ''
|
||||
}
|
||||
|
||||
getName(): string {
|
||||
return this.name || ''
|
||||
}
|
||||
}
|
||||
|
||||
function parseQuality(quality: string | null): {
|
||||
verticalResolution: number | null
|
||||
isInterlaced: boolean | null
|
||||
} {
|
||||
if (!quality) return { verticalResolution: null, isInterlaced: null }
|
||||
const [, verticalResolutionString] = quality.match(/^(\d+)/) || [null, undefined]
|
||||
const isInterlaced = /i$/i.test(quality)
|
||||
let verticalResolution = 0
|
||||
if (verticalResolutionString) verticalResolution = parseInt(verticalResolutionString)
|
||||
|
||||
return { verticalResolution, isInterlaced }
|
||||
}
|
||||
import type { StreamData } from '../types/stream'
|
||||
import { Feed, Channel } from './index'
|
||||
|
||||
export class Stream {
|
||||
name?: string
|
||||
url: string
|
||||
id?: string
|
||||
channelId?: string
|
||||
channel?: Channel
|
||||
feedId?: string
|
||||
feed?: Feed
|
||||
filepath?: string
|
||||
line?: number
|
||||
label?: string
|
||||
verticalResolution?: number
|
||||
isInterlaced?: boolean
|
||||
referrer?: string
|
||||
userAgent?: string
|
||||
groupTitle = 'Undefined'
|
||||
removed = false
|
||||
|
||||
constructor(data: StreamData) {
|
||||
const id = data.channel && data.feed ? [data.channel, data.feed].join('@') : data.channel
|
||||
const { verticalResolution, isInterlaced } = parseQuality(data.quality)
|
||||
|
||||
this.id = id || undefined
|
||||
this.channelId = data.channel || undefined
|
||||
this.feedId = data.feed || undefined
|
||||
this.name = data.name || undefined
|
||||
this.url = data.url
|
||||
this.referrer = data.referrer || undefined
|
||||
this.userAgent = data.user_agent || undefined
|
||||
this.verticalResolution = verticalResolution || undefined
|
||||
this.isInterlaced = isInterlaced || undefined
|
||||
this.label = data.label || undefined
|
||||
}
|
||||
|
||||
getId(): string {
|
||||
return this.id || ''
|
||||
}
|
||||
|
||||
getName(): string {
|
||||
return this.name || ''
|
||||
}
|
||||
}
|
||||
|
||||
function parseQuality(quality: string | null): {
|
||||
verticalResolution: number | null
|
||||
isInterlaced: boolean | null
|
||||
} {
|
||||
if (!quality) return { verticalResolution: null, isInterlaced: null }
|
||||
const [, verticalResolutionString] = quality.match(/^(\d+)/) || [null, undefined]
|
||||
const isInterlaced = /i$/i.test(quality)
|
||||
let verticalResolution = 0
|
||||
if (verticalResolutionString) verticalResolution = parseInt(verticalResolutionString)
|
||||
|
||||
return { verticalResolution, isInterlaced }
|
||||
}
|
||||
|
||||
54
scripts/types/channel.d.ts
vendored
54
scripts/types/channel.d.ts
vendored
@@ -1,27 +1,27 @@
|
||||
import { Collection } from '@freearhey/core'
|
||||
|
||||
export type ChannelData = {
|
||||
id: string
|
||||
name: string
|
||||
alt_names: string[]
|
||||
network: string
|
||||
owners: Collection
|
||||
country: string
|
||||
subdivision: string
|
||||
city: string
|
||||
categories: Collection
|
||||
is_nsfw: boolean
|
||||
launched: string
|
||||
closed: string
|
||||
replaced_by: string
|
||||
website: string
|
||||
}
|
||||
|
||||
export type ChannelSearchableData = {
|
||||
id: string
|
||||
name: string
|
||||
altNames: string[]
|
||||
guideNames: string[]
|
||||
streamNames: string[]
|
||||
feedFullNames: string[]
|
||||
}
|
||||
import { Collection } from '@freearhey/core'
|
||||
|
||||
export interface ChannelData {
|
||||
id: string
|
||||
name: string
|
||||
alt_names: string[]
|
||||
network: string
|
||||
owners: Collection
|
||||
country: string
|
||||
subdivision: string
|
||||
city: string
|
||||
categories: Collection
|
||||
is_nsfw: boolean
|
||||
launched: string
|
||||
closed: string
|
||||
replaced_by: string
|
||||
website: string
|
||||
}
|
||||
|
||||
export interface ChannelSearchableData {
|
||||
id: string
|
||||
name: string
|
||||
altNames: string[]
|
||||
guideNames: string[]
|
||||
streamNames: string[]
|
||||
feedFullNames: string[]
|
||||
}
|
||||
|
||||
40
scripts/types/dataLoader.d.ts
vendored
40
scripts/types/dataLoader.d.ts
vendored
@@ -1,20 +1,20 @@
|
||||
import { Storage } from '@freearhey/core'
|
||||
|
||||
export type DataLoaderProps = {
|
||||
storage: Storage
|
||||
}
|
||||
|
||||
export type DataLoaderData = {
|
||||
countries: object | object[]
|
||||
regions: object | object[]
|
||||
subdivisions: object | object[]
|
||||
languages: object | object[]
|
||||
categories: object | object[]
|
||||
blocklist: object | object[]
|
||||
channels: object | object[]
|
||||
feeds: object | object[]
|
||||
timezones: object | object[]
|
||||
guides: object | object[]
|
||||
streams: object | object[]
|
||||
logos: object | object[]
|
||||
}
|
||||
import { Storage } from '@freearhey/core'
|
||||
|
||||
export interface DataLoaderProps {
|
||||
storage: Storage
|
||||
}
|
||||
|
||||
export interface DataLoaderData {
|
||||
countries: object | object[]
|
||||
regions: object | object[]
|
||||
subdivisions: object | object[]
|
||||
languages: object | object[]
|
||||
categories: object | object[]
|
||||
blocklist: object | object[]
|
||||
channels: object | object[]
|
||||
feeds: object | object[]
|
||||
timezones: object | object[]
|
||||
guides: object | object[]
|
||||
streams: object | object[]
|
||||
logos: object | object[]
|
||||
}
|
||||
|
||||
32
scripts/types/dataProcessor.d.ts
vendored
32
scripts/types/dataProcessor.d.ts
vendored
@@ -1,16 +1,16 @@
|
||||
import { Collection, Dictionary } from '@freearhey/core'
|
||||
|
||||
export type DataProcessorData = {
|
||||
guideChannelsGroupedByStreamId: Dictionary
|
||||
feedsGroupedByChannelId: Dictionary
|
||||
logosGroupedByChannelId: Dictionary
|
||||
logosGroupedByStreamId: Dictionary
|
||||
feedsKeyByStreamId: Dictionary
|
||||
streamsGroupedById: Dictionary
|
||||
channelsKeyById: Dictionary
|
||||
guideChannels: Collection
|
||||
channels: Collection
|
||||
streams: Collection
|
||||
feeds: Collection
|
||||
logos: Collection
|
||||
}
|
||||
import { Collection, Dictionary } from '@freearhey/core'
|
||||
|
||||
export interface DataProcessorData {
|
||||
guideChannelsGroupedByStreamId: Dictionary
|
||||
feedsGroupedByChannelId: Dictionary
|
||||
logosGroupedByChannelId: Dictionary
|
||||
logosGroupedByStreamId: Dictionary
|
||||
feedsKeyByStreamId: Dictionary
|
||||
streamsGroupedById: Dictionary
|
||||
channelsKeyById: Dictionary
|
||||
guideChannels: Collection
|
||||
channels: Collection
|
||||
streams: Collection
|
||||
feeds: Collection
|
||||
logos: Collection
|
||||
}
|
||||
|
||||
24
scripts/types/feed.d.ts
vendored
24
scripts/types/feed.d.ts
vendored
@@ -1,12 +1,12 @@
|
||||
import { Collection } from '@freearhey/core'
|
||||
|
||||
export type FeedData = {
|
||||
channel: string
|
||||
id: string
|
||||
name: string
|
||||
is_main: boolean
|
||||
broadcast_area: Collection
|
||||
languages: Collection
|
||||
timezones: Collection
|
||||
video_format: string
|
||||
}
|
||||
import { Collection } from '@freearhey/core'
|
||||
|
||||
export interface FeedData {
|
||||
channel: string
|
||||
id: string
|
||||
name: string
|
||||
is_main: boolean
|
||||
broadcast_area: Collection
|
||||
languages: Collection
|
||||
timezones: Collection
|
||||
video_format: string
|
||||
}
|
||||
|
||||
16
scripts/types/guide.d.ts
vendored
16
scripts/types/guide.d.ts
vendored
@@ -1,8 +1,8 @@
|
||||
export type GuideData = {
|
||||
channel: string
|
||||
feed: string
|
||||
site: string
|
||||
site_id: string
|
||||
site_name: string
|
||||
lang: string
|
||||
}
|
||||
export interface GuideData {
|
||||
channel: string
|
||||
feed: string
|
||||
site: string
|
||||
site_id: string
|
||||
site_name: string
|
||||
lang: string
|
||||
}
|
||||
|
||||
18
scripts/types/logo.d.ts
vendored
18
scripts/types/logo.d.ts
vendored
@@ -1,9 +1,9 @@
|
||||
export type LogoData = {
|
||||
channel: string
|
||||
feed: string | null
|
||||
tags: string[]
|
||||
width: number
|
||||
height: number
|
||||
format: string | null
|
||||
url: string
|
||||
}
|
||||
export interface LogoData {
|
||||
channel: string
|
||||
feed: string | null
|
||||
tags: string[]
|
||||
width: number
|
||||
height: number
|
||||
format: string | null
|
||||
url: string
|
||||
}
|
||||
|
||||
20
scripts/types/stream.d.ts
vendored
20
scripts/types/stream.d.ts
vendored
@@ -1,10 +1,10 @@
|
||||
export type StreamData = {
|
||||
channel: string | null
|
||||
feed: string | null
|
||||
name?: string
|
||||
url: string
|
||||
referrer: string | null
|
||||
user_agent: string | null
|
||||
quality: string | null
|
||||
label: string | null
|
||||
}
|
||||
export interface StreamData {
|
||||
channel: string | null
|
||||
feed: string | null
|
||||
name?: string
|
||||
url: string
|
||||
referrer: string | null
|
||||
user_agent: string | null
|
||||
quality: string | null
|
||||
label: string | null
|
||||
}
|
||||
|
||||
@@ -1,41 +1,41 @@
|
||||
const { parser, url } = require('./tvim.tv.config.js')
|
||||
const fs = require('fs')
|
||||
const path = require('path')
|
||||
const dayjs = require('dayjs')
|
||||
const utc = require('dayjs/plugin/utc')
|
||||
const customParseFormat = require('dayjs/plugin/customParseFormat')
|
||||
dayjs.extend(customParseFormat)
|
||||
dayjs.extend(utc)
|
||||
|
||||
const date = dayjs.utc('2021-10-24', 'YYYY-MM-DD').startOf('d')
|
||||
const channel = { site_id: 'T7', xmltv_id: 'T7.rs' }
|
||||
const content = fs.readFileSync(path.resolve(__dirname, '__data__/content.json'))
|
||||
|
||||
it('can generate valid url', () => {
|
||||
const result = url({ date, channel })
|
||||
expect(result).toBe(
|
||||
'https://www.tvim.tv/script/program_epg?date=24.10.2021&prog=T7&server_time=true'
|
||||
)
|
||||
})
|
||||
|
||||
it('can parse response', () => {
|
||||
const result = parser({ date, channel, content })
|
||||
expect(result).toMatchObject([
|
||||
{
|
||||
start: 'Sat, 23 Oct 2021 22:00:00 GMT',
|
||||
stop: 'Sun, 24 Oct 2021 02:00:00 GMT',
|
||||
title: 'Programi i T7',
|
||||
description: 'Programi i T7',
|
||||
category: 'test'
|
||||
}
|
||||
])
|
||||
})
|
||||
|
||||
it('can handle empty guide', () => {
|
||||
const result = parser({
|
||||
date,
|
||||
channel,
|
||||
content: fs.readFileSync(path.resolve(__dirname, '__data__/no_content.json'))
|
||||
})
|
||||
expect(result).toMatchObject([])
|
||||
})
|
||||
const { parser, url } = require('./tvim.tv.config.js')
const fs = require('fs')
const path = require('path')
const dayjs = require('dayjs')
const utc = require('dayjs/plugin/utc')
const customParseFormat = require('dayjs/plugin/customParseFormat')
dayjs.extend(customParseFormat)
dayjs.extend(utc)

// Shared fixtures: a fixed grab date, one channel, and the recorded response.
const date = dayjs.utc('2021-10-24', 'YYYY-MM-DD').startOf('d')
const channel = { site_id: 'T7', xmltv_id: 'T7.rs' }
const loadFixture = filename => fs.readFileSync(path.resolve(__dirname, '__data__', filename))
const content = loadFixture('content.json')

it('can generate valid url', () => {
  expect(url({ date, channel })).toBe(
    'https://www.tvim.tv/script/program_epg?date=24.10.2021&prog=T7&server_time=true'
  )
})

it('can parse response', () => {
  const programs = parser({ date, channel, content })
  expect(programs).toMatchObject([
    {
      start: 'Sat, 23 Oct 2021 22:00:00 GMT',
      stop: 'Sun, 24 Oct 2021 02:00:00 GMT',
      title: 'Programi i T7',
      description: 'Programi i T7',
      category: 'test'
    }
  ])
})

it('can handle empty guide', () => {
  const programs = parser({ date, channel, content: loadFixture('no_content.json') })
  expect(programs).toMatchObject([])
})
|
||||
|
||||
@@ -1,127 +1,127 @@
|
||||
const cheerio = require('cheerio')
|
||||
const axios = require('axios')
|
||||
const { DateTime } = require('luxon')
|
||||
|
||||
module.exports = {
|
||||
site: 'tvinsider.com',
|
||||
days: 2,
|
||||
url({ channel }) {
|
||||
return `https://www.tvinsider.com/network/${channel.site_id}/schedule/`
|
||||
},
|
||||
parser({ content, date }) {
|
||||
const programs = []
|
||||
const items = parseItems(content, date)
|
||||
items.forEach(item => {
|
||||
const prev = programs[programs.length - 1]
|
||||
const $item = cheerio.load(item)
|
||||
const episodeInfo = parseEP($item)
|
||||
let start = parseStart($item, date)
|
||||
if (!start) return
|
||||
if (prev) {
|
||||
prev.stop = start
|
||||
}
|
||||
const stop = start.plus({ minute: 30 })
|
||||
|
||||
programs.push({
|
||||
title: parseTitle($item),
|
||||
description: parseDescription($item),
|
||||
category: parseCategory($item),
|
||||
date: parseDate($item),
|
||||
...episodeInfo,
|
||||
subTitles: parseSubtitle($item),
|
||||
previouslyShown: parsePreviously($item),
|
||||
start,
|
||||
stop
|
||||
})
|
||||
})
|
||||
|
||||
return programs
|
||||
},
|
||||
async channels() {
|
||||
const html = await axios
|
||||
.get('https://www.tvinsider.com/network/5-star-max/')
|
||||
.then(r => r.data)
|
||||
.catch(console.log)
|
||||
const $ = cheerio.load(html)
|
||||
const items = $('body > main > section > select > option').toArray()
|
||||
|
||||
const channels = []
|
||||
items.forEach(item => {
|
||||
const name = $(item).text().trim()
|
||||
const path = $(item).attr('value')
|
||||
if (!path) return
|
||||
const [, , site_id] = path.split('/') || [null, null, null]
|
||||
if (!site_id) return
|
||||
|
||||
channels.push({
|
||||
lang: 'en',
|
||||
site_id,
|
||||
name
|
||||
})
|
||||
})
|
||||
|
||||
return channels
|
||||
}
|
||||
}
|
||||
|
||||
// Program title comes from the card's <h3>.
function parseTitle($item) {
  const title = $item('h3').text()
  return title.trim()
}
|
||||
/**
 * Extracts season/episode from the card's <h6>, e.g. "Season 2 • Episode 5".
 * Returns {} when the pattern is absent so spreading it adds no fields.
 */
function parseEP($item) {
  const heading = $item('h6').text().trim()
  const match = heading.match(/Season\s+(\d+)\s*•\s*Episode\s+(\d+)/i)
  if (!match) return {}

  return {
    season: parseInt(match[1], 10),
    episode: parseInt(match[2], 10)
  }
}
|
||||
|
||||
// Episode subtitle comes from the card's <h5>.
function parseSubtitle($item) {
  const subtitle = $item('h5').text()
  return subtitle.trim()
}
|
||||
|
||||
/**
 * A trailing "New" in the <h3> marks a first airing: return null so no
 * previously-shown marker is attached; otherwise an empty object (repeat).
 */
function parsePreviously($item) {
  const heading = $item('h3').text().trim()
  return /New$/.test(heading) ? null : {}
}
|
||||
|
||||
// Synopsis text of the card's paragraph element(s).
function parseDescription($item) {
  const node = $item('p')
  return node.text().trim()
}
|
||||
|
||||
// The <h4> holds "Category • Date"; keep the part before the bullet.
function parseCategory($item) {
  const parts = $item('h4').text().trim().split(' • ')

  return parts[0]
}
|
||||
|
||||
// The <h4> holds "Category • Date"; keep the part after the bullet
// (undefined when the bullet is absent).
function parseDate($item) {
  const parts = $item('h4').text().trim().split(' • ')

  return parts[1]
}
|
||||
|
||||
// Combine the card's local clock time (<time>) with the requested day and
// interpret it in the US Eastern schedule zone, returned as UTC.
function parseStart($item, date) {
  const clock = $item('time').text().trim()
  const stamp = `${date.format('YYYY-MM-DD')} ${clock}`

  return DateTime.fromFormat(stamp, 'yyyy-MM-dd t', { zone: 'America/New_York' }).toUTC()
}
|
||||
|
||||
// Schedule pages anchor each day with an element whose id is the date
// (MM-DD-YYYY); the sibling that follows holds one <a> per program.
function parseItems(content, date) {
  const $ = cheerio.load(content)
  const dayAnchor = $(`#${date.format('MM-DD-YYYY')}`)

  return dayAnchor.next().find('a').toArray()
}
|
||||
const cheerio = require('cheerio')
|
||||
const axios = require('axios')
|
||||
const { DateTime } = require('luxon')
|
||||
|
||||
module.exports = {
|
||||
site: 'tvinsider.com',
|
||||
days: 2,
|
||||
url({ channel }) {
|
||||
return `https://www.tvinsider.com/network/${channel.site_id}/schedule/`
|
||||
},
|
||||
parser({ content, date }) {
|
||||
const programs = []
|
||||
const items = parseItems(content, date)
|
||||
items.forEach(item => {
|
||||
const prev = programs[programs.length - 1]
|
||||
const $item = cheerio.load(item)
|
||||
const episodeInfo = parseEP($item)
|
||||
let start = parseStart($item, date)
|
||||
if (!start) return
|
||||
if (prev) {
|
||||
prev.stop = start
|
||||
}
|
||||
const stop = start.plus({ minute: 30 })
|
||||
|
||||
programs.push({
|
||||
title: parseTitle($item),
|
||||
description: parseDescription($item),
|
||||
category: parseCategory($item),
|
||||
date: parseDate($item),
|
||||
...episodeInfo,
|
||||
subTitles: parseSubtitle($item),
|
||||
previouslyShown: parsePreviously($item),
|
||||
start,
|
||||
stop
|
||||
})
|
||||
})
|
||||
|
||||
return programs
|
||||
},
|
||||
async channels() {
|
||||
const html = await axios
|
||||
.get('https://www.tvinsider.com/network/5-star-max/')
|
||||
.then(r => r.data)
|
||||
.catch(console.log)
|
||||
const $ = cheerio.load(html)
|
||||
const items = $('body > main > section > select > option').toArray()
|
||||
|
||||
const channels = []
|
||||
items.forEach(item => {
|
||||
const name = $(item).text().trim()
|
||||
const path = $(item).attr('value')
|
||||
if (!path) return
|
||||
const [, , site_id] = path.split('/') || [null, null, null]
|
||||
if (!site_id) return
|
||||
|
||||
channels.push({
|
||||
lang: 'en',
|
||||
site_id,
|
||||
name
|
||||
})
|
||||
})
|
||||
|
||||
return channels
|
||||
}
|
||||
}
|
||||
|
||||
function parseTitle($item) {
|
||||
return $item('h3').text().trim()
|
||||
}
|
||||
function parseEP($item){
|
||||
const text = $item('h6').text().trim()
|
||||
const match = text.match(/Season\s+(\d+)\s*•\s*Episode\s+(\d+)/i)
|
||||
|
||||
if (!match) return {} // Return an empty object if no match, so properties are undefined later
|
||||
|
||||
const season = parseInt(match[1], 10)
|
||||
const episode = parseInt(match[2], 10)
|
||||
|
||||
return { season, episode } // Return an object with season and episode
|
||||
}
|
||||
|
||||
function parseSubtitle($item) {
|
||||
return $item('h5').text().trim()
|
||||
}
|
||||
|
||||
function parsePreviously($item){
|
||||
const h3Text = $item('h3').text().trim()
|
||||
const isNewShow = /New$/.test(h3Text)
|
||||
|
||||
if (isNewShow) {
|
||||
return null
|
||||
} else {
|
||||
return {}
|
||||
}
|
||||
}
|
||||
|
||||
function parseDescription($item) {
|
||||
return $item('p').text().trim()
|
||||
}
|
||||
|
||||
function parseCategory($item) {
|
||||
const [category] = $item('h4').text().trim().split(' • ')
|
||||
|
||||
return category
|
||||
}
|
||||
|
||||
function parseDate($item) {
|
||||
const [, date] = $item('h4').text().trim().split(' • ')
|
||||
|
||||
return date
|
||||
}
|
||||
|
||||
function parseStart($item, date) {
|
||||
let time = $item('time').text().trim()
|
||||
time = `${date.format('YYYY-MM-DD')} ${time}`
|
||||
|
||||
return DateTime.fromFormat(time, 'yyyy-MM-dd t', { zone: 'America/New_York' }).toUTC()
|
||||
}
|
||||
|
||||
function parseItems(content, date) {
|
||||
const $ = cheerio.load(content)
|
||||
|
||||
return $(`#${date.format('MM-DD-YYYY')}`)
|
||||
.next()
|
||||
.find('a')
|
||||
.toArray()
|
||||
}
|
||||
|
||||
@@ -1,99 +1,99 @@
|
||||
const cheerio = require('cheerio')
|
||||
const dayjs = require('dayjs')
|
||||
const utc = require('dayjs/plugin/utc')
|
||||
const timezone = require('dayjs/plugin/timezone')
|
||||
const customParseFormat = require('dayjs/plugin/customParseFormat')
|
||||
const { uniqBy } = require('../../scripts/functions')
|
||||
|
||||
dayjs.extend(utc)
|
||||
dayjs.extend(timezone)
|
||||
dayjs.extend(customParseFormat)
|
||||
|
||||
module.exports = {
|
||||
site: 'tvireland.ie',
|
||||
days: 2,
|
||||
url: function ({ date, channel }) {
|
||||
return `https://www.tvireland.ie/tv/listings/channel/${channel.site_id}?dt=${date.format(
|
||||
'YYYY-MM-DD'
|
||||
)}`
|
||||
},
|
||||
parser: function ({ content, date, channel }) {
|
||||
const programs = []
|
||||
const items = parseItems(content)
|
||||
items.forEach(item => {
|
||||
const prev = programs[programs.length - 1]
|
||||
const $item = cheerio.load(item)
|
||||
let start = parseStart($item, date, channel)
|
||||
if (prev) {
|
||||
if (start.isBefore(prev.start)) {
|
||||
start = start.add(1, 'd')
|
||||
date = date.add(1, 'd')
|
||||
}
|
||||
prev.stop = start
|
||||
}
|
||||
const stop = start.add(30, 'm')
|
||||
programs.push({
|
||||
title: parseTitle($item),
|
||||
start,
|
||||
stop
|
||||
})
|
||||
})
|
||||
|
||||
return programs
|
||||
},
|
||||
async channels() {
|
||||
const axios = require('axios')
|
||||
|
||||
const providers = ['-9000019', '-8000019', '-1000019', '-2000019', '-7000019']
|
||||
|
||||
const channels = []
|
||||
for (let provider of providers) {
|
||||
const data = await axios
|
||||
.post('https://www.tvireland.ie/tv/schedule', null, {
|
||||
params: {
|
||||
provider,
|
||||
region: 'Ireland',
|
||||
TVperiod: 'Night',
|
||||
date: dayjs().format('YYYY-MM-DD'),
|
||||
st: 0,
|
||||
u_time: 2027,
|
||||
is_mobile: 1
|
||||
}
|
||||
})
|
||||
.then(r => r.data)
|
||||
.catch(console.log)
|
||||
|
||||
const $ = cheerio.load(data)
|
||||
$('.channelname').each((i, el) => {
|
||||
const name = $(el).find('center > a:eq(1)').text()
|
||||
const url = $(el).find('center > a:eq(1)').attr('href')
|
||||
const [, number, slug] = url.match(/\/(\d+)\/(.*)\.html$/)
|
||||
|
||||
channels.push({
|
||||
lang: 'en',
|
||||
name,
|
||||
site_id: `${number}/${slug}`
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
return uniqBy(channels, x => x.site_id)
|
||||
}
|
||||
}
|
||||
|
||||
// Combine the listing's clock column (first <td>) with the requested date,
// interpreted as Dublin local time.
function parseStart($item, date) {
  const clock = $item('td:eq(0)').text().trim()
  const stamp = `${date.format('YYYY-MM-DD')} ${clock}`

  return dayjs.tz(stamp, 'YYYY-MM-DD H:mm a', 'Europe/Dublin')
}
|
||||
|
||||
// Program title sits in the second table cell of the row.
function parseTitle($item) {
  const cell = $item('td:eq(1)')
  return cell.text().trim()
}
|
||||
|
||||
// Each schedule entry is one <tr> in the listings table.
function parseItems(content) {
  const $ = cheerio.load(content)
  const rows = $('table.table > tbody > tr')

  return rows.toArray()
}
|
||||
const cheerio = require('cheerio')
|
||||
const dayjs = require('dayjs')
|
||||
const utc = require('dayjs/plugin/utc')
|
||||
const timezone = require('dayjs/plugin/timezone')
|
||||
const customParseFormat = require('dayjs/plugin/customParseFormat')
|
||||
const { uniqBy } = require('../../scripts/functions')
|
||||
|
||||
dayjs.extend(utc)
|
||||
dayjs.extend(timezone)
|
||||
dayjs.extend(customParseFormat)
|
||||
|
||||
module.exports = {
|
||||
site: 'tvireland.ie',
|
||||
days: 2,
|
||||
url: function ({ date, channel }) {
|
||||
return `https://www.tvireland.ie/tv/listings/channel/${channel.site_id}?dt=${date.format(
|
||||
'YYYY-MM-DD'
|
||||
)}`
|
||||
},
|
||||
parser: function ({ content, date, channel }) {
|
||||
const programs = []
|
||||
const items = parseItems(content)
|
||||
items.forEach(item => {
|
||||
const prev = programs[programs.length - 1]
|
||||
const $item = cheerio.load(item)
|
||||
let start = parseStart($item, date, channel)
|
||||
if (prev) {
|
||||
if (start.isBefore(prev.start)) {
|
||||
start = start.add(1, 'd')
|
||||
date = date.add(1, 'd')
|
||||
}
|
||||
prev.stop = start
|
||||
}
|
||||
const stop = start.add(30, 'm')
|
||||
programs.push({
|
||||
title: parseTitle($item),
|
||||
start,
|
||||
stop
|
||||
})
|
||||
})
|
||||
|
||||
return programs
|
||||
},
|
||||
async channels() {
|
||||
const axios = require('axios')
|
||||
|
||||
const providers = ['-9000019', '-8000019', '-1000019', '-2000019', '-7000019']
|
||||
|
||||
const channels = []
|
||||
for (let provider of providers) {
|
||||
const data = await axios
|
||||
.post('https://www.tvireland.ie/tv/schedule', null, {
|
||||
params: {
|
||||
provider,
|
||||
region: 'Ireland',
|
||||
TVperiod: 'Night',
|
||||
date: dayjs().format('YYYY-MM-DD'),
|
||||
st: 0,
|
||||
u_time: 2027,
|
||||
is_mobile: 1
|
||||
}
|
||||
})
|
||||
.then(r => r.data)
|
||||
.catch(console.log)
|
||||
|
||||
const $ = cheerio.load(data)
|
||||
$('.channelname').each((i, el) => {
|
||||
const name = $(el).find('center > a:eq(1)').text()
|
||||
const url = $(el).find('center > a:eq(1)').attr('href')
|
||||
const [, number, slug] = url.match(/\/(\d+)\/(.*)\.html$/)
|
||||
|
||||
channels.push({
|
||||
lang: 'en',
|
||||
name,
|
||||
site_id: `${number}/${slug}`
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
return uniqBy(channels, x => x.site_id)
|
||||
}
|
||||
}
|
||||
|
||||
function parseStart($item, date) {
|
||||
const timeString = $item('td:eq(0)').text().trim()
|
||||
const dateString = `${date.format('YYYY-MM-DD')} ${timeString}`
|
||||
|
||||
return dayjs.tz(dateString, 'YYYY-MM-DD H:mm a', 'Europe/Dublin')
|
||||
}
|
||||
|
||||
function parseTitle($item) {
|
||||
return $item('td:eq(1)').text().trim()
|
||||
}
|
||||
|
||||
function parseItems(content) {
|
||||
const $ = cheerio.load(content)
|
||||
|
||||
return $('table.table > tbody > tr').toArray()
|
||||
}
|
||||
|
||||
@@ -1,81 +1,81 @@
|
||||
const axios = require('axios')
|
||||
const dayjs = require('dayjs')
|
||||
const { uniqBy } = require('../../scripts/functions')
|
||||
|
||||
module.exports = {
|
||||
site: 'tvmusor.hu',
|
||||
days: 2,
|
||||
url: 'https://tvmusor.borsonline.hu/a/get-events/',
|
||||
request: {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8'
|
||||
},
|
||||
data({ channel, date }) {
|
||||
const params = new URLSearchParams()
|
||||
params.append(
|
||||
'data',
|
||||
JSON.stringify({
|
||||
blocks: [`${channel.site_id}|${date.format('YYYY-MM-DD')}`]
|
||||
})
|
||||
)
|
||||
|
||||
return params
|
||||
}
|
||||
},
|
||||
parser({ content, channel, date }) {
|
||||
let programs = []
|
||||
const items = parseItems(content, channel, date)
|
||||
items.forEach(item => {
|
||||
const prev = programs[programs.length - 1]
|
||||
let start = dayjs(item.e)
|
||||
let stop = dayjs(item.f)
|
||||
if (prev) {
|
||||
start = prev.stop
|
||||
}
|
||||
|
||||
programs.push({
|
||||
title: item.j,
|
||||
category: item.h,
|
||||
description: item.c,
|
||||
image: parseImage(item),
|
||||
start,
|
||||
stop
|
||||
})
|
||||
})
|
||||
|
||||
return programs
|
||||
},
|
||||
async channels() {
|
||||
const data = await axios
|
||||
.get('https://tvmusor.borsonline.hu/most/')
|
||||
.then(r => r.data)
|
||||
.catch(console.log)
|
||||
|
||||
const [, channelData] = data.match(/const CHANNEL_DATA = (.*);/)
|
||||
const json = channelData.replace('},}', '}}').replace(/(\d+):/g, '"$1":')
|
||||
const channels = JSON.parse(json)
|
||||
|
||||
return Object.values(channels).map(item => {
|
||||
return {
|
||||
lang: 'hu',
|
||||
site_id: item.id,
|
||||
name: item.name
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Event images are referenced by bare filename (field "z"); build the full
// CDN URL, or return null when the event has no image.
function parseImage(item) {
  if (!item.z) return null

  return `https://tvmusor.borsonline.hu/images/events/408/${item.z}`
}
|
||||
|
||||
// Pull the block for this channel/date out of the AJAX payload and drop
// entries that duplicate a start time (.e) or event id (.b).
function parseItems(content, channel, date) {
  const payload = JSON.parse(content)
  const blocks = payload?.data?.loadedBlocks
  if (!blocks) return []

  const key = `${channel.site_id}_${date.format('YYYY-MM-DD')}`
  const block = blocks[key]
  if (!Array.isArray(block)) return []

  return uniqBy(uniqBy(block, a => a.e), b => b.b)
}
|
||||
const axios = require('axios')
|
||||
const dayjs = require('dayjs')
|
||||
const { uniqBy } = require('../../scripts/functions')
|
||||
|
||||
module.exports = {
|
||||
site: 'tvmusor.hu',
|
||||
days: 2,
|
||||
url: 'https://tvmusor.borsonline.hu/a/get-events/',
|
||||
request: {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8'
|
||||
},
|
||||
data({ channel, date }) {
|
||||
const params = new URLSearchParams()
|
||||
params.append(
|
||||
'data',
|
||||
JSON.stringify({
|
||||
blocks: [`${channel.site_id}|${date.format('YYYY-MM-DD')}`]
|
||||
})
|
||||
)
|
||||
|
||||
return params
|
||||
}
|
||||
},
|
||||
parser({ content, channel, date }) {
|
||||
let programs = []
|
||||
const items = parseItems(content, channel, date)
|
||||
items.forEach(item => {
|
||||
const prev = programs[programs.length - 1]
|
||||
let start = dayjs(item.e)
|
||||
let stop = dayjs(item.f)
|
||||
if (prev) {
|
||||
start = prev.stop
|
||||
}
|
||||
|
||||
programs.push({
|
||||
title: item.j,
|
||||
category: item.h,
|
||||
description: item.c,
|
||||
image: parseImage(item),
|
||||
start,
|
||||
stop
|
||||
})
|
||||
})
|
||||
|
||||
return programs
|
||||
},
|
||||
async channels() {
|
||||
const data = await axios
|
||||
.get('https://tvmusor.borsonline.hu/most/')
|
||||
.then(r => r.data)
|
||||
.catch(console.log)
|
||||
|
||||
const [, channelData] = data.match(/const CHANNEL_DATA = (.*);/)
|
||||
const json = channelData.replace('},}', '}}').replace(/(\d+):/g, '"$1":')
|
||||
const channels = JSON.parse(json)
|
||||
|
||||
return Object.values(channels).map(item => {
|
||||
return {
|
||||
lang: 'hu',
|
||||
site_id: item.id,
|
||||
name: item.name
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
function parseImage(item) {
|
||||
return item.z ? `https://tvmusor.borsonline.hu/images/events/408/${item.z}` : null
|
||||
}
|
||||
|
||||
function parseItems(content, channel, date) {
|
||||
const data = JSON.parse(content)
|
||||
if (!data || !data.data || !data.data.loadedBlocks) return []
|
||||
const blocks = data.data.loadedBlocks
|
||||
const blockId = `${channel.site_id}_${date.format('YYYY-MM-DD')}`
|
||||
if (!Array.isArray(blocks[blockId])) return []
|
||||
|
||||
return uniqBy(uniqBy(blocks[blockId], a => a.e), b => b.b)
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,213 +1,213 @@
|
||||
const axios = require('axios')
|
||||
const dayjs = require('dayjs')
|
||||
const utc = require('dayjs/plugin/utc')
|
||||
const customParseFormat = require('dayjs/plugin/customParseFormat')
|
||||
|
||||
let X_CSRFTOKEN
|
||||
let Cookie
|
||||
const cookiesToExtract = ['JSESSIONID', 'CSESSIONID', 'CSRFSESSION']
|
||||
|
||||
dayjs.extend(utc)
|
||||
dayjs.extend(customParseFormat)
|
||||
|
||||
module.exports = {
|
||||
site: 'web.magentatv.de',
|
||||
days: 2,
|
||||
url: 'https://api.prod.sngtv.magentatv.de/EPG/JSON/PlayBillList',
|
||||
request: {
|
||||
method: 'POST',
|
||||
async headers() {
|
||||
return await setHeaders()
|
||||
},
|
||||
data({ channel, date }) {
|
||||
return {
|
||||
count: -1,
|
||||
isFillProgram: 1,
|
||||
offset: 0,
|
||||
properties: [
|
||||
{
|
||||
include:
|
||||
'endtime,genres,id,name,starttime,channelid,pictures,introduce,subName,seasonNum,subNum,cast,country,producedate,externalIds',
|
||||
name: 'playbill'
|
||||
}
|
||||
],
|
||||
type: 2,
|
||||
begintime: date.format('YYYYMMDD000000'),
|
||||
channelid: channel.site_id,
|
||||
endtime: date.add(1, 'd').format('YYYYMMDD000000')
|
||||
}
|
||||
}
|
||||
},
|
||||
parser({ content }) {
|
||||
const programs = []
|
||||
const items = parseItems(content)
|
||||
items.forEach(item => {
|
||||
programs.push({
|
||||
title: item.name,
|
||||
description: item.introduce,
|
||||
image: parseImage(item),
|
||||
category: parseCategory(item),
|
||||
start: parseStart(item),
|
||||
stop: parseStop(item),
|
||||
sub_title: item.subName,
|
||||
season: item.seasonNum,
|
||||
episode: item.subNum,
|
||||
directors: parseDirectors(item),
|
||||
producers: parseProducers(item),
|
||||
adapters: parseAdapters(item),
|
||||
country: item.country?.toUpperCase(),
|
||||
date: item.producedate,
|
||||
urls: parseUrls(item)
|
||||
})
|
||||
})
|
||||
return programs
|
||||
},
|
||||
async channels() {
|
||||
const url = 'https://api.prod.sngtv.magentatv.de/EPG/JSON/AllChannel'
|
||||
const body = {
|
||||
channelNamespace: 2,
|
||||
filterlist: [
|
||||
{
|
||||
key: 'IsHide',
|
||||
value: '-1'
|
||||
}
|
||||
],
|
||||
metaDataVer: 'Channel/1.1',
|
||||
properties: [
|
||||
{
|
||||
include: '/channellist/logicalChannel/contentId,/channellist/logicalChannel/name',
|
||||
name: 'logicalChannel'
|
||||
}
|
||||
],
|
||||
returnSatChannel: 0
|
||||
}
|
||||
const params = {
|
||||
headers: await setHeaders()
|
||||
}
|
||||
|
||||
const data = await axios
|
||||
.post(url, body, params)
|
||||
.then(r => r.data)
|
||||
.catch(console.log)
|
||||
|
||||
return data.channellist.map(item => {
|
||||
return {
|
||||
lang: 'de',
|
||||
site_id: item.contentId,
|
||||
name: item.name
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// Genres arrive as one string joined by commas and/or the German word "und".
// Split on either, using word boundaries so "und" inside a word (e.g.
// "Gesundheit") stays intact — the old replace('und', ',') corrupted such
// values and only handled the first occurrence. Empty fragments are dropped.
function parseCategory(item) {
  if (!item.genres) return []

  return item.genres
    .split(/,|\bund\b/)
    .map(i => i.trim())
    .filter(Boolean)
}
|
||||
|
||||
// Director credits are comma- and/or "und"-separated. Split with word
// boundaries so names containing "und" survive — replace('und', ',') broke
// them and only handled the first separator. Empty fragments are dropped.
function parseDirectors(item) {
  if (!item.cast || !item.cast.director) return []

  return item.cast.director
    .split(/,|\bund\b/)
    .map(i => i.trim())
    .filter(Boolean)
}
|
||||
|
||||
// Producer credits are comma- and/or "und"-separated. Split with word
// boundaries so names containing "und" survive — replace('und', ',') broke
// them and only handled the first separator. Empty fragments are dropped.
function parseProducers(item) {
  if (!item.cast || !item.cast.producer) return []

  return item.cast.producer
    .split(/,|\bund\b/)
    .map(i => i.trim())
    .filter(Boolean)
}
|
||||
|
||||
// Adapter credits are comma- and/or "und"-separated. Split with word
// boundaries so names containing "und" survive — replace('und', ',') broke
// them and only handled the first separator. Empty fragments are dropped.
function parseAdapters(item) {
  if (!item.cast || !item.cast.adaptor) return []

  return item.cast.adaptor
    .split(/,|\bund\b/)
    .map(i => i.trim())
    .filter(Boolean)
}
|
||||
|
||||
// Map external ids onto program URLs. The API currently only delivers IMDb
// ids, so the title URL can be constructed directly from the id.
function parseUrls(item) {
  if (!item.externalIds) return []

  const refs = JSON.parse(item.externalIds)
  const imdbRefs = refs.filter(ref => ref.type === 'imdb' && ref.id)

  return imdbRefs.map(ref => ({
    system: 'imdb',
    value: `https://www.imdb.com/title/${ref.id}`
  }))
}
|
||||
|
||||
// First picture wins; null when the API sent none.
function parseImage(item) {
  const pictures = item.pictures
  if (!Array.isArray(pictures) || pictures.length === 0) return null

  return pictures[0].href
}
|
||||
|
||||
// API start timestamps ("YYYY-MM-DD HH:mm:ss") are interpreted as UTC.
function parseStart(item) {
  const raw = item.starttime
  return dayjs.utc(raw, 'YYYY-MM-DD HH:mm:ss')
}
|
||||
|
||||
// API end timestamps ("YYYY-MM-DD HH:mm:ss") are interpreted as UTC.
function parseStop(item) {
  const raw = item.endtime
  return dayjs.utc(raw, 'YYYY-MM-DD HH:mm:ss')
}
|
||||
|
||||
// Unwrap the PlayBillList response; absent or malformed lists yield [].
function parseItems(content) {
  const data = JSON.parse(content)
  const list = data ? data.playbilllist : null

  return Array.isArray(list) ? list : []
}
|
||||
|
||||
// Authenticate against the Magenta EPG API and cache the CSRF token plus the
// session cookies in the module-level X_CSRFTOKEN / Cookie variables.
// Subsequent calls are no-ops once both are populated. On any failure the
// error is logged and the globals are left unset (callers then send
// undefined header values — presumably the API rejects those requests;
// TODO confirm).
async function fetchCookieAndToken() {
  // Only fetch the cookies and csrfToken if they are not already set
  if (X_CSRFTOKEN && Cookie) {
    return
  }

  try {
    // The request body mimics the web player's anonymous terminal handshake.
    const response = await axios.request({
      url: 'https://api.prod.sngtv.magentatv.de/EPG/JSON/Authenticate',
      params: {
        SID: 'firstup',
        T: 'Windows_chrome_118'
      },
      method: 'POST',
      data: '{"terminalid":"00:00:00:00:00:00","mac":"00:00:00:00:00:00","terminaltype":"WEBTV","utcEnable":1,"timezone":"Etc/GMT0","userType":3,"terminalvendor":"Unknown"}',
    })

    // Extract the cookies specified in cookiesToExtract from the Set-Cookie
    // response headers. Each extracted string keeps its "NAME=value;" form
    // (the regex match includes the trailing separator when present).
    const setCookieHeader = response.headers['set-cookie'] || []
    const extractedCookies = []
    cookiesToExtract.forEach(cookieName => {
      const regex = new RegExp(`${cookieName}=(.+?)(;|$)`)
      const match = setCookieHeader.find(header => regex.test(header))

      if (match) {
        const cookieString = regex.exec(match)[0]
        extractedCookies.push(cookieString)
      }
    })

    // Only store the values if we received a csrfToken; otherwise leave the
    // cache empty so the next call retries authentication.
    if (!response.data.csrfToken) {
      console.log('csrfToken not found in the response.')
      return
    }

    X_CSRFTOKEN = response.data.csrfToken
    Cookie = extractedCookies.join(' ')

  } catch(error) {
    console.error(error)
  }
}
|
||||
|
||||
// Build the headers required by the Magenta EPG API, authenticating lazily
// (the token/cookie pair is fetched once and cached at module level).
async function setHeaders() {
  await fetchCookieAndToken()
  const headers = { X_CSRFTOKEN, Cookie }

  return headers
}
|
||||
const axios = require('axios')
|
||||
const dayjs = require('dayjs')
|
||||
const utc = require('dayjs/plugin/utc')
|
||||
const customParseFormat = require('dayjs/plugin/customParseFormat')
|
||||
|
||||
let X_CSRFTOKEN
|
||||
let Cookie
|
||||
const cookiesToExtract = ['JSESSIONID', 'CSESSIONID', 'CSRFSESSION']
|
||||
|
||||
dayjs.extend(utc)
|
||||
dayjs.extend(customParseFormat)
|
||||
|
||||
module.exports = {
|
||||
site: 'web.magentatv.de',
|
||||
days: 2,
|
||||
url: 'https://api.prod.sngtv.magentatv.de/EPG/JSON/PlayBillList',
|
||||
request: {
|
||||
method: 'POST',
|
||||
async headers() {
|
||||
return await setHeaders()
|
||||
},
|
||||
data({ channel, date }) {
|
||||
return {
|
||||
count: -1,
|
||||
isFillProgram: 1,
|
||||
offset: 0,
|
||||
properties: [
|
||||
{
|
||||
include:
|
||||
'endtime,genres,id,name,starttime,channelid,pictures,introduce,subName,seasonNum,subNum,cast,country,producedate,externalIds',
|
||||
name: 'playbill'
|
||||
}
|
||||
],
|
||||
type: 2,
|
||||
begintime: date.format('YYYYMMDD000000'),
|
||||
channelid: channel.site_id,
|
||||
endtime: date.add(1, 'd').format('YYYYMMDD000000')
|
||||
}
|
||||
}
|
||||
},
|
||||
parser({ content }) {
|
||||
const programs = []
|
||||
const items = parseItems(content)
|
||||
items.forEach(item => {
|
||||
programs.push({
|
||||
title: item.name,
|
||||
description: item.introduce,
|
||||
image: parseImage(item),
|
||||
category: parseCategory(item),
|
||||
start: parseStart(item),
|
||||
stop: parseStop(item),
|
||||
sub_title: item.subName,
|
||||
season: item.seasonNum,
|
||||
episode: item.subNum,
|
||||
directors: parseDirectors(item),
|
||||
producers: parseProducers(item),
|
||||
adapters: parseAdapters(item),
|
||||
country: item.country?.toUpperCase(),
|
||||
date: item.producedate,
|
||||
urls: parseUrls(item)
|
||||
})
|
||||
})
|
||||
return programs
|
||||
},
|
||||
async channels() {
|
||||
const url = 'https://api.prod.sngtv.magentatv.de/EPG/JSON/AllChannel'
|
||||
const body = {
|
||||
channelNamespace: 2,
|
||||
filterlist: [
|
||||
{
|
||||
key: 'IsHide',
|
||||
value: '-1'
|
||||
}
|
||||
],
|
||||
metaDataVer: 'Channel/1.1',
|
||||
properties: [
|
||||
{
|
||||
include: '/channellist/logicalChannel/contentId,/channellist/logicalChannel/name',
|
||||
name: 'logicalChannel'
|
||||
}
|
||||
],
|
||||
returnSatChannel: 0
|
||||
}
|
||||
const params = {
|
||||
headers: await setHeaders()
|
||||
}
|
||||
|
||||
const data = await axios
|
||||
.post(url, body, params)
|
||||
.then(r => r.data)
|
||||
.catch(console.log)
|
||||
|
||||
return data.channellist.map(item => {
|
||||
return {
|
||||
lang: 'de',
|
||||
site_id: item.contentId,
|
||||
name: item.name
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
function parseCategory(item) {
|
||||
return item.genres
|
||||
? item.genres
|
||||
.replace('und', ',')
|
||||
.split(',')
|
||||
.map(i => i.trim())
|
||||
: []
|
||||
}
|
||||
|
||||
function parseDirectors(item) {
|
||||
if (!item.cast || !item.cast.director) return []
|
||||
return item.cast.director
|
||||
.replace('und', ',')
|
||||
.split(',')
|
||||
.map(i => i.trim())
|
||||
}
|
||||
|
||||
function parseProducers(item) {
|
||||
if (!item.cast || !item.cast.producer) return []
|
||||
return item.cast.producer
|
||||
.replace('und', ',')
|
||||
.split(',')
|
||||
.map(i => i.trim())
|
||||
}
|
||||
|
||||
function parseAdapters(item) {
|
||||
if (!item.cast || !item.cast.adaptor) return []
|
||||
return item.cast.adaptor
|
||||
.replace('und', ',')
|
||||
.split(',')
|
||||
.map(i => i.trim())
|
||||
}
|
||||
|
||||
function parseUrls(item) {
|
||||
// currently only a imdb id is returned by the api, thus we can construct the url here
|
||||
if (!item.externalIds) return []
|
||||
return JSON.parse(item.externalIds)
|
||||
.filter(externalId => externalId.type === 'imdb' && externalId.id)
|
||||
.map(externalId => ({ system: 'imdb', value: `https://www.imdb.com/title/${externalId.id}` }))
|
||||
}
|
||||
|
||||
function parseImage(item) {
|
||||
if (!Array.isArray(item.pictures) || !item.pictures.length) return null
|
||||
|
||||
return item.pictures[0].href
|
||||
}
|
||||
|
||||
function parseStart(item) {
|
||||
return dayjs.utc(item.starttime, 'YYYY-MM-DD HH:mm:ss')
|
||||
}
|
||||
|
||||
function parseStop(item) {
|
||||
return dayjs.utc(item.endtime, 'YYYY-MM-DD HH:mm:ss')
|
||||
}
|
||||
|
||||
function parseItems(content) {
|
||||
const data = JSON.parse(content)
|
||||
if (!data || !Array.isArray(data.playbilllist)) return []
|
||||
|
||||
return data.playbilllist
|
||||
}
|
||||
|
||||
async function fetchCookieAndToken() {
|
||||
// Only fetch the cookies and csrfToken if they are not already set
|
||||
if (X_CSRFTOKEN && Cookie) {
|
||||
return
|
||||
}
|
||||
|
||||
try {
|
||||
const response = await axios.request({
|
||||
url: 'https://api.prod.sngtv.magentatv.de/EPG/JSON/Authenticate',
|
||||
params: {
|
||||
SID: 'firstup',
|
||||
T: 'Windows_chrome_118'
|
||||
},
|
||||
method: 'POST',
|
||||
data: '{"terminalid":"00:00:00:00:00:00","mac":"00:00:00:00:00:00","terminaltype":"WEBTV","utcEnable":1,"timezone":"Etc/GMT0","userType":3,"terminalvendor":"Unknown"}',
|
||||
})
|
||||
|
||||
// Extract the cookies specified in cookiesToExtract
|
||||
const setCookieHeader = response.headers['set-cookie'] || []
|
||||
const extractedCookies = []
|
||||
cookiesToExtract.forEach(cookieName => {
|
||||
const regex = new RegExp(`${cookieName}=(.+?)(;|$)`)
|
||||
const match = setCookieHeader.find(header => regex.test(header))
|
||||
|
||||
if (match) {
|
||||
const cookieString = regex.exec(match)[0]
|
||||
extractedCookies.push(cookieString)
|
||||
}
|
||||
})
|
||||
|
||||
// check if we recieved a csrfToken only then store the values
|
||||
if (!response.data.csrfToken) {
|
||||
console.log('csrfToken not found in the response.')
|
||||
return
|
||||
}
|
||||
|
||||
X_CSRFTOKEN = response.data.csrfToken
|
||||
Cookie = extractedCookies.join(' ')
|
||||
|
||||
} catch(error) {
|
||||
console.error(error)
|
||||
}
|
||||
}
|
||||
|
||||
async function setHeaders() {
|
||||
await fetchCookieAndToken()
|
||||
|
||||
return { X_CSRFTOKEN, Cookie }
|
||||
}
|
||||
|
||||
@@ -1,83 +1,83 @@
|
||||
import { execSync } from 'child_process'
|
||||
|
||||
type ExecError = {
|
||||
status: number
|
||||
stdout: string
|
||||
}
|
||||
|
||||
describe('channels:lint', () => {
|
||||
it('will show a message if the file contains a syntax error', () => {
|
||||
try {
|
||||
const cmd = 'npm run channels:lint --- tests/__data__/input/channels_lint/error.channels.xml'
|
||||
const stdout = execSync(cmd, { encoding: 'utf8' })
|
||||
if (process.env.DEBUG === 'true') console.log(cmd, stdout)
|
||||
process.exit(1)
|
||||
} catch (error) {
|
||||
expect((error as ExecError).status).toBe(1)
|
||||
expect((error as ExecError).stdout).toContain(
|
||||
"error.channels.xml\n 3:0 Element 'channel': The attribute 'lang' is required but missing.\n\n1 error(s)\n"
|
||||
)
|
||||
}
|
||||
})
|
||||
|
||||
it('will show a message if an error occurred while parsing an xml file', () => {
|
||||
try {
|
||||
const cmd =
|
||||
'npm run channels:lint --- tests/__data__/input/channels_lint/invalid.channels.xml'
|
||||
const stdout = execSync(cmd, { encoding: 'utf8' })
|
||||
if (process.env.DEBUG === 'true') console.log(cmd, stdout)
|
||||
process.exit(1)
|
||||
} catch (error) {
|
||||
expect((error as ExecError).status).toBe(1)
|
||||
expect((error as ExecError).stdout).toContain(
|
||||
'invalid.channels.xml\n 2:6 XML declaration allowed only at the start of the document\n'
|
||||
)
|
||||
}
|
||||
})
|
||||
|
||||
it('can test multiple files at ones', () => {
|
||||
try {
|
||||
const cmd =
|
||||
'npm run channels:lint --- tests/__data__/input/channels_lint/error.channels.xml tests/__data__/input/channels_lint/invalid.channels.xml'
|
||||
const stdout = execSync(cmd, { encoding: 'utf8' })
|
||||
if (process.env.DEBUG === 'true') console.log(cmd, stdout)
|
||||
process.exit(1)
|
||||
} catch (error) {
|
||||
expect((error as ExecError).status).toBe(1)
|
||||
expect((error as ExecError).stdout).toContain(
|
||||
"error.channels.xml\n 3:0 Element 'channel': The attribute 'lang' is required but missing.\n"
|
||||
)
|
||||
expect((error as ExecError).stdout).toContain(
|
||||
'invalid.channels.xml\n 2:6 XML declaration allowed only at the start of the document\n'
|
||||
)
|
||||
expect((error as ExecError).stdout).toContain('2 error(s)')
|
||||
}
|
||||
})
|
||||
|
||||
it('will show a message if the file contains single quotes', () => {
|
||||
try {
|
||||
const cmd =
|
||||
'npm run channels:lint --- tests/__data__/input/channels_lint/single_quotes.channels.xml'
|
||||
const stdout = execSync(cmd, { encoding: 'utf8' })
|
||||
if (process.env.DEBUG === 'true') console.log(cmd, stdout)
|
||||
process.exit(1)
|
||||
} catch (error) {
|
||||
expect((error as ExecError).status).toBe(1)
|
||||
expect((error as ExecError).stdout).toContain('single_quotes.channels.xml')
|
||||
expect((error as ExecError).stdout).toContain(
|
||||
'1:14 Single quotes cannot be used in attributes'
|
||||
)
|
||||
}
|
||||
})
|
||||
|
||||
it('does not display errors if there are none', () => {
|
||||
try {
|
||||
const cmd = 'npm run channels:lint --- tests/__data__/input/channels_lint/valid.channels.xml'
|
||||
const stdout = execSync(cmd, { encoding: 'utf8' })
|
||||
if (process.env.DEBUG === 'true') console.log(cmd, stdout)
|
||||
} catch (error) {
|
||||
if (process.env.DEBUG === 'true') console.log((error as ExecError).stdout)
|
||||
process.exit(1)
|
||||
}
|
||||
})
|
||||
})
|
||||
import { execSync } from 'child_process'
|
||||
|
||||
interface ExecError {
|
||||
status: number
|
||||
stdout: string
|
||||
}
|
||||
|
||||
describe('channels:lint', () => {
|
||||
it('will show a message if the file contains a syntax error', () => {
|
||||
try {
|
||||
const cmd = 'npm run channels:lint --- tests/__data__/input/channels_lint/error.channels.xml'
|
||||
const stdout = execSync(cmd, { encoding: 'utf8' })
|
||||
if (process.env.DEBUG === 'true') console.log(cmd, stdout)
|
||||
process.exit(1)
|
||||
} catch (error) {
|
||||
expect((error as ExecError).status).toBe(1)
|
||||
expect((error as ExecError).stdout).toContain(
|
||||
"error.channels.xml\n 3:0 Element 'channel': The attribute 'lang' is required but missing.\n\n1 error(s)\n"
|
||||
)
|
||||
}
|
||||
})
|
||||
|
||||
it('will show a message if an error occurred while parsing an xml file', () => {
|
||||
try {
|
||||
const cmd =
|
||||
'npm run channels:lint --- tests/__data__/input/channels_lint/invalid.channels.xml'
|
||||
const stdout = execSync(cmd, { encoding: 'utf8' })
|
||||
if (process.env.DEBUG === 'true') console.log(cmd, stdout)
|
||||
process.exit(1)
|
||||
} catch (error) {
|
||||
expect((error as ExecError).status).toBe(1)
|
||||
expect((error as ExecError).stdout).toContain(
|
||||
'invalid.channels.xml\n 2:6 XML declaration allowed only at the start of the document\n'
|
||||
)
|
||||
}
|
||||
})
|
||||
|
||||
it('can test multiple files at ones', () => {
|
||||
try {
|
||||
const cmd =
|
||||
'npm run channels:lint --- tests/__data__/input/channels_lint/error.channels.xml tests/__data__/input/channels_lint/invalid.channels.xml'
|
||||
const stdout = execSync(cmd, { encoding: 'utf8' })
|
||||
if (process.env.DEBUG === 'true') console.log(cmd, stdout)
|
||||
process.exit(1)
|
||||
} catch (error) {
|
||||
expect((error as ExecError).status).toBe(1)
|
||||
expect((error as ExecError).stdout).toContain(
|
||||
"error.channels.xml\n 3:0 Element 'channel': The attribute 'lang' is required but missing.\n"
|
||||
)
|
||||
expect((error as ExecError).stdout).toContain(
|
||||
'invalid.channels.xml\n 2:6 XML declaration allowed only at the start of the document\n'
|
||||
)
|
||||
expect((error as ExecError).stdout).toContain('2 error(s)')
|
||||
}
|
||||
})
|
||||
|
||||
it('will show a message if the file contains single quotes', () => {
|
||||
try {
|
||||
const cmd =
|
||||
'npm run channels:lint --- tests/__data__/input/channels_lint/single_quotes.channels.xml'
|
||||
const stdout = execSync(cmd, { encoding: 'utf8' })
|
||||
if (process.env.DEBUG === 'true') console.log(cmd, stdout)
|
||||
process.exit(1)
|
||||
} catch (error) {
|
||||
expect((error as ExecError).status).toBe(1)
|
||||
expect((error as ExecError).stdout).toContain('single_quotes.channels.xml')
|
||||
expect((error as ExecError).stdout).toContain(
|
||||
'1:14 Single quotes cannot be used in attributes'
|
||||
)
|
||||
}
|
||||
})
|
||||
|
||||
it('does not display errors if there are none', () => {
|
||||
try {
|
||||
const cmd = 'npm run channels:lint --- tests/__data__/input/channels_lint/valid.channels.xml'
|
||||
const stdout = execSync(cmd, { encoding: 'utf8' })
|
||||
if (process.env.DEBUG === 'true') console.log(cmd, stdout)
|
||||
} catch (error) {
|
||||
if (process.env.DEBUG === 'true') console.log((error as ExecError).stdout)
|
||||
process.exit(1)
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
@@ -1,70 +1,70 @@
|
||||
import { execSync } from 'child_process'
|
||||
|
||||
type ExecError = {
|
||||
status: number
|
||||
stdout: string
|
||||
}
|
||||
|
||||
// Environment prefix for the CLI: points DATA_DIR at the test fixtures
// instead of the real data directory (cross-env for Windows compatibility).
const ENV_VAR = 'cross-env DATA_DIR=tests/__data__/input/__data__'
|
||||
describe('channels:validate', () => {
|
||||
it('will show a message if the file contains a duplicate', () => {
|
||||
try {
|
||||
const cmd = `${ENV_VAR} npm run channels:validate --- tests/__data__/input/channels_validate/duplicate.channels.xml`
|
||||
const stdout = execSync(cmd, { encoding: 'utf8' })
|
||||
if (process.env.DEBUG === 'true') console.log(cmd, stdout)
|
||||
process.exit(1)
|
||||
} catch (error) {
|
||||
expect((error as ExecError).status).toBe(1)
|
||||
expect((error as ExecError).stdout).toContain(`
|
||||
┌─────────┬─────────────┬──────┬─────────────────┬─────────┬─────────┐
|
||||
│ (index) │ type │ lang │ xmltv_id │ site_id │ name │
|
||||
├─────────┼─────────────┼──────┼─────────────────┼─────────┼─────────┤
|
||||
│ 0 │ 'duplicate' │ 'en' │ 'Bravo.us@East' │ '140' │ 'Bravo' │
|
||||
└─────────┴─────────────┴──────┴─────────────────┴─────────┴─────────┘
|
||||
|
||||
1 error(s) in 1 file(s)
|
||||
`)
|
||||
}
|
||||
})
|
||||
|
||||
it('will show a message if the file contains a channel with wrong channel id', () => {
|
||||
try {
|
||||
const cmd = `${ENV_VAR} npm run channels:validate --- tests/__data__/input/channels_validate/wrong_channel_id.channels.xml`
|
||||
const stdout = execSync(cmd, { encoding: 'utf8' })
|
||||
if (process.env.DEBUG === 'true') console.log(cmd, stdout)
|
||||
process.exit(1)
|
||||
} catch (error) {
|
||||
expect((error as ExecError).status).toBe(1)
|
||||
expect((error as ExecError).stdout).toContain(`
|
||||
┌─────────┬────────────────────┬──────┬────────────────────┬─────────┬─────────────────────┐
|
||||
│ (index) │ type │ lang │ xmltv_id │ site_id │ name │
|
||||
├─────────┼────────────────────┼──────┼────────────────────┼─────────┼─────────────────────┤
|
||||
│ 0 │ 'wrong_channel_id' │ 'en' │ 'CNNInternational' │ '140' │ 'CNN International' │
|
||||
└─────────┴────────────────────┴──────┴────────────────────┴─────────┴─────────────────────┘
|
||||
|
||||
1 error(s) in 1 file(s)
|
||||
`)
|
||||
}
|
||||
})
|
||||
|
||||
it('will show a message if the file contains a channel with wrong feed id', () => {
|
||||
try {
|
||||
const cmd = `${ENV_VAR} npm run channels:validate --- tests/__data__/input/channels_validate/wrong_feed_id.channels.xml`
|
||||
const stdout = execSync(cmd, { encoding: 'utf8' })
|
||||
if (process.env.DEBUG === 'true') console.log(cmd, stdout)
|
||||
process.exit(1)
|
||||
} catch (error) {
|
||||
expect((error as ExecError).status).toBe(1)
|
||||
expect((error as ExecError).stdout).toContain(`
|
||||
┌─────────┬─────────────────┬──────┬─────────────────┬─────────┬─────────┐
|
||||
│ (index) │ type │ lang │ xmltv_id │ site_id │ name │
|
||||
├─────────┼─────────────────┼──────┼─────────────────┼─────────┼─────────┤
|
||||
│ 0 │ 'wrong_feed_id' │ 'en' │ 'Bravo.us@West' │ '150' │ 'Bravo' │
|
||||
└─────────┴─────────────────┴──────┴─────────────────┴─────────┴─────────┘
|
||||
|
||||
1 error(s) in 1 file(s)
|
||||
`)
|
||||
}
|
||||
})
|
||||
})
|
||||
import { execSync } from 'child_process'
|
||||
|
||||
interface ExecError {
|
||||
status: number
|
||||
stdout: string
|
||||
}
|
||||
|
||||
// Environment prefix for the CLI: points DATA_DIR at the test fixtures
// instead of the real data directory (cross-env for Windows compatibility).
const ENV_VAR = 'cross-env DATA_DIR=tests/__data__/input/__data__'
|
||||
describe('channels:validate', () => {
|
||||
it('will show a message if the file contains a duplicate', () => {
|
||||
try {
|
||||
const cmd = `${ENV_VAR} npm run channels:validate --- tests/__data__/input/channels_validate/duplicate.channels.xml`
|
||||
const stdout = execSync(cmd, { encoding: 'utf8' })
|
||||
if (process.env.DEBUG === 'true') console.log(cmd, stdout)
|
||||
process.exit(1)
|
||||
} catch (error) {
|
||||
expect((error as ExecError).status).toBe(1)
|
||||
expect((error as ExecError).stdout).toContain(`
|
||||
┌─────────┬─────────────┬──────┬─────────────────┬─────────┬─────────┐
|
||||
│ (index) │ type │ lang │ xmltv_id │ site_id │ name │
|
||||
├─────────┼─────────────┼──────┼─────────────────┼─────────┼─────────┤
|
||||
│ 0 │ 'duplicate' │ 'en' │ 'Bravo.us@East' │ '140' │ 'Bravo' │
|
||||
└─────────┴─────────────┴──────┴─────────────────┴─────────┴─────────┘
|
||||
|
||||
1 error(s) in 1 file(s)
|
||||
`)
|
||||
}
|
||||
})
|
||||
|
||||
it('will show a message if the file contains a channel with wrong channel id', () => {
|
||||
try {
|
||||
const cmd = `${ENV_VAR} npm run channels:validate --- tests/__data__/input/channels_validate/wrong_channel_id.channels.xml`
|
||||
const stdout = execSync(cmd, { encoding: 'utf8' })
|
||||
if (process.env.DEBUG === 'true') console.log(cmd, stdout)
|
||||
process.exit(1)
|
||||
} catch (error) {
|
||||
expect((error as ExecError).status).toBe(1)
|
||||
expect((error as ExecError).stdout).toContain(`
|
||||
┌─────────┬────────────────────┬──────┬────────────────────┬─────────┬─────────────────────┐
|
||||
│ (index) │ type │ lang │ xmltv_id │ site_id │ name │
|
||||
├─────────┼────────────────────┼──────┼────────────────────┼─────────┼─────────────────────┤
|
||||
│ 0 │ 'wrong_channel_id' │ 'en' │ 'CNNInternational' │ '140' │ 'CNN International' │
|
||||
└─────────┴────────────────────┴──────┴────────────────────┴─────────┴─────────────────────┘
|
||||
|
||||
1 error(s) in 1 file(s)
|
||||
`)
|
||||
}
|
||||
})
|
||||
|
||||
it('will show a message if the file contains a channel with wrong feed id', () => {
|
||||
try {
|
||||
const cmd = `${ENV_VAR} npm run channels:validate --- tests/__data__/input/channels_validate/wrong_feed_id.channels.xml`
|
||||
const stdout = execSync(cmd, { encoding: 'utf8' })
|
||||
if (process.env.DEBUG === 'true') console.log(cmd, stdout)
|
||||
process.exit(1)
|
||||
} catch (error) {
|
||||
expect((error as ExecError).status).toBe(1)
|
||||
expect((error as ExecError).stdout).toContain(`
|
||||
┌─────────┬─────────────────┬──────┬─────────────────┬─────────┬─────────┐
|
||||
│ (index) │ type │ lang │ xmltv_id │ site_id │ name │
|
||||
├─────────┼─────────────────┼──────┼─────────────────┼─────────┼─────────┤
|
||||
│ 0 │ 'wrong_feed_id' │ 'en' │ 'Bravo.us@West' │ '150' │ 'Bravo' │
|
||||
└─────────┴─────────────────┴──────┴─────────────────┴─────────┴─────────┘
|
||||
|
||||
1 error(s) in 1 file(s)
|
||||
`)
|
||||
}
|
||||
})
|
||||
})
|
||||
|
||||
Reference in New Issue
Block a user