mirror of https://github.com/iptv-org/epg (synced 2025-12-18 11:27:06 -05:00)
use split lodash modules for better efficiency
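The change drops the project's hand-rolled lodash-style helpers (the deleted functions file near the end of this diff) in favor of lodash's per-method entry points, so only the functions actually used get loaded. The updated call sites are not visible in this extract; below is a minimal sketch of what the replacement imports presumably look like, with a hypothetical file and made-up data:

// Minimal sketch of per-method lodash imports (hypothetical call site, not part of this diff)
import sortBy from 'lodash/sortBy'
import uniqBy from 'lodash/uniqBy'
import startCase from 'lodash/startCase'

const sites = [{ domain: 'b.example' }, { domain: 'a.example' }, { domain: 'a.example' }]
// dedupe by domain, then sort by domain
const sorted = sortBy(uniqBy(sites, site => site.domain), site => site.domain)
// => [{ domain: 'a.example' }, { domain: 'b.example' }]
console.log(startCase('broken_guide')) // => 'Broken Guide'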
@@ -1,41 +1,41 @@
import { Logger, Collection, Storage } from '@freearhey/core'
import { SITES_DIR, API_DIR } from '../../constants'
import { GuideChannel } from '../../models'
import { ChannelsParser } from '../../core'
import epgGrabber from 'epg-grabber'
import path from 'path'

async function main() {
  const logger = new Logger()

  logger.start('starting...')

  logger.info('loading channels...')
  const sitesStorage = new Storage(SITES_DIR)
  const parser = new ChannelsParser({
    storage: sitesStorage
  })

  const files: string[] = await sitesStorage.list('**/*.channels.xml')

  const channels = new Collection()
  for (const filepath of files) {
    const channelList = await parser.parse(filepath)

    channelList.channels.forEach((data: epgGrabber.Channel) => {
      channels.add(new GuideChannel(data))
    })
  }

  logger.info(`found ${channels.count()} channel(s)`)

  const output = channels.map((channel: GuideChannel) => channel.toJSON())

  const apiStorage = new Storage(API_DIR)
  const outputFilename = 'guides.json'
  await apiStorage.save(outputFilename, output.toJSON())

  logger.info(`saved to "${path.join(API_DIR, outputFilename)}"`)
}

main()
@@ -1,25 +1,25 @@
import { DATA_DIR } from '../../constants'
import { Storage } from '@freearhey/core'
import { DataLoader } from '../../core'

async function main() {
  const storage = new Storage(DATA_DIR)
  const loader = new DataLoader({ storage })

  await Promise.all([
    loader.download('blocklist.json'),
    loader.download('categories.json'),
    loader.download('channels.json'),
    loader.download('countries.json'),
    loader.download('languages.json'),
    loader.download('regions.json'),
    loader.download('subdivisions.json'),
    loader.download('feeds.json'),
    loader.download('timezones.json'),
    loader.download('guides.json'),
    loader.download('streams.json'),
    loader.download('logos.json')
  ])
}

main()
@@ -1,109 +1,109 @@
import chalk from 'chalk'
import { program } from 'commander'
import { Storage, File } from '@freearhey/core'
import { XmlDocument, XsdValidator, XmlValidateError, ErrorDetail } from 'libxml2-wasm'

const xsd = `<?xml version="1.0" encoding="UTF-8"?>
<xs:schema xmlns:xs="http://www.w3.org/2001/XMLSchema" elementFormDefault="qualified">
  <xs:element name="channels">
    <xs:complexType>
      <xs:sequence>
        <xs:element minOccurs="0" maxOccurs="unbounded" ref="channel"/>
      </xs:sequence>
    </xs:complexType>
  </xs:element>
  <xs:element name="channel">
    <xs:complexType mixed="true">
      <xs:attribute use="required" ref="site"/>
      <xs:attribute use="required" ref="lang"/>
      <xs:attribute use="required" ref="site_id"/>
      <xs:attribute name="xmltv_id" use="required" type="xs:string"/>
      <xs:attribute name="logo" type="xs:string"/>
      <xs:attribute name="lcn" type="xs:string"/>
    </xs:complexType>
  </xs:element>
  <xs:attribute name="site">
    <xs:simpleType>
      <xs:restriction base="xs:string">
        <xs:minLength value="1"/>
      </xs:restriction>
    </xs:simpleType>
  </xs:attribute>
  <xs:attribute name="site_id">
    <xs:simpleType>
      <xs:restriction base="xs:string">
        <xs:minLength value="1"/>
      </xs:restriction>
    </xs:simpleType>
  </xs:attribute>
  <xs:attribute name="lang">
    <xs:simpleType>
      <xs:restriction base="xs:string">
        <xs:minLength value="1"/>
      </xs:restriction>
    </xs:simpleType>
  </xs:attribute>
</xs:schema>`

program.argument('[filepath...]', 'Path to *.channels.xml files to check').parse(process.argv)

async function main() {
  const storage = new Storage()

  let errors: ErrorDetail[] = []

  const files = program.args.length ? program.args : await storage.list('sites/**/*.channels.xml')
  for (const filepath of files) {
    const file = new File(filepath)
    if (file.extension() !== 'xml') continue

    const xml = await storage.load(filepath)

    let localErrors: ErrorDetail[] = []

    try {
      const schema = XmlDocument.fromString(xsd)
      const validator = XsdValidator.fromDoc(schema)
      const doc = XmlDocument.fromString(xml)

      validator.validate(doc)

      schema.dispose()
      validator.dispose()
      doc.dispose()
    } catch (_error) {
      const error = _error as XmlValidateError

      localErrors = localErrors.concat(error.details)
    }

    xml.split('\n').forEach((line: string, lineIndex: number) => {
      const found = line.match(/='/)
      if (found) {
        const colIndex = found.index || 0
        localErrors.push({
          line: lineIndex + 1,
          col: colIndex + 1,
          message: 'Single quotes cannot be used in attributes'
        })
      }
    })

    if (localErrors.length) {
      console.log(`\n${chalk.underline(filepath)}`)
      localErrors.forEach((error: ErrorDetail) => {
        const position = `${error.line}:${error.col}`
        console.log(` ${chalk.gray(position.padEnd(4, ' '))} ${error.message.trim()}`)
      })

      errors = errors.concat(localErrors)
    }
  }

  if (errors.length) {
    console.log(chalk.red(`\n${errors.length} error(s)`))
    process.exit(1)
  }
}

main()
@@ -1,76 +1,76 @@
import { IssueLoader, HTMLTable, ChannelsParser } from '../../core'
import { Logger, Storage, Collection } from '@freearhey/core'
import { ChannelList, Issue, Site } from '../../models'
import { SITES_DIR, ROOT_DIR } from '../../constants'
import { Channel } from 'epg-grabber'

async function main() {
  const logger = new Logger({ level: -999 })
  const issueLoader = new IssueLoader()
  const sitesStorage = new Storage(SITES_DIR)
  const sites = new Collection()

  logger.info('loading channels...')
  const channelsParser = new ChannelsParser({
    storage: sitesStorage
  })

  logger.info('loading list of sites...')
  const folders = await sitesStorage.list('*/')

  logger.info('loading issues...')
  const issues = await issueLoader.load()

  logger.info('putting the data together...')
  const brokenGuideReports = issues.filter(issue =>
    issue.labels.find((label: string) => label === 'broken guide')
  )
  for (const domain of folders) {
    const filteredIssues = brokenGuideReports.filter(
      (issue: Issue) => domain === issue.data.get('site')
    )

    const site = new Site({
      domain,
      issues: filteredIssues
    })

    const files = await sitesStorage.list(`${domain}/*.channels.xml`)
    for (const filepath of files) {
      const channelList: ChannelList = await channelsParser.parse(filepath)

      site.totalChannels += channelList.channels.count()
      site.markedChannels += channelList.channels
        .filter((channel: Channel) => channel.xmltv_id)
        .count()
    }

    sites.add(site)
  }

  logger.info('creating sites table...')
  const tableData = new Collection()
  sites.forEach((site: Site) => {
    tableData.add([
      { value: `<a href="sites/${site.domain}">${site.domain}</a>` },
      { value: site.totalChannels, align: 'right' },
      { value: site.markedChannels, align: 'right' },
      { value: site.getStatus().emoji, align: 'center' },
      { value: site.getIssues().all().join(', ') }
    ])
  })

  logger.info('updating SITES.md...')
  const table = new HTMLTable(tableData.all(), [
    { name: 'Site', align: 'left' },
    { name: 'Channels<br>(total / with xmltv-id)', colspan: 2, align: 'left' },
    { name: 'Status', align: 'left' },
    { name: 'Notes', align: 'left' }
  ])
  const rootStorage = new Storage(ROOT_DIR)
  const sitesTemplate = await new Storage().load('scripts/templates/_sites.md')
  const sitesContent = sitesTemplate.replace('_TABLE_', table.toString())
  await rootStorage.save('SITES.md', sitesContent)
}

main()
@@ -1,103 +1,103 @@
import type { DataLoaderProps, DataLoaderData } from '../types/dataLoader'
import cliProgress, { MultiBar } from 'cli-progress'
import { Storage } from '@freearhey/core'
import { ApiClient } from './apiClient'
import numeral from 'numeral'

export class DataLoader {
  client: ApiClient
  storage: Storage
  progressBar: MultiBar

  constructor(props: DataLoaderProps) {
    this.client = new ApiClient()
    this.storage = props.storage
    this.progressBar = new cliProgress.MultiBar({
      stopOnComplete: true,
      hideCursor: true,
      forceRedraw: true,
      barsize: 36,
      format(options, params, payload) {
        const filename = payload.filename.padEnd(18, ' ')
        const barsize = options.barsize || 40
        const percent = (params.progress * 100).toFixed(2)
        const speed = payload.speed ? numeral(payload.speed).format('0.0 b') + '/s' : 'N/A'
        const total = numeral(params.total).format('0.0 b')
        const completeSize = Math.round(params.progress * barsize)
        const incompleteSize = barsize - completeSize
        const bar =
          options.barCompleteString && options.barIncompleteString
            ? options.barCompleteString.substr(0, completeSize) +
              options.barGlue +
              options.barIncompleteString.substr(0, incompleteSize)
            : '-'.repeat(barsize)

        return `${filename} [${bar}] ${percent}% | ETA: ${params.eta}s | ${total} | ${speed}`
      }
    })
  }

  async load(): Promise<DataLoaderData> {
    const [
      countries,
      regions,
      subdivisions,
      languages,
      categories,
      blocklist,
      channels,
      feeds,
      timezones,
      guides,
      streams,
      logos
    ] = await Promise.all([
      this.storage.json('countries.json'),
      this.storage.json('regions.json'),
      this.storage.json('subdivisions.json'),
      this.storage.json('languages.json'),
      this.storage.json('categories.json'),
      this.storage.json('blocklist.json'),
      this.storage.json('channels.json'),
      this.storage.json('feeds.json'),
      this.storage.json('timezones.json'),
      this.storage.json('guides.json'),
      this.storage.json('streams.json'),
      this.storage.json('logos.json')
    ])

    return {
      countries,
      regions,
      subdivisions,
      languages,
      categories,
      blocklist,
      channels,
      feeds,
      timezones,
      guides,
      streams,
      logos
    }
  }

  async download(filename: string) {
    if (!this.storage || !this.progressBar) return

    const stream = await this.storage.createStream(filename)
    const progressBar = this.progressBar.create(0, 0, { filename })

    this.client
      .get(filename, {
        responseType: 'stream',
        onDownloadProgress({ total, loaded, rate }) {
          if (total) progressBar.setTotal(total)
          progressBar.update(loaded, { speed: rate })
        }
      })
      .then(response => {
        response.data.pipe(stream)
      })
  }
}
@@ -1,14 +1,14 @@
export * from './apiClient'
export * from './channelsParser'
export * from './configLoader'
export * from './dataLoader'
export * from './dataProcessor'
export * from './grabber'
export * from './guideManager'
export * from './htmlTable'
export * from './issueLoader'
export * from './issueParser'
export * from './job'
export * from './proxyParser'
export * from './queue'
export * from './queueCreator'
@@ -1,37 +1,37 @@
import { restEndpointMethods } from '@octokit/plugin-rest-endpoint-methods'
import { paginateRest } from '@octokit/plugin-paginate-rest'
import { TESTING, OWNER, REPO } from '../constants'
import { Collection } from '@freearhey/core'
import { Octokit } from '@octokit/core'
import { IssueParser } from './'

const CustomOctokit = Octokit.plugin(paginateRest, restEndpointMethods)
const octokit = new CustomOctokit()

export class IssueLoader {
  async load(props?: { labels: string[] | string }) {
    let labels = ''
    if (props && props.labels) {
      labels = Array.isArray(props.labels) ? props.labels.join(',') : props.labels
    }
    let issues: object[] = []
    if (TESTING) {
      issues = (await import('../../tests/__data__/input/sites_update/issues.mjs')).default
    } else {
      issues = await octokit.paginate(octokit.rest.issues.listForRepo, {
        owner: OWNER,
        repo: REPO,
        per_page: 100,
        labels,
        state: 'open',
        headers: {
          'X-GitHub-Api-Version': '2022-11-28'
        }
      })
    }

    const parser = new IssueParser()

    return new Collection(issues).map(parser.parse)
  }
}
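A short usage sketch for the loader above, based only on the load() signature shown (the label value mirrors the 'broken guide' filter used in the sites-update script earlier; run inside an async context):

// Usage sketch based on the load() signature above
const loader = new IssueLoader()
const allOpen = await loader.load() // every open issue in OWNER/REPO
const broken = await loader.load({ labels: 'broken guide' }) // label filter, string or string[]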
@@ -1,77 +0,0 @@
/**
 * Sorts an array by the result of running each element through an iteratee function.
 * Creates a shallow copy of the array before sorting to avoid mutating the original.
 *
 * @param {Array} arr - The array to sort
 * @param {Function} fn - The iteratee function to compute sort values
 * @returns {Array} A new sorted array
 *
 * @example
 * const users = [{name: 'john', age: 30}, {name: 'jane', age: 25}];
 * sortBy(users, x => x.age); // [{name: 'jane', age: 25}, {name: 'john', age: 30}]
 */
export const sortBy = <T>(arr: T[], fn: (item: T) => number | string): T[] =>
  [...arr].sort((a, b) => (fn(a) > fn(b) ? 1 : -1))

/**
 * Sorts an array by multiple criteria with customizable sort orders.
 * Supports ascending (default) and descending order for each criterion.
 *
 * @param {Array} arr - The array to sort
 * @param {Array<Function>} fns - Array of iteratee functions to compute sort values
 * @param {Array<string>} orders - Array of sort orders ('asc' or 'desc'), defaults to all 'asc'
 * @returns {Array} A new sorted array
 *
 * @example
 * const users = [{name: 'john', age: 30}, {name: 'jane', age: 25}, {name: 'bob', age: 30}];
 * orderBy(users, [x => x.age, x => x.name], ['desc', 'asc']);
 * // [{name: 'bob', age: 30}, {name: 'john', age: 30}, {name: 'jane', age: 25}]
 */
export const orderBy = (
  arr: unknown[],
  fns: ((item: unknown) => string | number)[],
  orders: string[] = []
): unknown[] =>
  [...arr].sort((a, b) =>
    fns.reduce(
      (acc, fn, i) =>
        acc ||
        ((orders[i] === 'desc' ? fn(b) > fn(a) : fn(a) > fn(b)) ? 1 : fn(a) === fn(b) ? 0 : -1),
      0
    )
  )

/**
 * Creates a duplicate-free version of an array using an iteratee function to generate
 * the criterion by which uniqueness is computed. Only the first occurrence of each
 * element is kept.
 *
 * @param {Array} arr - The array to inspect
 * @param {Function} fn - The iteratee function to compute uniqueness criterion
 * @returns {Array} A new duplicate-free array
 *
 * @example
 * const users = [{id: 1, name: 'john'}, {id: 2, name: 'jane'}, {id: 1, name: 'john'}];
 * uniqBy(users, x => x.id); // [{id: 1, name: 'john'}, {id: 2, name: 'jane'}]
 */
export const uniqBy = <T>(arr: T[], fn: (item: T) => unknown): T[] =>
  arr.filter((item, index) => arr.findIndex(x => fn(x) === fn(item)) === index)

/**
 * Converts a string to start case (capitalizes the first letter of each word).
 * Handles camelCase, snake_case, kebab-case, and regular spaces.
 *
 * @param {string} str - The string to convert
 * @returns {string} The start case string
 *
 * @example
 * startCase('hello_world'); // "Hello World"
 * startCase('helloWorld'); // "Hello World"
 * startCase('hello-world'); // "Hello World"
 * startCase('hello world'); // "Hello World"
 */
export const startCase = (str: string): string =>
  str
    .replace(/([a-z])([A-Z])/g, '$1 $2') // Split camelCase
    .replace(/[_-]/g, ' ') // Replace underscores and hyphens with spaces
    .replace(/\b\w/g, c => c.toUpperCase()) // Capitalize first letter of each word
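Each deleted helper has a same-named equivalent among lodash's split modules (sortBy, orderBy, uniqBy, startCase), and the switch also sidesteps a subtle flaw above: the local sortBy comparator returns only 1 or -1, never 0 for equal keys, which violates the comparator contract, whereas lodash's sortBy is a stable sort. A hedged sketch of the presumed replacements, with made-up sample data:

// Presumed drop-in replacements for the helpers deleted above (sample data is made up)
import orderBy from 'lodash/orderBy'
import uniqBy from 'lodash/uniqBy'

const users = [
  { id: 1, name: 'john', age: 30 },
  { id: 2, name: 'jane', age: 25 },
  { id: 1, name: 'john', age: 30 }
]

uniqBy(users, user => user.id) // keeps the first occurrence per id, like the deleted uniqBy
orderBy(users, ['age', 'name'], ['desc', 'asc']) // multi-key sort with per-key order, like the deleted orderBy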
@@ -1 +0,0 @@
export * from './functions'
@@ -1,59 +1,59 @@
import { Dictionary } from '@freearhey/core'
import epgGrabber from 'epg-grabber'
import { Feed, Channel } from '.'

export class GuideChannel {
  channelId?: string
  channel?: Channel
  feedId?: string
  feed?: Feed
  xmltvId?: string
  languageCode?: string
  siteId?: string
  logoUrl?: string
  siteDomain?: string
  siteName?: string

  constructor(data: epgGrabber.Channel) {
    const [channelId, feedId] = data.xmltv_id ? data.xmltv_id.split('@') : [undefined, undefined]

    this.channelId = channelId
    this.feedId = feedId
    this.xmltvId = data.xmltv_id
    this.languageCode = data.lang
    this.siteId = data.site_id
    this.logoUrl = data.logo
    this.siteDomain = data.site
    this.siteName = data.name
  }

  withChannel(channelsKeyById: Dictionary): this {
    if (this.channelId) this.channel = channelsKeyById.get(this.channelId)

    return this
  }

  withFeed(feedsKeyByStreamId: Dictionary): this {
    if (this.feedId) this.feed = feedsKeyByStreamId.get(this.getStreamId())

    return this
  }

  getStreamId(): string {
    if (!this.channelId) return ''
    if (!this.feedId) return this.channelId

    return `${this.channelId}@${this.feedId}`
  }

  toJSON() {
    return {
      channel: this.channelId || null,
      feed: this.feedId || null,
      site: this.siteDomain || '',
      site_id: this.siteId || '',
      site_name: this.siteName || '',
      lang: this.languageCode || ''
    }
  }
}
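For illustration, here is how the constructor above splits a composite xmltv_id on '@' and what toJSON() then emits; the channel values are invented:

// Invented values, shown only to illustrate the '@' split in the constructor above
const data = {
  xmltv_id: 'CNN.us@East',
  site: 'example.com',
  site_id: '#123',
  name: 'CNN East',
  lang: 'en'
} as epgGrabber.Channel

const guideChannel = new GuideChannel(data)
guideChannel.getStreamId() // => 'CNN.us@East'
guideChannel.toJSON()
// => { channel: 'CNN.us', feed: 'East', site: 'example.com', site_id: '#123', site_name: 'CNN East', lang: 'en' }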
@@ -1,9 +1,9 @@
export * from './channel'
export * from './feed'
export * from './guide'
export * from './guideChannel'
export * from './issue'
export * from './logo'
export * from './site'
export * from './stream'
export * from './channelList'