Update scripts

Author: freearhey
Date: 2025-10-08 21:25:22 +03:00
Parent: 25fa704e14
Commit: ad2c83e333
73 changed files with 3215 additions and 4784 deletions


@@ -1,217 +1,190 @@
import { Storage, Collection, Logger, Dictionary } from '@freearhey/core'
import { DataLoader, DataProcessor, PlaylistParser } from '../../core'
import type { ChannelSearchableData } from '../../types/channel'
import { Channel, Feed, Playlist, Stream } from '../../models'
import { DataProcessorData } from '../../types/dataProcessor'
import { DataLoaderData } from '../../types/dataLoader'
import { select, input } from '@inquirer/prompts'
import { DATA_DIR } from '../../constants'
import nodeCleanup from 'node-cleanup'
import sjs from '@freearhey/search-js'
import { Command } from 'commander'
import readline from 'readline'
type ChoiceValue = { type: string; value?: Feed | Channel }
type Choice = { name: string; short?: string; value: ChoiceValue; default?: boolean }
if (process.platform === 'win32') {
readline
.createInterface({
input: process.stdin,
output: process.stdout
})
.on('SIGINT', function () {
process.emit('SIGINT')
})
}
const program = new Command()
program.argument('<filepath>', 'Path to *.channels.xml file to edit').parse(process.argv)
const filepath = program.args[0]
const logger = new Logger()
const storage = new Storage()
let parsedStreams = new Collection()
main(filepath)
nodeCleanup(() => {
save(filepath)
})
export default async function main(filepath: string) {
if (!(await storage.exists(filepath))) {
throw new Error(`File "${filepath}" does not exist`)
}
logger.info('loading data from api...')
const processor = new DataProcessor()
const dataStorage = new Storage(DATA_DIR)
const loader = new DataLoader({ storage: dataStorage })
const data: DataLoaderData = await loader.load()
const {
channels,
channelsKeyById,
feedsGroupedByChannelId,
logosGroupedByStreamId
}: DataProcessorData = processor.process(data)
logger.info('loading streams...')
const parser = new PlaylistParser({
storage,
feedsGroupedByChannelId,
logosGroupedByStreamId,
channelsKeyById
})
parsedStreams = await parser.parseFile(filepath)
const streamsWithoutId = parsedStreams.filter((stream: Stream) => !stream.id)
logger.info(
`found ${parsedStreams.count()} streams (including ${streamsWithoutId.count()} without ID)`
)
logger.info('creating search index...')
const items = channels.map((channel: Channel) => channel.getSearchable()).all()
const searchIndex = sjs.createIndex(items, {
searchable: ['name', 'altNames', 'guideNames', 'streamTitles', 'feedFullNames']
})
logger.info('starting...\n')
for (const stream of streamsWithoutId.all()) {
try {
stream.id = await selectChannel(stream, searchIndex, feedsGroupedByChannelId, channelsKeyById)
} catch (err) {
logger.info(err.message)
break
}
}
streamsWithoutId.forEach((stream: Stream) => {
if (stream.id === '-') {
stream.id = ''
}
})
}
async function selectChannel(
stream: Stream,
searchIndex,
feedsGroupedByChannelId: Dictionary,
channelsKeyById: Dictionary
): Promise<string> {
const query = escapeRegex(stream.getTitle())
const similarChannels = searchIndex
.search(query)
.map((item: ChannelSearchableData) => channelsKeyById.get(item.id))
const url = stream.url.length > 50 ? stream.url.slice(0, 50) + '...' : stream.url
const selected: ChoiceValue = await select({
message: `Select channel ID for "${stream.title}" (${url}):`,
choices: getChannelChoices(new Collection(similarChannels)),
pageSize: 10
})
switch (selected.type) {
case 'skip':
return '-'
case 'type': {
const typedChannelId = await input({ message: ' Channel ID:' })
if (!typedChannelId) return ''
const selectedFeedId = await selectFeed(typedChannelId, feedsGroupedByChannelId)
if (selectedFeedId === '-') return typedChannelId
return [typedChannelId, selectedFeedId].join('@')
}
case 'channel': {
const selectedChannel = selected.value
if (!selectedChannel) return ''
const selectedFeedId = await selectFeed(selectedChannel.id, feedsGroupedByChannelId)
if (selectedFeedId === '-') return selectedChannel.id
return [selectedChannel.id, selectedFeedId].join('@')
}
}
return ''
}
async function selectFeed(channelId: string, feedsGroupedByChannelId: Dictionary): Promise<string> {
const channelFeeds = new Collection(feedsGroupedByChannelId.get(channelId))
const choices = getFeedChoices(channelFeeds)
const selected: ChoiceValue = await select({
message: `Select feed ID for "${channelId}":`,
choices,
pageSize: 10
})
switch (selected.type) {
case 'skip':
return '-'
case 'type':
return await input({ message: ' Feed ID:', default: 'SD' })
case 'feed': {
const selectedFeed = selected.value
if (!selectedFeed) return ''
return selectedFeed.id
}
}
return ''
}
function getChannelChoices(channels: Collection): Choice[] {
const choices: Choice[] = []
channels.forEach((channel: Channel) => {
const names = new Collection([channel.name, ...channel.altNames.all()]).uniq().join(', ')
choices.push({
value: {
type: 'channel',
value: channel
},
name: `${channel.id} (${names})`,
short: `${channel.id}`
})
})
choices.push({ name: 'Type...', value: { type: 'type' } })
choices.push({ name: 'Skip', value: { type: 'skip' } })
return choices
}
function getFeedChoices(feeds: Collection): Choice[] {
const choices: Choice[] = []
feeds.forEach((feed: Feed) => {
let name = `${feed.id} (${feed.name})`
if (feed.isMain) name += ' [main]'
choices.push({
value: {
type: 'feed',
value: feed
},
default: feed.isMain,
name,
short: feed.id
})
})
choices.push({ name: 'Type...', value: { type: 'type' } })
choices.push({ name: 'Skip', value: { type: 'skip' } })
return choices
}
function save(filepath: string) {
if (!storage.existsSync(filepath)) return
const playlist = new Playlist(parsedStreams)
storage.saveSync(filepath, playlist.toString())
logger.info(`\nFile '${filepath}' successfully saved`)
}
function escapeRegex(string: string) {
return string.replace(/[/\-\\^$*+?.()|[\]{}]/g, '\\$&')
}
import { loadData, data, searchChannels } from '../../api'
import { Collection, Logger } from '@freearhey/core'
import { select, input } from '@inquirer/prompts'
import { Playlist, Stream } from '../../models'
import { Storage } from '@freearhey/storage-js'
import { PlaylistParser } from '../../core'
import nodeCleanup from 'node-cleanup'
import * as sdk from '@iptv-org/sdk'
import { truncate } from '../../utils'
import { Command } from 'commander'
import readline from 'readline'
type ChoiceValue = { type: string; value?: sdk.Models.Feed | sdk.Models.Channel }
type Choice = { name: string; short?: string; value: ChoiceValue; default?: boolean }
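// Node.js on Windows does not emit SIGINT on Ctrl+C by default, so re-emit it through readline to make sure the nodeCleanup handler below gets a chance to run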
if (process.platform === 'win32') {
readline
.createInterface({
input: process.stdin,
output: process.stdout
})
.on('SIGINT', function () {
process.emit('SIGINT')
})
}
const program = new Command()
program.argument('<filepath>', 'Path to *.channels.xml file to edit').parse(process.argv)
const filepath = program.args[0]
const logger = new Logger()
const storage = new Storage()
let parsedStreams = new Collection<Stream>()
main(filepath)
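// Save the playlist on any exit path, including Ctrl+C (see the readline shim above)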
nodeCleanup(() => {
save(filepath)
})
export default async function main(filepath: string) {
if (!(await storage.exists(filepath))) {
throw new Error(`File "${filepath}" does not exist`)
}
logger.info('loading data from api...')
await loadData()
logger.info('loading streams...')
const parser = new PlaylistParser({
storage
})
parsedStreams = await parser.parseFile(filepath)
const streamsWithoutId = parsedStreams.filter((stream: Stream) => !stream.tvgId)
logger.info(
`found ${parsedStreams.count()} streams (including ${streamsWithoutId.count()} without ID)`
)
logger.info('starting...\n')
for (const stream of streamsWithoutId.all()) {
try {
stream.tvgId = await selectChannel(stream)
} catch (err) {
logger.info(err.message)
break
}
}
streamsWithoutId.forEach((stream: Stream) => {
if (stream.channel === '-') {
stream.channel = ''
}
})
}
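// Prompts the user to pick a channel (and then a feed) for the stream; returns '-' as a skip sentinel, which main() clears afterwards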
async function selectChannel(stream: Stream): Promise<string> {
const query = escapeRegex(stream.title)
const similarChannels = searchChannels(query)
const url = truncate(stream.url, 50)
const selected: ChoiceValue = await select({
message: `Select channel ID for "${stream.title}" (${url}):`,
choices: getChannelChoices(similarChannels),
pageSize: 10
})
switch (selected.type) {
case 'skip':
return '-'
case 'type': {
const typedChannelId = await input({ message: ' Channel ID:' })
if (!typedChannelId) return ''
const selectedFeedId = await selectFeed(typedChannelId)
if (selectedFeedId === '-') return typedChannelId
return [typedChannelId, selectedFeedId].join('@')
}
case 'channel': {
const selectedChannel = selected.value
if (!selectedChannel) return ''
const selectedFeedId = await selectFeed(selectedChannel.id)
if (selectedFeedId === '-') return selectedChannel.id
return [selectedChannel.id, selectedFeedId].join('@')
}
}
return ''
}
async function selectFeed(channelId: string): Promise<string> {
const channelFeeds = new Collection(data.feedsGroupedByChannel.get(channelId))
const choices = getFeedChoices(channelFeeds)
const selected: ChoiceValue = await select({
message: `Select feed ID for "${channelId}":`,
choices,
pageSize: 10
})
switch (selected.type) {
case 'skip':
return '-'
case 'type':
return await input({ message: ' Feed ID:', default: 'SD' })
case 'feed': {
const selectedFeed = selected.value
if (!selectedFeed) return ''
return selectedFeed.id
}
}
return ''
}
function getChannelChoices(channels: Collection<sdk.Models.Channel>): Choice[] {
const choices: Choice[] = []
channels.forEach((channel: sdk.Models.Channel) => {
const names = new Collection([channel.name, ...channel.alt_names]).uniq().join(', ')
choices.push({
value: {
type: 'channel',
value: channel
},
name: `${channel.id} (${names})`,
short: `${channel.id}`
})
})
choices.push({ name: 'Type...', value: { type: 'type' } })
choices.push({ name: 'Skip', value: { type: 'skip' } })
return choices
}
function getFeedChoices(feeds: Collection<sdk.Models.Feed>): Choice[] {
const choices: Choice[] = []
feeds.forEach((feed: sdk.Models.Feed) => {
let name = `${feed.id} (${feed.name})`
if (feed.is_main) name += ' [main]'
choices.push({
value: {
type: 'feed',
value: feed
},
default: feed.is_main,
name,
short: feed.id
})
})
choices.push({ name: 'Type...', value: { type: 'type' } })
choices.push({ name: 'Skip', value: { type: 'skip' } })
return choices
}
function save(filepath: string) {
if (!storage.existsSync(filepath)) return
const playlist = new Playlist(parsedStreams)
storage.saveSync(filepath, playlist.toString())
logger.info(`\nFile '${filepath}' successfully saved`)
}
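// Escape regex special characters so the stream title can be used as a literal search query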
function escapeRegex(string: string) {
return string.replace(/[/\-\\^$*+?.()|[\]{}]/g, '\\$&')
}


@@ -1,78 +1,84 @@
import { Logger, Storage } from '@freearhey/core'
import { STREAMS_DIR, DATA_DIR } from '../../constants'
import { DataLoader, DataProcessor, PlaylistParser } from '../../core'
import { Stream, Playlist } from '../../models'
import { program } from 'commander'
import { DataLoaderData } from '../../types/dataLoader'
import { DataProcessorData } from '../../types/dataProcessor'
import path from 'node:path'
program.argument('[filepath...]', 'Path to file to format').parse(process.argv)
async function main() {
const logger = new Logger()
logger.info('loading data from api...')
const processor = new DataProcessor()
const dataStorage = new Storage(DATA_DIR)
const loader = new DataLoader({ storage: dataStorage })
const data: DataLoaderData = await loader.load()
const { channelsKeyById, feedsGroupedByChannelId, logosGroupedByStreamId }: DataProcessorData =
processor.process(data)
logger.info('loading streams...')
const streamsStorage = new Storage(STREAMS_DIR)
const parser = new PlaylistParser({
storage: streamsStorage,
channelsKeyById,
feedsGroupedByChannelId,
logosGroupedByStreamId
})
let files = program.args.length ? program.args : await streamsStorage.list('**/*.m3u')
files = files.map((filepath: string) => path.basename(filepath))
let streams = await parser.parse(files)
logger.info(`found ${streams.count()} streams`)
logger.info('normalizing links...')
streams = streams.map(stream => {
stream.normalizeURL()
return stream
})
logger.info('removing duplicates...')
streams = streams.uniqBy(stream => stream.url)
logger.info('removing wrong id...')
streams = streams.map((stream: Stream) => {
if (!stream.channel || channelsKeyById.missing(stream.channel.id)) {
stream.id = ''
}
return stream
})
logger.info('sorting links...')
streams = streams.orderBy(
[
(stream: Stream) => stream.title,
(stream: Stream) => stream.getVerticalResolution(),
(stream: Stream) => stream.getLabel(),
(stream: Stream) => stream.url
],
['asc', 'desc', 'asc', 'asc']
)
logger.info('saving...')
const groupedStreams = streams.groupBy((stream: Stream) => stream.getFilepath())
for (const filepath of groupedStreams.keys()) {
const streams = groupedStreams.get(filepath) || []
if (!streams.length) continue
const playlist = new Playlist(streams, { public: false })
await streamsStorage.save(filepath, playlist.toString())
}
}
main()
import { Collection, Logger } from '@freearhey/core'
import { Stream, Playlist } from '../../models'
import { Storage } from '@freearhey/storage-js'
import { STREAMS_DIR } from '../../constants'
import { PlaylistParser } from '../../core'
import { loadData } from '../../api'
import { program } from 'commander'
import path from 'node:path'
program.argument('[filepath...]', 'Path to file to format').parse(process.argv)
async function main() {
const logger = new Logger()
logger.info('loading data from api...')
await loadData()
logger.info('loading streams...')
const streamsStorage = new Storage(STREAMS_DIR)
const parser = new PlaylistParser({
storage: streamsStorage
})
let files = program.args.length ? program.args : await streamsStorage.list('**/*.m3u')
files = files.map((filepath: string) => path.basename(filepath))
let streams = await parser.parse(files)
logger.info(`found ${streams.count()} streams`)
logger.info('normalizing links...')
streams = streams.map(stream => {
stream.normalizeURL()
return stream
})
logger.info('removing duplicates...')
streams = streams.uniqBy(stream => stream.url)
logger.info('removing wrong id...')
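// Clear the ID fields of streams whose channel is not found in the database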
streams = streams.map((stream: Stream) => {
const channel = stream.getChannel()
if (channel) return stream
stream.tvgId = ''
stream.channel = ''
stream.feed = ''
return stream
})
logger.info('adding the missing feed id...')
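// If the stream's feed can be resolved from the database, store its id and rebuild the tvg-id from it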
streams = streams.map((stream: Stream) => {
const feed = stream.getFeed()
if (feed) {
stream.feed = feed.id
stream.tvgId = stream.getId()
}
return stream
})
logger.info('sorting links...')
streams = streams.sortBy(
[
(stream: Stream) => stream.title,
(stream: Stream) => stream.getVerticalResolution(),
(stream: Stream) => stream.label,
(stream: Stream) => stream.url
],
['asc', 'desc', 'asc', 'asc']
)
logger.info('saving...')
const groupedStreams = streams.groupBy((stream: Stream) => stream.getFilepath())
for (const filepath of groupedStreams.keys()) {
const streams = new Collection(groupedStreams.get(filepath))
if (streams.isEmpty()) continue
const playlist = new Playlist(streams, { public: false })
await streamsStorage.save(filepath, playlist.toString())
}
}
main()


@@ -1,131 +1,115 @@
import { PlaylistParser, DataProcessor, DataLoader } from '../../core'
import type { DataProcessorData } from '../../types/dataProcessor'
import { DATA_DIR, LOGS_DIR, STREAMS_DIR } from '../../constants'
import type { DataLoaderData } from '../../types/dataLoader'
import { Logger, Storage, File } from '@freearhey/core'
import { Stream } from '../../models'
import uniqueId from 'lodash.uniqueid'
import {
IndexCategoryGenerator,
IndexLanguageGenerator,
IndexCountryGenerator,
SubdivisionsGenerator,
CategoriesGenerator,
CountriesGenerator,
LanguagesGenerator,
RegionsGenerator,
SourcesGenerator,
CitiesGenerator,
IndexGenerator,
RawGenerator
} from '../../generators'
async function main() {
const logger = new Logger()
const logFile = new File('generators.log')
logger.info('loading data from api...')
const processor = new DataProcessor()
const dataStorage = new Storage(DATA_DIR)
const loader = new DataLoader({ storage: dataStorage })
const data: DataLoaderData = await loader.load()
const {
feedsGroupedByChannelId,
logosGroupedByStreamId,
channelsKeyById,
subdivisions,
categories,
countries,
regions,
cities
}: DataProcessorData = processor.process(data)
logger.info('loading streams...')
const streamsStorage = new Storage(STREAMS_DIR)
const parser = new PlaylistParser({
storage: streamsStorage,
feedsGroupedByChannelId,
logosGroupedByStreamId,
channelsKeyById
})
const files = await streamsStorage.list('**/*.m3u')
let streams = await parser.parse(files)
const totalStreams = streams.count()
logger.info(`found ${totalStreams} streams`)
logger.info('generating raw/...')
await new RawGenerator({ streams, logFile }).generate()
logger.info('filtering streams...')
streams = streams.uniqBy((stream: Stream) =>
stream.hasId() ? stream.getChannelId() + stream.getFeedId() : uniqueId()
)
logger.info('sorting streams...')
streams = streams.orderBy(
[
(stream: Stream) => stream.getId(),
(stream: Stream) => stream.getVerticalResolution(),
(stream: Stream) => stream.getLabel()
],
['asc', 'asc', 'desc']
)
logger.info('generating categories/...')
await new CategoriesGenerator({ categories, streams, logFile }).generate()
logger.info('generating languages/...')
await new LanguagesGenerator({ streams, logFile }).generate()
logger.info('generating countries/...')
await new CountriesGenerator({
countries,
streams,
logFile
}).generate()
logger.info('generating subdivisions/...')
await new SubdivisionsGenerator({
subdivisions,
streams,
logFile
}).generate()
logger.info('generating cities/...')
await new CitiesGenerator({
cities,
streams,
logFile
}).generate()
logger.info('generating regions/...')
await new RegionsGenerator({
streams,
regions,
logFile
}).generate()
logger.info('generating sources/...')
await new SourcesGenerator({ streams, logFile }).generate()
logger.info('generating index.m3u...')
await new IndexGenerator({ streams, logFile }).generate()
logger.info('generating index.category.m3u...')
await new IndexCategoryGenerator({ streams, logFile }).generate()
logger.info('generating index.country.m3u...')
await new IndexCountryGenerator({
streams,
logFile
}).generate()
logger.info('generating index.language.m3u...')
await new IndexLanguageGenerator({ streams, logFile }).generate()
logger.info('saving generators.log...')
const logStorage = new Storage(LOGS_DIR)
logStorage.saveFile(logFile)
}
main()
import { LOGS_DIR, STREAMS_DIR } from '../../constants'
import { Storage, File } from '@freearhey/storage-js'
import { PlaylistParser } from '../../core'
import { loadData, data } from '../../api'
import { Logger } from '@freearhey/core'
import uniqueId from 'lodash.uniqueid'
import { Stream } from '../../models'
import {
IndexCategoryGenerator,
IndexLanguageGenerator,
IndexCountryGenerator,
SubdivisionsGenerator,
CategoriesGenerator,
CountriesGenerator,
LanguagesGenerator,
RegionsGenerator,
SourcesGenerator,
CitiesGenerator,
IndexGenerator,
RawGenerator
} from '../../generators'
async function main() {
const logger = new Logger()
const logFile = new File('generators.log')
logger.info('loading data from api...')
await loadData()
logger.info('loading streams...')
const streamsStorage = new Storage(STREAMS_DIR)
const parser = new PlaylistParser({
storage: streamsStorage
})
const files = await streamsStorage.list('**/*.m3u')
let streams = await parser.parse(files)
const totalStreams = streams.count()
logger.info(`found ${totalStreams} streams`)
logger.info('generating raw/...')
await new RawGenerator({ streams, logFile }).generate()
logger.info('filtering streams...')
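// Deduplicate streams by ID; streams without an ID get a unique key so none of them are dropped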
streams = streams.uniqBy((stream: Stream) => stream.getId() || uniqueId())
logger.info('sorting streams...')
streams = streams.sortBy(
[
(stream: Stream) => stream.getId(),
(stream: Stream) => stream.getVerticalResolution(),
(stream: Stream) => stream.label
],
['asc', 'asc', 'desc']
)
const { categories, countries, subdivisions, cities, regions } = data
logger.info('generating categories/...')
await new CategoriesGenerator({ categories, streams, logFile }).generate()
logger.info('generating languages/...')
await new LanguagesGenerator({ streams, logFile }).generate()
logger.info('generating countries/...')
await new CountriesGenerator({
countries,
streams,
logFile
}).generate()
logger.info('generating subdivisions/...')
await new SubdivisionsGenerator({
subdivisions,
streams,
logFile
}).generate()
logger.info('generating cities/...')
await new CitiesGenerator({
cities,
streams,
logFile
}).generate()
logger.info('generating regions/...')
await new RegionsGenerator({
streams,
regions,
logFile
}).generate()
logger.info('generating sources/...')
await new SourcesGenerator({ streams, logFile }).generate()
logger.info('generating index.m3u...')
await new IndexGenerator({ streams, logFile }).generate()
logger.info('generating index.category.m3u...')
await new IndexCategoryGenerator({ streams, logFile }).generate()
logger.info('generating index.country.m3u...')
await new IndexCountryGenerator({
streams,
logFile
}).generate()
logger.info('generating index.language.m3u...')
await new IndexLanguageGenerator({ streams, logFile }).generate()
logger.info('saving generators.log...')
const logStorage = new Storage(LOGS_DIR)
logStorage.saveFile(logFile)
}
main()


@@ -1,182 +1,177 @@
import { Logger, Storage, Collection } from '@freearhey/core'
import { ROOT_DIR, STREAMS_DIR, DATA_DIR } from '../../constants'
import { PlaylistParser, StreamTester, CliTable, DataProcessor, DataLoader } from '../../core'
import type { TestResult } from '../../core/streamTester'
import { Stream } from '../../models'
import { program, OptionValues } from 'commander'
import { eachLimit } from 'async-es'
import chalk from 'chalk'
import os from 'node:os'
import dns from 'node:dns'
import type { DataLoaderData } from '../../types/dataLoader'
import type { DataProcessorData } from '../../types/dataProcessor'
const LIVE_UPDATE_INTERVAL = 5000
const LIVE_UPDATE_MAX_STREAMS = 100
let errors = 0
let warnings = 0
const results: { [key: string]: string } = {}
let interval: string | number | NodeJS.Timeout | undefined
let streams = new Collection()
let isLiveUpdateEnabled = true
program
.argument('[filepath...]', 'Path to file to test')
.option(
'-p, --parallel <number>',
'Batch size of streams to test concurrently',
(value: string) => parseInt(value),
os.cpus().length
)
.option('-x, --proxy <url>', 'Use the specified proxy')
.option(
'-t, --timeout <number>',
'The number of milliseconds before the request will be aborted',
(value: string) => parseInt(value),
30000
)
.parse(process.argv)
const options: OptionValues = program.opts()
const logger = new Logger()
const tester = new StreamTester({ options })
async function main() {
if (await isOffline()) {
logger.error(chalk.red('Internet connection is required for the script to work'))
return
}
logger.info('loading data from api...')
const processor = new DataProcessor()
const dataStorage = new Storage(DATA_DIR)
const loader = new DataLoader({ storage: dataStorage })
const data: DataLoaderData = await loader.load()
const { channelsKeyById, feedsGroupedByChannelId, logosGroupedByStreamId }: DataProcessorData =
processor.process(data)
logger.info('loading streams...')
const rootStorage = new Storage(ROOT_DIR)
const parser = new PlaylistParser({
storage: rootStorage,
channelsKeyById,
feedsGroupedByChannelId,
logosGroupedByStreamId
})
const files = program.args.length ? program.args : await rootStorage.list(`${STREAMS_DIR}/*.m3u`)
streams = await parser.parse(files)
logger.info(`found ${streams.count()} streams`)
if (streams.count() > LIVE_UPDATE_MAX_STREAMS) isLiveUpdateEnabled = false
logger.info('starting...')
if (!isLiveUpdateEnabled) {
drawTable()
interval = setInterval(() => {
drawTable()
}, LIVE_UPDATE_INTERVAL)
}
await eachLimit(
streams.all(),
options.parallel,
async (stream: Stream) => {
await runTest(stream)
if (isLiveUpdateEnabled) {
drawTable()
}
},
onFinish
)
}
main()
async function runTest(stream: Stream) {
const key = stream.filepath + stream.getId() + stream.url
results[key] = chalk.white('LOADING...')
const result: TestResult = await tester.test(stream)
let status = ''
const errorStatusCodes = ['ENOTFOUND', 'HTTP_404_NOT_FOUND']
if (result.status.ok) status = chalk.green('OK')
else if (errorStatusCodes.includes(result.status.code)) {
status = chalk.red(result.status.code)
errors++
} else {
status = chalk.yellow(result.status.code)
warnings++
}
results[key] = status
}
function drawTable() {
process.stdout.write('\u001b[3J\u001b[1J')
console.clear()
const streamsGrouped = streams.groupBy((stream: Stream) => stream.filepath)
for (const filepath of streamsGrouped.keys()) {
const streams: Stream[] = streamsGrouped.get(filepath)
const table = new CliTable({
columns: [
{ name: '', alignment: 'center', minLen: 3, maxLen: 3 },
{ name: 'tvg-id', alignment: 'left', color: 'green', minLen: 25, maxLen: 25 },
{ name: 'url', alignment: 'left', color: 'green', minLen: 100, maxLen: 100 },
{ name: 'status', alignment: 'left', minLen: 25, maxLen: 25 }
]
})
streams.forEach((stream: Stream, index: number) => {
const status = results[stream.filepath + stream.getId() + stream.url] || chalk.gray('PENDING')
const row = {
'': index,
'tvg-id': stream.getId().length > 25 ? stream.getId().slice(0, 22) + '...' : stream.getId(),
url: stream.url.length > 100 ? stream.url.slice(0, 97) + '...' : stream.url,
status
}
table.append(row)
})
process.stdout.write(`\n${chalk.underline(filepath)}\n`)
process.stdout.write(table.toString())
}
}
function onFinish(error: any) {
clearInterval(interval)
if (error) {
console.error(error)
process.exit(1)
}
drawTable()
if (errors > 0 || warnings > 0) {
console.log(
chalk.red(`\n${errors + warnings} problems (${errors} errors, ${warnings} warnings)`)
)
if (errors > 0) {
process.exit(1)
}
}
process.exit(0)
}
async function isOffline(): Promise<boolean> {
return new Promise(resolve => {
dns.lookup('info.cern.ch', err => {
resolve(Boolean(err))
})
})
}
import { PlaylistParser, StreamTester, CliTable } from '../../core'
import type { TestResult } from '../../core/streamTester'
import { ROOT_DIR, STREAMS_DIR } from '../../constants'
import { Logger, Collection } from '@freearhey/core'
import { program, OptionValues } from 'commander'
import { Storage } from '@freearhey/storage-js'
import { Stream } from '../../models'
import { loadData } from '../../api'
import { eachLimit } from 'async'
import dns from 'node:dns'
import chalk from 'chalk'
import os from 'node:os'
import { truncate } from '../../utils'
const LIVE_UPDATE_INTERVAL = 5000
const LIVE_UPDATE_MAX_STREAMS = 100
let errors = 0
let warnings = 0
const results: { [key: string]: string } = {}
let interval: string | number | NodeJS.Timeout | undefined
let streams = new Collection<Stream>()
let isLiveUpdateEnabled = true
program
.argument('[filepath...]', 'Path to file to test')
.option(
'-p, --parallel <number>',
'Batch size of streams to test concurrently',
(value: string) => parseInt(value),
os.cpus().length
)
.option('-x, --proxy <url>', 'Use the specified proxy')
.option(
'-t, --timeout <number>',
'The number of milliseconds before the request will be aborted',
(value: string) => parseInt(value),
30000
)
.parse(process.argv)
const options: OptionValues = program.opts()
const logger = new Logger()
const tester = new StreamTester({ options })
async function main() {
if (await isOffline()) {
logger.error(chalk.red('Internet connection is required for the script to work'))
return
}
logger.info('loading data from api...')
await loadData()
logger.info('loading streams...')
const rootStorage = new Storage(ROOT_DIR)
const parser = new PlaylistParser({
storage: rootStorage
})
const files = program.args.length ? program.args : await rootStorage.list(`${STREAMS_DIR}/*.m3u`)
streams = await parser.parse(files)
logger.info(`found ${streams.count()} streams`)
if (streams.count() > LIVE_UPDATE_MAX_STREAMS) isLiveUpdateEnabled = false
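// Redrawing the table after every result is too expensive for large playlists, so fall back to a timer-based redraw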
logger.info('starting...')
if (!isLiveUpdateEnabled) {
drawTable()
interval = setInterval(() => {
drawTable()
}, LIVE_UPDATE_INTERVAL)
}
eachLimit(
streams.all(),
options.parallel,
async (stream: Stream) => {
await runTest(stream)
if (isLiveUpdateEnabled) {
drawTable()
}
},
onFinish
)
}
main()
async function runTest(stream: Stream) {
const key = stream.getUniqKey()
results[key] = chalk.white('LOADING...')
const result: TestResult = await tester.test(stream)
let status = ''
const errorStatusCodes = ['ENOTFOUND', 'HTTP_404_NOT_FOUND']
if (result.status.ok) status = chalk.green('OK')
else if (errorStatusCodes.includes(result.status.code)) {
status = chalk.red(result.status.code)
errors++
} else {
status = chalk.yellow(result.status.code)
warnings++
}
results[key] = status
}
function drawTable() {
process.stdout.write('\u001b[3J\u001b[1J')
console.clear()
const streamsGrouped = streams.groupBy((stream: Stream) => stream.filepath)
for (const filepath of streamsGrouped.keys()) {
const streams: Stream[] = streamsGrouped.get(filepath) || []
const table = new CliTable({
columns: [
{ name: '', alignment: 'center', minLen: 3, maxLen: 3 },
{ name: 'tvg-id', alignment: 'left', color: 'green', minLen: 25, maxLen: 25 },
{ name: 'url', alignment: 'left', color: 'green', minLen: 100, maxLen: 100 },
{ name: 'status', alignment: 'left', minLen: 25, maxLen: 25 }
]
})
streams.forEach((stream: Stream, index: number) => {
const key = stream.getUniqKey()
const status = results[key] || chalk.gray('PENDING')
const tvgId = stream.getTvgId()
const row = {
'': index,
'tvg-id': truncate(tvgId, 25),
url: truncate(stream.url, 100),
status
}
table.append(row)
})
process.stdout.write(`\n${chalk.underline(filepath)}\n`)
process.stdout.write(table.toString())
}
}
function onFinish(error: Error) {
clearInterval(interval)
if (error) {
console.error(error)
process.exit(1)
}
drawTable()
if (errors > 0 || warnings > 0) {
console.log(
chalk.red(`\n${errors + warnings} problems (${errors} errors, ${warnings} warnings)`)
)
if (errors > 0) {
process.exit(1)
}
}
process.exit(0)
}
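// Resolves to true when the DNS lookup fails, which is treated as missing internet connectivity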
async function isOffline(): Promise<boolean> {
return new Promise(resolve => {
dns.lookup('info.cern.ch', err => {
resolve(Boolean(err))
})
})
}


@@ -1,194 +1,174 @@
import { DataLoader, DataProcessor, IssueLoader, PlaylistParser } from '../../core'
import { Logger, Storage, Collection, Dictionary } from '@freearhey/core'
import type { DataProcessorData } from '../../types/dataProcessor'
import { Stream, Playlist, Channel, Issue } from '../../models'
import type { DataLoaderData } from '../../types/dataLoader'
import { DATA_DIR, STREAMS_DIR } from '../../constants'
import { isURI } from '../../utils'
const processedIssues = new Collection()
async function main() {
const logger = new Logger({ level: -999 })
const issueLoader = new IssueLoader()
logger.info('loading issues...')
const issues = await issueLoader.load()
logger.info('loading data from api...')
const processor = new DataProcessor()
const dataStorage = new Storage(DATA_DIR)
const dataLoader = new DataLoader({ storage: dataStorage })
const data: DataLoaderData = await dataLoader.load()
const { channelsKeyById, feedsGroupedByChannelId, logosGroupedByStreamId }: DataProcessorData =
processor.process(data)
logger.info('loading streams...')
const streamsStorage = new Storage(STREAMS_DIR)
const parser = new PlaylistParser({
storage: streamsStorage,
feedsGroupedByChannelId,
logosGroupedByStreamId,
channelsKeyById
})
const files = await streamsStorage.list('**/*.m3u')
const streams = await parser.parse(files)
logger.info('removing streams...')
await removeStreams({ streams, issues })
logger.info('editing stream descriptions...')
await editStreams({
streams,
issues,
channelsKeyById,
feedsGroupedByChannelId
})
logger.info('adding new streams...')
await addStreams({
streams,
issues,
channelsKeyById,
feedsGroupedByChannelId
})
logger.info('saving...')
const groupedStreams = streams.groupBy((stream: Stream) => stream.getFilepath())
for (const filepath of groupedStreams.keys()) {
let streams = groupedStreams.get(filepath) || []
streams = streams.filter((stream: Stream) => stream.removed === false)
const playlist = new Playlist(streams, { public: false })
await streamsStorage.save(filepath, playlist.toString())
}
const output = processedIssues.map(issue_number => `closes #${issue_number}`).join(', ')
console.log(`OUTPUT=${output}`)
}
main()
async function removeStreams({ streams, issues }: { streams: Collection; issues: Collection }) {
const requests = issues.filter(
issue => issue.labels.includes('streams:remove') && issue.labels.includes('approved')
)
requests.forEach((issue: Issue) => {
const data = issue.data
if (data.missing('streamUrl')) return
const streamUrls = data.getString('streamUrl') || ''
let changed = false
streamUrls
.split(/\r?\n/)
.filter(Boolean)
.forEach(link => {
const found: Stream = streams.first((_stream: Stream) => _stream.url === link.trim())
if (found) {
found.removed = true
changed = true
}
})
if (changed) processedIssues.add(issue.number)
})
}
async function editStreams({
streams,
issues,
channelsKeyById,
feedsGroupedByChannelId
}: {
streams: Collection
issues: Collection
channelsKeyById: Dictionary
feedsGroupedByChannelId: Dictionary
}) {
const requests = issues.filter(
issue => issue.labels.includes('streams:edit') && issue.labels.includes('approved')
)
requests.forEach((issue: Issue) => {
const data = issue.data
if (data.missing('streamUrl')) return
const stream: Stream = streams.first(
(_stream: Stream) => _stream.url === data.getString('streamUrl')
)
if (!stream) return
const streamId = data.getString('streamId') || ''
const [channelId, feedId] = streamId.split('@')
if (channelId) {
stream
.setChannelId(channelId)
.setFeedId(feedId)
.withChannel(channelsKeyById)
.withFeed(feedsGroupedByChannelId)
.updateId()
.updateTitle()
.updateFilepath()
}
stream.update(data)
processedIssues.add(issue.number)
})
}
async function addStreams({
streams,
issues,
channelsKeyById,
feedsGroupedByChannelId
}: {
streams: Collection
issues: Collection
channelsKeyById: Dictionary
feedsGroupedByChannelId: Dictionary
}) {
const requests = issues.filter(
issue => issue.labels.includes('streams:add') && issue.labels.includes('approved')
)
requests.forEach((issue: Issue) => {
const data = issue.data
if (data.missing('streamId') || data.missing('streamUrl')) return
if (streams.includes((_stream: Stream) => _stream.url === data.getString('streamUrl'))) return
const streamUrl = data.getString('streamUrl') || ''
if (!isURI(streamUrl)) return
const streamId = data.getString('streamId') || ''
const [channelId, feedId] = streamId.split('@')
const channel: Channel = channelsKeyById.get(channelId)
if (!channel) return
const label = data.getString('label') || null
const quality = data.getString('quality') || null
const httpUserAgent = data.getString('httpUserAgent') || null
const httpReferrer = data.getString('httpReferrer') || null
const directives = data.getArray('directives') || []
const stream = new Stream({
channelId,
feedId,
title: channel.name,
url: streamUrl,
userAgent: httpUserAgent,
referrer: httpReferrer,
directives,
quality,
label
})
.withChannel(channelsKeyById)
.withFeed(feedsGroupedByChannelId)
.updateTitle()
.updateFilepath()
streams.add(stream)
processedIssues.add(issue.number)
})
}
import { IssueLoader, PlaylistParser } from '../../core'
import { Playlist, Issue, Stream } from '../../models'
import { loadData, data as apiData } from '../../api'
import { Logger, Collection } from '@freearhey/core'
import { Storage } from '@freearhey/storage-js'
import { STREAMS_DIR } from '../../constants'
import * as sdk from '@iptv-org/sdk'
import { isURI } from '../../utils'
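// Numbers of the issues handled in this run, printed at the end as a "closes #..." list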
const processedIssues = new Collection()
async function main() {
const logger = new Logger({ level: -999 })
const issueLoader = new IssueLoader()
logger.info('loading issues...')
const issues = await issueLoader.load()
logger.info('loading data from api...')
await loadData()
logger.info('loading streams...')
const streamsStorage = new Storage(STREAMS_DIR)
const parser = new PlaylistParser({
storage: streamsStorage
})
const files = await streamsStorage.list('**/*.m3u')
const streams = await parser.parse(files)
logger.info('removing streams...')
await removeStreams({ streams, issues })
logger.info('editing stream descriptions...')
await editStreams({
streams,
issues
})
logger.info('adding new streams...')
await addStreams({
streams,
issues
})
logger.info('saving...')
const groupedStreams = streams.groupBy((stream: Stream) => stream.getFilepath())
for (const filepath of groupedStreams.keys()) {
let streams = new Collection(groupedStreams.get(filepath))
streams = streams.filter((stream: Stream) => stream.removed === false)
const playlist = new Playlist(streams, { public: false })
await streamsStorage.save(filepath, playlist.toString())
}
const output = processedIssues.map(issue_number => `closes #${issue_number}`).join(', ')
console.log(`OUTPUT=${output}`)
}
main()
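// Marks streams from approved "streams:remove" issues as removed; they are filtered out before saving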
async function removeStreams({
streams,
issues
}: {
streams: Collection<Stream>
issues: Collection<Issue>
}) {
const requests = issues.filter(
issue => issue.labels.includes('streams:remove') && issue.labels.includes('approved')
)
requests.forEach((issue: Issue) => {
const data = issue.data
if (data.missing('streamUrl')) return
const streamUrls = data.getString('streamUrl') || ''
let changed = false
streamUrls
.split(/\r?\n/)
.filter(Boolean)
.forEach(link => {
const found: Stream = streams.first((_stream: Stream) => _stream.url === link.trim())
if (found) {
found.removed = true
changed = true
}
})
if (changed) processedIssues.add(issue.number)
})
}
async function editStreams({
streams,
issues
}: {
streams: Collection<Stream>
issues: Collection<Issue>
}) {
const requests = issues.filter(
issue => issue.labels.includes('streams:edit') && issue.labels.includes('approved')
)
requests.forEach((issue: Issue) => {
const data = issue.data
if (data.missing('streamUrl')) return
const stream: Stream = streams.first(
(_stream: Stream) => _stream.url === data.getString('streamUrl')
)
if (!stream) return
const streamId = data.getString('streamId') || ''
const [channelId, feedId] = streamId.split('@')
if (channelId) {
stream.channel = channelId
stream.feed = feedId
stream.updateTvgId().updateTitle().updateFilepath()
}
stream.updateWithIssue(data)
processedIssues.add(issue.number)
})
}
async function addStreams({
streams,
issues
}: {
streams: Collection<Stream>
issues: Collection<Issue>
}) {
const requests = issues.filter(
issue => issue.labels.includes('streams:add') && issue.labels.includes('approved')
)
requests.forEach((issue: Issue) => {
const data = issue.data
if (data.missing('streamId') || data.missing('streamUrl')) return
if (streams.includes((_stream: Stream) => _stream.url === data.getString('streamUrl'))) return
const streamUrl = data.getString('streamUrl') || ''
if (!isURI(streamUrl)) return
const streamId = data.getString('streamId') || ''
const [channelId, feedId] = streamId.split('@')
const channel: sdk.Models.Channel | undefined = apiData.channelsKeyById.get(channelId)
if (!channel) return
const label = data.getString('label') || ''
const quality = data.getString('quality') || null
const httpUserAgent = data.getString('httpUserAgent') || null
const httpReferrer = data.getString('httpReferrer') || null
const directives = data.getArray('directives') || []
const stream = new Stream({
channel: channelId,
feed: feedId,
title: channel.name,
url: streamUrl,
user_agent: httpUserAgent,
referrer: httpReferrer,
quality
})
stream.label = label
stream.setDirectives(directives).updateTitle().updateFilepath()
streams.add(stream)
processedIssues.add(issue.number)
})
}


@@ -1,129 +1,120 @@
import { Logger, Storage, Collection, Dictionary } from '@freearhey/core'
import { DataLoader, DataProcessor, PlaylistParser } from '../../core'
import { DataProcessorData } from '../../types/dataProcessor'
import { DATA_DIR, ROOT_DIR } from '../../constants'
import { DataLoaderData } from '../../types/dataLoader'
import { BlocklistRecord, Stream } from '../../models'
import { program } from 'commander'
import chalk from 'chalk'
program.argument('[filepath...]', 'Path to file to validate').parse(process.argv)
type LogItem = {
type: string
line: number
message: string
}
async function main() {
const logger = new Logger()
logger.info('loading data from api...')
const processor = new DataProcessor()
const dataStorage = new Storage(DATA_DIR)
const loader = new DataLoader({ storage: dataStorage })
const data: DataLoaderData = await loader.load()
const {
channelsKeyById,
feedsGroupedByChannelId,
logosGroupedByStreamId,
blocklistRecordsGroupedByChannelId
}: DataProcessorData = processor.process(data)
logger.info('loading streams...')
const rootStorage = new Storage(ROOT_DIR)
const parser = new PlaylistParser({
storage: rootStorage,
channelsKeyById,
feedsGroupedByChannelId,
logosGroupedByStreamId
})
const files = program.args.length ? program.args : await rootStorage.list('streams/**/*.m3u')
const streams = await parser.parse(files)
logger.info(`found ${streams.count()} streams`)
let errors = new Collection()
let warnings = new Collection()
const streamsGroupedByFilepath = streams.groupBy((stream: Stream) => stream.getFilepath())
for (const filepath of streamsGroupedByFilepath.keys()) {
const streams = streamsGroupedByFilepath.get(filepath)
if (!streams) continue
const log = new Collection()
const buffer = new Dictionary()
streams.forEach((stream: Stream) => {
if (stream.channelId) {
const channel = channelsKeyById.get(stream.channelId)
if (!channel) {
log.add({
type: 'warning',
line: stream.getLine(),
message: `"${stream.id}" is not in the database`
})
}
}
const duplicate = stream.url && buffer.has(stream.url)
if (duplicate) {
log.add({
type: 'warning',
line: stream.getLine(),
message: `"${stream.url}" is already on the playlist`
})
} else {
buffer.set(stream.url, true)
}
const blocklistRecords = stream.channel
? new Collection(blocklistRecordsGroupedByChannelId.get(stream.channel.id))
: new Collection()
blocklistRecords.forEach((blocklistRecord: BlocklistRecord) => {
if (blocklistRecord.reason === 'dmca') {
log.add({
type: 'error',
line: stream.getLine(),
message: `"${blocklistRecord.channelId}" is on the blocklist due to claims of copyright holders (${blocklistRecord.ref})`
})
} else if (blocklistRecord.reason === 'nsfw') {
log.add({
type: 'error',
line: stream.getLine(),
message: `"${blocklistRecord.channelId}" is on the blocklist due to NSFW content (${blocklistRecord.ref})`
})
}
})
})
if (log.notEmpty()) {
console.log(`\n${chalk.underline(filepath)}`)
log.forEach((logItem: LogItem) => {
const position = logItem.line.toString().padEnd(6, ' ')
const type = logItem.type.padEnd(9, ' ')
const status = logItem.type === 'error' ? chalk.red(type) : chalk.yellow(type)
console.log(` ${chalk.gray(position)}${status}${logItem.message}`)
})
errors = errors.concat(log.filter((logItem: LogItem) => logItem.type === 'error'))
warnings = warnings.concat(log.filter((logItem: LogItem) => logItem.type === 'warning'))
}
}
if (errors.count() || warnings.count()) {
console.log(
chalk.red(
`\n${
errors.count() + warnings.count()
} problems (${errors.count()} errors, ${warnings.count()} warnings)`
)
)
if (errors.count()) {
process.exit(1)
}
}
}
main()
import { Logger, Collection, Dictionary } from '@freearhey/core'
import { Storage } from '@freearhey/storage-js'
import { PlaylistParser } from '../../core'
import { data, loadData } from '../../api'
import { ROOT_DIR } from '../../constants'
import { Stream } from '../../models'
import * as sdk from '@iptv-org/sdk'
import { program } from 'commander'
import chalk from 'chalk'
program.argument('[filepath...]', 'Path to file to validate').parse(process.argv)
type LogItem = {
type: string
line: number
message: string
}
async function main() {
const logger = new Logger()
logger.info('loading data from api...')
await loadData()
logger.info('loading streams...')
const rootStorage = new Storage(ROOT_DIR)
const parser = new PlaylistParser({
storage: rootStorage
})
const files = program.args.length ? program.args : await rootStorage.list('streams/**/*.m3u')
const streams = await parser.parse(files)
logger.info(`found ${streams.count()} streams`)
let errors = new Collection()
let warnings = new Collection()
const streamsGroupedByFilepath = streams.groupBy((stream: Stream) => stream.getFilepath())
for (const filepath of streamsGroupedByFilepath.keys()) {
const streams = streamsGroupedByFilepath.get(filepath)
if (!streams) continue
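// "buffer" keeps track of URLs already seen in this playlist so duplicates can be flagged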
const log = new Collection<LogItem>()
const buffer = new Dictionary<boolean>()
streams.forEach((stream: Stream) => {
if (stream.channel) {
const channel = data.channelsKeyById.get(stream.channel)
if (!channel) {
log.add({
type: 'warning',
line: stream.getLine(),
message: `"${stream.tvgId}" is not in the database`
})
}
}
const duplicate = stream.url && buffer.has(stream.url)
if (duplicate) {
log.add({
type: 'warning',
line: stream.getLine(),
message: `"${stream.url}" is already on the playlist`
})
} else {
buffer.set(stream.url, true)
}
if (stream.channel) {
const blocklistRecords = new Collection(
data.blocklistRecordsGroupedByChannel.get(stream.channel)
)
blocklistRecords.forEach((blocklistRecord: sdk.Models.BlocklistRecord) => {
if (blocklistRecord.reason === 'dmca') {
log.add({
type: 'error',
line: stream.getLine(),
message: `"${blocklistRecord.channel}" is on the blocklist due to claims of copyright holders (${blocklistRecord.ref})`
})
} else if (blocklistRecord.reason === 'nsfw') {
log.add({
type: 'error',
line: stream.getLine(),
message: `"${blocklistRecord.channel}" is on the blocklist due to NSFW content (${blocklistRecord.ref})`
})
}
})
}
})
if (log.isNotEmpty()) {
console.log(`\n${chalk.underline(filepath)}`)
log.forEach((logItem: LogItem) => {
const position = logItem.line.toString().padEnd(6, ' ')
const type = logItem.type.padEnd(9, ' ')
const status = logItem.type === 'error' ? chalk.red(type) : chalk.yellow(type)
console.log(` ${chalk.gray(position)}${status}${logItem.message}`)
})
errors = errors.concat(log.filter((logItem: LogItem) => logItem.type === 'error'))
warnings = warnings.concat(log.filter((logItem: LogItem) => logItem.type === 'warning'))
}
}
if (errors.count() || warnings.count()) {
console.log(
chalk.red(
`\n${
errors.count() + warnings.count()
} problems (${errors.count()} errors, ${warnings.count()} warnings)`
)
)
if (errors.count()) {
process.exit(1)
}
}
}
main()