Merge pull request #2909 from iptv-org/patch-2025.10.2

Patch 2025.10.2
This commit is contained in:
Ismaël Moret
2025-10-31 18:06:06 +01:00
committed by GitHub
12 changed files with 274 additions and 190 deletions

View File

@@ -31,7 +31,6 @@ jobs:
- name: Get list of changed *.channels.xml files
id: files
run: |
git fetch origin master:master
CHANNELS_ANY_CHANGED=false
CHANNELS_ALL_CHANGED_FILES=$(git diff --diff-filter=ACMRT --name-only master -- sites/**/*.channels.xml | tr '\n' ' ')
if [ -n "${CHANNELS_ALL_CHANGED_FILES}" ]; then

View File

@@ -66,9 +66,16 @@ module.exports = {
}
```
### Context Object
### Request Context Object
From each function in `config.js` you can access a `context` object containing the following data:
Inside `url()`, `logo()`, `request.data()`, `request.headers()` functions in `*.config.js` you can access a `context` object containing the following data:
- `channel`: The object describing the current channel (xmltv_id, site_id, name, lang)
- `date`: The 'dayjs' instance with the requested date
### Parser Context Object
Inside `parser()` function in `*.config.js` you can access a `context` object containing the following data:
- `channel`: The object describing the current channel (xmltv_id, site_id, name, lang)
- `date`: The 'dayjs' instance with the requested date
@@ -427,7 +434,7 @@ To run scripts use the `npm run <script-name>` command.
- `api:generate`: generates a JSON file with all channels for the [iptv-org/api](https://github.com/iptv-org/api) repository.
- `channels:lint`: checks the channel lists for syntax errors.
- `channels:parse`: generates a list of channels based on the site configuration.
- `channels:parse`: formats `*.channels.xml` files. The process involves removing invalid `xmltv_id`, adding missing Feed ID, and sorting the list.
- `channels:format`: formats `*.channels.xml` files. The process involves removing invalid `xmltv_id`, adding missing Feed ID, and sorting the list.
- `channels:edit`: utility for quick channels mapping.
- `channels:validate`: checks the description of channels for errors.
- `sites:init`: creates a new site config from the template.

View File

@@ -1,25 +1,22 @@
FROM node:22-alpine
ARG GIT_REPO=https://github.com/iptv-org/epg.git
ARG GIT_BRANCH=master
ARG WORKDIR=/epg
ENV CRON_SCHEDULE="0 0 * * *"
ENV GZIP=false
ENV MAX_CONNECTIONS=1
ENV DAYS=
ENV RUN_AT_STARTUP=true
RUN apk update \
&& apk upgrade --available \
&& apk add curl git tzdata bash \
&& npm install -g npm@latest \
&& npm install pm2 -g \
&& mkdir $(echo "${WORKDIR}") -p \
&& cd $WORKDIR \
&& git clone --depth 1 -b $(echo "${GIT_BRANCH} ${GIT_REPO}") . \
&& npm install \
&& mkdir /public
RUN apk del git curl \
&& rm -rf /var/cache/apk/*
COPY pm2.config.js $WORKDIR
WORKDIR $WORKDIR
EXPOSE 3000
FROM node:22-alpine
ARG GIT_REPO=https://github.com/iptv-org/epg.git
ARG GIT_BRANCH=master
ARG WORKDIR=/epg
ENV CRON_SCHEDULE="0 0 * * *"
ENV RUN_AT_STARTUP=true
RUN apk update \
&& apk upgrade --available \
&& apk add curl git tzdata bash \
&& npm install -g npm@latest \
&& npm install pm2 -g \
&& mkdir $(echo "${WORKDIR}") -p \
&& cd $WORKDIR \
&& git clone --depth 1 -b $(echo "${GIT_BRANCH} ${GIT_REPO}") . \
&& npm install \
&& mkdir /public
RUN apk del git curl \
&& rm -rf /var/cache/apk/*
COPY pm2.config.js $WORKDIR
WORKDIR $WORKDIR
EXPOSE 3000
CMD [ "pm2-runtime", "pm2.config.js" ]

131
package-lock.json generated
View File

@@ -48,9 +48,10 @@
"consola": "^3.4.2",
"cross-env": "^10.0.0",
"csv-parser": "^3.2.0",
"curl-generator": "^0.5.0",
"cwait": "^1.1.2",
"dayjs": "^1.11.13",
"epg-grabber": "^0.44.0",
"epg-grabber": "^0.45.0",
"epg-parser": "^0.3.1",
"eslint": "^9.32.0",
"eslint-config-prettier": "^10.1.8",
@@ -3425,6 +3426,14 @@
"@types/lodash": "*"
}
},
"node_modules/@types/lodash.padstart": {
"version": "4.6.9",
"resolved": "https://registry.npmjs.org/@types/lodash.padstart/-/lodash.padstart-4.6.9.tgz",
"integrity": "sha512-KVXQ65AiorTc+Dn9eSRZDs1SnzXULRJcMYhCDEIgsRtHU7mbVpghPSxkySh3Vgm+doWVzpJCA24259fkRL46sA==",
"dependencies": {
"@types/lodash": "*"
}
},
"node_modules/@types/lodash.sortby": {
"version": "4.7.9",
"resolved": "https://registry.npmjs.org/@types/lodash.sortby/-/lodash.sortby-4.7.9.tgz",
@@ -4243,6 +4252,18 @@
"tough-cookie": ">=4.0.0"
}
},
"node_modules/axios-mock-adapter": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/axios-mock-adapter/-/axios-mock-adapter-2.1.0.tgz",
"integrity": "sha512-AZUe4OjECGCNNssH8SOdtneiQELsqTsat3SQQCWLPjN436/H+L9AjWfV7bF+Zg/YL9cgbhrz5671hoh+Tbn98w==",
"dependencies": {
"fast-deep-equal": "^3.1.3",
"is-buffer": "^2.0.5"
},
"peerDependencies": {
"axios": ">= 0.17.0"
}
},
"node_modules/babel-jest": {
"version": "30.0.5",
"resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-30.0.5.tgz",
@@ -5192,9 +5213,9 @@
"integrity": "sha512-yi1x3EAWKjQTreYWeSd98431AV+IEE0qoDyOoaHJ7KJ21gv6HtBXHVLX74opVSGqcR8/AbjJBHAHpcOy2bj5Gg=="
},
"node_modules/curl-generator": {
"version": "0.4.2",
"resolved": "https://registry.npmjs.org/curl-generator/-/curl-generator-0.4.2.tgz",
"integrity": "sha512-YD74vaPyL46XYNbyRCJV91EhYGDrE/EBiW0X/NUIrNZ23jD0Uwr/6vMrCmobYi5KrjtrqN4SnmMhQNYh3qaULw==",
"version": "0.5.0",
"resolved": "https://registry.npmjs.org/curl-generator/-/curl-generator-0.5.0.tgz",
"integrity": "sha512-dKmp63AJfNSplajvPoWIRfBOmp0IT8NETJ9sFw9IABEUTiJqtXdJx09mXxUlTgVCZDYWVxkCPds+mO9puf1J+w==",
"dependencies": {
"ms": "^2.0.0"
}
@@ -5456,17 +5477,19 @@
}
},
"node_modules/epg-grabber": {
"version": "0.44.0",
"resolved": "https://registry.npmjs.org/epg-grabber/-/epg-grabber-0.44.0.tgz",
"integrity": "sha512-M4k/PG1/OIbHV7p8rM23yDWig6WwtpB/LafRzealc8A50HDOGskmkvuhzxf1S34Oe8xL3aU529oW2vocrpijuA==",
"version": "0.45.0",
"resolved": "https://registry.npmjs.org/epg-grabber/-/epg-grabber-0.45.0.tgz",
"integrity": "sha512-GqjXRYOJcC3mX9OYdZHlpNEhwBbCF256uKGPXdUodUjkXTmraolIEXYRUeQJmPbL5/do766EewNwIBXqLzHkJA==",
"dependencies": {
"@freearhey/core": "^0.14.0",
"@types/bluebird": "^3.5.42",
"@types/fs-extra": "^11.0.4",
"@types/lodash": "^4.17.20",
"@types/lodash.merge": "^4.6.9",
"@types/lodash.padstart": "^4.6.9",
"@types/pako": "^2.0.4",
"axios": "^1.12.2",
"axios-cache-interceptor": "^1.8.3",
"axios-mock-adapter": "^2.1.0",
"bluebird": "^3.7.2",
"commander": "^14.0.0",
"curl-generator": "^0.4.2",
@@ -5474,7 +5497,8 @@
"dayjs": "^1.11.18",
"fs-extra": "^11.3.0",
"glob": "^11.0.3",
"lodash": "^4.17.21",
"lodash.merge": "^4.6.2",
"lodash.padstart": "^4.6.1",
"pako": "^2.1.0",
"socks-proxy-agent": "^8.0.5",
"winston": "^3.17.0",
@@ -5505,6 +5529,14 @@
"timer-node": "^5.0.9"
}
},
"node_modules/epg-grabber/node_modules/curl-generator": {
"version": "0.4.2",
"resolved": "https://registry.npmjs.org/curl-generator/-/curl-generator-0.4.2.tgz",
"integrity": "sha512-YD74vaPyL46XYNbyRCJV91EhYGDrE/EBiW0X/NUIrNZ23jD0Uwr/6vMrCmobYi5KrjtrqN4SnmMhQNYh3qaULw==",
"dependencies": {
"ms": "^2.0.0"
}
},
"node_modules/epg-parser": {
"version": "0.3.1",
"resolved": "https://registry.npmjs.org/epg-parser/-/epg-parser-0.3.1.tgz",
@@ -6770,6 +6802,28 @@
"node": ">=8"
}
},
"node_modules/is-buffer": {
"version": "2.0.5",
"resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.5.tgz",
"integrity": "sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ==",
"funding": [
{
"type": "github",
"url": "https://github.com/sponsors/feross"
},
{
"type": "patreon",
"url": "https://www.patreon.com/feross"
},
{
"type": "consulting",
"url": "https://feross.org/support"
}
],
"engines": {
"node": ">=4"
}
},
"node_modules/is-callable": {
"version": "1.2.7",
"resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz",
@@ -8651,6 +8705,11 @@
"integrity": "sha512-T0rZxKmghOOf5YPnn8EY5iLYeWCpZq8G41FfqoVHH5QDTAFaghJRmAdLiadEDq+ztgM2q5PjA+Z1fOwGrLgmtg==",
"license": "MIT"
},
"node_modules/lodash.padstart": {
"version": "4.6.1",
"resolved": "https://registry.npmjs.org/lodash.padstart/-/lodash.padstart-4.6.1.tgz",
"integrity": "sha512-sW73O6S8+Tg66eY56DBk85aQzzUJDtpoXFBgELMd5P/SotAguo+1kYO6RuYgXxA4HJH3LFTFPASX6ET6bjfriw=="
},
"node_modules/lodash.sortby": {
"version": "4.7.0",
"resolved": "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz",
@@ -13586,6 +13645,14 @@
"@types/lodash": "*"
}
},
"@types/lodash.padstart": {
"version": "4.6.9",
"resolved": "https://registry.npmjs.org/@types/lodash.padstart/-/lodash.padstart-4.6.9.tgz",
"integrity": "sha512-KVXQ65AiorTc+Dn9eSRZDs1SnzXULRJcMYhCDEIgsRtHU7mbVpghPSxkySh3Vgm+doWVzpJCA24259fkRL46sA==",
"requires": {
"@types/lodash": "*"
}
},
"@types/lodash.sortby": {
"version": "4.7.9",
"resolved": "https://registry.npmjs.org/@types/lodash.sortby/-/lodash.sortby-4.7.9.tgz",
@@ -14090,6 +14157,15 @@
"http-cookie-agent": "^7.0.2"
}
},
"axios-mock-adapter": {
"version": "2.1.0",
"resolved": "https://registry.npmjs.org/axios-mock-adapter/-/axios-mock-adapter-2.1.0.tgz",
"integrity": "sha512-AZUe4OjECGCNNssH8SOdtneiQELsqTsat3SQQCWLPjN436/H+L9AjWfV7bF+Zg/YL9cgbhrz5671hoh+Tbn98w==",
"requires": {
"fast-deep-equal": "^3.1.3",
"is-buffer": "^2.0.5"
}
},
"babel-jest": {
"version": "30.0.5",
"resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-30.0.5.tgz",
@@ -14741,9 +14817,9 @@
"integrity": "sha512-yi1x3EAWKjQTreYWeSd98431AV+IEE0qoDyOoaHJ7KJ21gv6HtBXHVLX74opVSGqcR8/AbjJBHAHpcOy2bj5Gg=="
},
"curl-generator": {
"version": "0.4.2",
"resolved": "https://registry.npmjs.org/curl-generator/-/curl-generator-0.4.2.tgz",
"integrity": "sha512-YD74vaPyL46XYNbyRCJV91EhYGDrE/EBiW0X/NUIrNZ23jD0Uwr/6vMrCmobYi5KrjtrqN4SnmMhQNYh3qaULw==",
"version": "0.5.0",
"resolved": "https://registry.npmjs.org/curl-generator/-/curl-generator-0.5.0.tgz",
"integrity": "sha512-dKmp63AJfNSplajvPoWIRfBOmp0IT8NETJ9sFw9IABEUTiJqtXdJx09mXxUlTgVCZDYWVxkCPds+mO9puf1J+w==",
"requires": {
"ms": "^2.0.0"
}
@@ -14923,17 +14999,19 @@
"integrity": "sha512-oYp7156SP8LkeGD0GF85ad1X9Ai79WtRsZ2gxJqtBuzH+98YUV6jkHEKlZkMbcrjJjIVJNIDP/3WL9wQkoPbWA=="
},
"epg-grabber": {
"version": "0.44.0",
"resolved": "https://registry.npmjs.org/epg-grabber/-/epg-grabber-0.44.0.tgz",
"integrity": "sha512-M4k/PG1/OIbHV7p8rM23yDWig6WwtpB/LafRzealc8A50HDOGskmkvuhzxf1S34Oe8xL3aU529oW2vocrpijuA==",
"version": "0.45.0",
"resolved": "https://registry.npmjs.org/epg-grabber/-/epg-grabber-0.45.0.tgz",
"integrity": "sha512-GqjXRYOJcC3mX9OYdZHlpNEhwBbCF256uKGPXdUodUjkXTmraolIEXYRUeQJmPbL5/do766EewNwIBXqLzHkJA==",
"requires": {
"@freearhey/core": "^0.14.0",
"@types/bluebird": "^3.5.42",
"@types/fs-extra": "^11.0.4",
"@types/lodash": "^4.17.20",
"@types/lodash.merge": "^4.6.9",
"@types/lodash.padstart": "^4.6.9",
"@types/pako": "^2.0.4",
"axios": "^1.12.2",
"axios-cache-interceptor": "^1.8.3",
"axios-mock-adapter": "^2.1.0",
"bluebird": "^3.7.2",
"commander": "^14.0.0",
"curl-generator": "^0.4.2",
@@ -14941,7 +15019,8 @@
"dayjs": "^1.11.18",
"fs-extra": "^11.3.0",
"glob": "^11.0.3",
"lodash": "^4.17.21",
"lodash.merge": "^4.6.2",
"lodash.padstart": "^4.6.1",
"pako": "^2.1.0",
"socks-proxy-agent": "^8.0.5",
"winston": "^3.17.0",
@@ -14965,6 +15044,14 @@
"pako": "^2.1.0",
"timer-node": "^5.0.9"
}
},
"curl-generator": {
"version": "0.4.2",
"resolved": "https://registry.npmjs.org/curl-generator/-/curl-generator-0.4.2.tgz",
"integrity": "sha512-YD74vaPyL46XYNbyRCJV91EhYGDrE/EBiW0X/NUIrNZ23jD0Uwr/6vMrCmobYi5KrjtrqN4SnmMhQNYh3qaULw==",
"requires": {
"ms": "^2.0.0"
}
}
}
},
@@ -15804,6 +15891,11 @@
"binary-extensions": "^2.0.0"
}
},
"is-buffer": {
"version": "2.0.5",
"resolved": "https://registry.npmjs.org/is-buffer/-/is-buffer-2.0.5.tgz",
"integrity": "sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ=="
},
"is-callable": {
"version": "1.2.7",
"resolved": "https://registry.npmjs.org/is-callable/-/is-callable-1.2.7.tgz",
@@ -17201,6 +17293,11 @@
"resolved": "https://registry.npmjs.org/lodash.orderby/-/lodash.orderby-4.6.0.tgz",
"integrity": "sha512-T0rZxKmghOOf5YPnn8EY5iLYeWCpZq8G41FfqoVHH5QDTAFaghJRmAdLiadEDq+ztgM2q5PjA+Z1fOwGrLgmtg=="
},
"lodash.padstart": {
"version": "4.6.1",
"resolved": "https://registry.npmjs.org/lodash.padstart/-/lodash.padstart-4.6.1.tgz",
"integrity": "sha512-sW73O6S8+Tg66eY56DBk85aQzzUJDtpoXFBgELMd5P/SotAguo+1kYO6RuYgXxA4HJH3LFTFPASX6ET6bjfriw=="
},
"lodash.sortby": {
"version": "4.7.0",
"resolved": "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz",

View File

@@ -78,9 +78,10 @@
"consola": "^3.4.2",
"cross-env": "^10.0.0",
"csv-parser": "^3.2.0",
"curl-generator": "^0.5.0",
"cwait": "^1.1.2",
"dayjs": "^1.11.13",
"epg-grabber": "^0.44.0",
"epg-grabber": "^0.45.0",
"epg-parser": "^0.3.1",
"eslint": "^9.32.0",
"eslint-config-prettier": "^10.1.8",

View File

@@ -1,15 +1,19 @@
import { Logger, Timer, Collection, Template } from '@freearhey/core'
import epgGrabber, { EPGGrabber, EPGGrabberMock } from 'epg-grabber'
import { loadJs, parseProxy, SiteConfig, Queue } from '../../core'
import { loadJs, parseProxy, Queue, parseNumber } from '../../core'
import { CurlBody } from 'curl-generator/dist/bodies/body'
import { Channel, Guide, Program } from '../../models'
import { SocksProxyAgent } from 'socks-proxy-agent'
import defaultConfig from '../../default.config'
import { PromisyClass, TaskQueue } from 'cwait'
import { Storage } from '@freearhey/storage-js'
import { CurlGenerator } from 'curl-generator'
import { QueueItem } from '../../types/queue'
import { Option, program } from 'commander'
import { SITES_DIR } from '../../constants'
import { data, loadData } from '../../api'
import dayjs, { Dayjs } from 'dayjs'
import merge from 'lodash.merge'
import path from 'path'
program
@@ -25,12 +29,12 @@ program
.addOption(
new Option('-t, --timeout <milliseconds>', 'Override the default timeout for each request')
.env('TIMEOUT')
.argParser(parseInt)
.argParser(parseNumber)
)
.addOption(
new Option('-d, --delay <milliseconds>', 'Override the default delay between request')
.env('DELAY')
.argParser(parseInt)
.argParser(parseNumber)
)
.addOption(new Option('-x, --proxy <url>', 'Use the specified proxy').env('PROXY'))
.addOption(
@@ -38,13 +42,13 @@ program
'--days <days>',
'Override the number of days for which the program will be loaded (defaults to the value from the site config)'
)
.argParser(parseInt)
.argParser(parseNumber)
.env('DAYS')
)
.addOption(
new Option('--maxConnections <number>', 'Limit on the number of concurrent requests')
.default(1)
.argParser(parseInt)
.argParser(parseNumber)
.env('MAX_CONNECTIONS')
)
.addOption(
@@ -53,6 +57,7 @@ program
.env('GZIP')
)
.addOption(new Option('--curl', 'Display each request as CURL').default(false).env('CURL'))
.addOption(new Option('--debug', 'Enable debug mode').default(false).env('DEBUG'))
.parse()
interface GrabOptions {
@@ -61,6 +66,7 @@ interface GrabOptions {
output: string
gzip: boolean
curl: boolean
debug: boolean
maxConnections: number
timeout?: number
delay?: number
@@ -72,25 +78,87 @@ interface GrabOptions {
const options: GrabOptions = program.opts()
async function main() {
if (!options.site && !options.channels)
if (typeof options.site !== 'string' && typeof options.channels !== 'string')
throw new Error('One of the arguments must be presented: `--site` or `--channels`')
const logger = new Logger()
const LOG_LEVELS = { info: 3, debug: 4 }
const logger = new Logger({ level: options.debug ? LOG_LEVELS['debug'] : LOG_LEVELS['info'] })
logger.info('starting...')
let config: epgGrabber.Types.SiteConfig = defaultConfig
logger.info('config:')
logger.tree(options)
if (typeof options.timeout === 'number')
config = merge(config, { request: { timeout: options.timeout } })
if (options.proxy !== undefined) {
const proxy = parseProxy(options.proxy)
if (
proxy.protocol &&
['socks', 'socks5', 'socks5h', 'socks4', 'socks4a'].includes(String(proxy.protocol))
) {
const socksProxyAgent = new SocksProxyAgent(options.proxy)
config = merge(config, {
request: { httpAgent: socksProxyAgent, httpsAgent: socksProxyAgent }
})
} else {
config = merge(config, { request: { proxy } })
}
}
if (typeof options.output === 'string') config.output = options.output
if (typeof options.days === 'number') config.days = options.days
if (typeof options.delay === 'number') config.delay = options.delay
if (typeof options.maxConnections === 'number') config.maxConnections = options.maxConnections
if (typeof options.curl === 'boolean') config.curl = options.curl
if (typeof options.gzip === 'boolean') config.gzip = options.gzip
const grabber =
process.env.NODE_ENV === 'test' ? new EPGGrabberMock(config) : new EPGGrabber(config)
const globalConfig = grabber.globalConfig
logger.debug(`config: ${JSON.stringify(globalConfig, null, 2)}`)
grabber.client.instance.interceptors.request.use(
request => {
if (globalConfig.curl) {
type AllowedMethods =
| 'GET'
| 'get'
| 'POST'
| 'post'
| 'PUT'
| 'put'
| 'PATCH'
| 'patch'
| 'DELETE'
| 'delete'
const url = request.url || ''
const method = request.method ? (request.method as AllowedMethods) : 'GET'
const headers = request.headers
? (request.headers.toJSON() as Record<string, string>)
: undefined
const body = request.data ? (request.data as CurlBody) : undefined
const curl = CurlGenerator({ url, method, headers, body })
console.log(curl)
}
return request
},
error => Promise.reject(error)
)
logger.info('loading channels...')
const storage = new Storage()
let files: string[] = []
if (options.site) {
if (typeof options.site === 'string') {
let pattern = path.join(SITES_DIR, options.site, '*.channels.xml')
pattern = pattern.replace(/\\/g, '/')
files = await storage.list(pattern)
} else if (options.channels) {
} else if (typeof options.channels === 'string') {
files = await storage.list(options.channels)
}
@@ -105,7 +173,7 @@ async function main() {
channelsFromXML.concat(_channelsFromXML)
}
if (options.lang) {
if (typeof options.lang === 'string') {
channelsFromXML = channelsFromXML.filter((channel: Channel) => {
if (!options.lang) return true
@@ -119,7 +187,6 @@ async function main() {
await loadData()
logger.info('creating queue...')
let index = 0
const queue = new Queue()
@@ -127,38 +194,11 @@ async function main() {
channel.index = index++
if (!channel.site || !channel.site_id || !channel.name) continue
const configObject = await loadJs(channel.getConfigPath())
const siteConfig = new SiteConfig(configObject)
siteConfig.filepath = channel.getConfigPath()
if (options.timeout !== undefined) {
siteConfig.request = { ...siteConfig.request, ...{ timeout: options.timeout } }
}
if (options.delay !== undefined) siteConfig.delay = options.delay
if (options.curl !== undefined) siteConfig.curl = options.curl
if (options.proxy !== undefined) {
const proxy = parseProxy(options.proxy)
if (
proxy.protocol &&
['socks', 'socks5', 'socks5h', 'socks4', 'socks4a'].includes(String(proxy.protocol))
) {
const socksProxyAgent = new SocksProxyAgent(options.proxy)
siteConfig.request = {
...siteConfig.request,
...{ httpAgent: socksProxyAgent, httpsAgent: socksProxyAgent }
}
} else {
siteConfig.request = { ...siteConfig.request, ...{ proxy } }
}
}
const config = await loadJs(channel.getConfigPath())
const days: number = config.days || globalConfig.days
if (!channel.xmltv_id) channel.xmltv_id = channel.site_id
const days = options.days || siteConfig.days || 1
const currDate = dayjs.utc(process.env.CURR_DATE || new Date().toISOString())
const dates = Array.from({ length: days }, (_, day) => currDate.add(day, 'd'))
@@ -168,14 +208,12 @@ async function main() {
queue.add(key, {
channel,
date,
siteConfig,
config,
error: null
})
})
}
const grabber = process.env.NODE_ENV === 'test' ? new EPGGrabberMock() : new EPGGrabber()
const taskQueue = new TaskQueue(Promise as PromisyClass, options.maxConnections)
const queueItems = queue.getItems()
@@ -188,10 +226,10 @@ async function main() {
const requests = queueItems.map(
taskQueue.wrap(async (queueItem: QueueItem) => {
const { channel, siteConfig, date } = queueItem
const { channel, config, date } = queueItem
if (!channel.logo) {
if (siteConfig.logo) {
if (config.logo) {
channel.logo = await grabber.loadLogo(channel, date)
} else {
channel.logo = getLogoForChannel(channel)
@@ -203,7 +241,7 @@ async function main() {
const channelPrograms = await grabber.grab(
channel,
date,
siteConfig,
config,
(context: epgGrabber.Types.GrabCallbackContext, error: Error | null) => {
logger.info(
` [${i}/${total}] ${context.channel.site} (${context.channel.lang}) - ${
@@ -235,23 +273,18 @@ async function main() {
const pathTemplate = new Template(options.output)
const channelsGroupedByKey = channels
.sortBy([(channel: Channel) => channel.index, (channel: Channel) => channel.xmltv_id])
.uniqBy((channel: Channel) => `${channel.xmltv_id}:${channel.site}:${channel.lang}`)
.groupBy((channel: Channel) => {
return pathTemplate.format({ lang: channel.lang || 'en', site: channel.site || '' })
})
const channelsGroupedByKey = channels.groupBy((channel: Channel) => {
return pathTemplate.format({ lang: channel.lang || 'en', site: channel.site || '' })
})
const programsGroupedByKey = programs
.sortBy([(program: Program) => program.channel, (program: Program) => program.start])
.groupBy((program: Program) => {
const lang =
program.titles && program.titles.length && program.titles[0].lang
? program.titles[0].lang
: 'en'
const programsGroupedByKey = programs.groupBy((program: Program) => {
const lang =
program.titles && program.titles.length && program.titles[0].lang
? program.titles[0].lang
: 'en'
return pathTemplate.format({ lang, site: program.site || '' })
})
return pathTemplate.format({ lang, site: program.site || '' })
})
for (const groupKey of channelsGroupedByKey.keys()) {
const groupChannels = new Collection(channelsGroupedByKey.get(groupKey))

View File

@@ -1,4 +1,3 @@
export * from './htmlTable'
export * from './siteConfig'
export * from './utils'
export * from './queue'

View File

@@ -1,71 +0,0 @@
import * as epgGrabber from 'epg-grabber'
import merge from 'lodash.merge'
const _default = {
days: 1,
delay: 0,
output: 'guide.xml',
request: {
method: 'GET',
maxContentLength: 5242880,
timeout: 30000,
withCredentials: true,
jar: null,
responseType: 'arraybuffer',
cache: false,
headers: null,
data: null
},
maxConnections: 1,
site: undefined,
url: undefined,
parser: undefined,
channels: undefined,
lang: 'en',
debug: false,
gzip: false,
curl: false,
logo: ''
}
export class SiteConfig {
days: number
lang: string
delay: number
debug: boolean
gzip: boolean
curl: boolean
maxConnections: number
output: string
request: epgGrabber.Types.SiteConfigRequestConfig
site: string
channels?: string | string[]
url: ((context: epgGrabber.Types.SiteConfigRequestContext) => string | Promise<string>) | string
parser: (
context: epgGrabber.Types.SiteConfigParserContext
) =>
| epgGrabber.Types.SiteConfigParserResult[]
| Promise<epgGrabber.Types.SiteConfigParserResult[]>
logo: ((context: epgGrabber.Types.SiteConfigRequestContext) => string | Promise<string>) | string
filepath: string
constructor(config: epgGrabber.Types.SiteConfigObject) {
this.site = config.site
this.channels = config.channels
this.url = config.url
this.parser = config.parser
this.filepath = config.filepath
this.days = config.days || _default.days
this.lang = config.lang || _default.lang
this.delay = config.delay || _default.delay
this.debug = config.debug || _default.debug
this.maxConnections = config.maxConnections || _default.maxConnections
this.gzip = config.gzip || _default.gzip
this.curl = config.curl || _default.curl
this.output = config.output || _default.output
this.logo = config.logo || _default.logo
this.request = merge(_default.request, config.request)
}
}

View File

@@ -104,3 +104,7 @@ export async function loadIssues(props?: { labels: string[] | string }) {
return new Collection(issues).map(data => new Issue(data))
}
/**
 * Parses a string CLI option into an integer.
 *
 * Used as a commander `argParser` for numeric options (`--timeout`,
 * `--delay`, `--days`, `--maxConnections`) so they arrive as numbers.
 *
 * An explicit radix of 10 is passed so that inputs with a `0x` prefix are
 * not silently interpreted as hexadecimal.
 *
 * NOTE(review): returns NaN for non-numeric input — presumably downstream
 * `typeof === 'number'` checks still accept NaN; confirm callers handle it.
 */
export function parseNumber(value: string): number {
  return parseInt(value, 10)
}

10
scripts/default.config.js Normal file
View File

@@ -0,0 +1,10 @@
// Global fallback configuration merged into every site grab; per-option CLI
// flags and each site's *.config.js may override these values.
export default {
  days: 1, // number of days of EPG data to request per channel
  delay: 0, // milliseconds to wait between consecutive requests
  request: {
    maxContentLength: 5242880, // 5 MiB cap on the response body size
    timeout: 30000, // abort a request after 30 seconds
    withCredentials: true,
    jar: null // no cookie jar by default — NOTE(review): confirm consumers expect null here
  }
}

View File

@@ -1,10 +1,10 @@
import { SiteConfig } from '../core/siteConfig'
import { Channel } from '../models/channel'
import epgGrabber from 'epg-grabber'
import { Dayjs } from 'dayjs'
export interface QueueItem {
channel: Channel
date: Dayjs
siteConfig: SiteConfig
config: epgGrabber.Types.SiteConfig
error: string | null
}

View File

@@ -99,16 +99,24 @@ describe('epg:grab', () => {
content('tests/__data__/expected/epg_grab/gzip.guide.xml')
)
const expected = pako.ungzip(fs.readFileSync('tests/__data__/output/guides/gzip.guide.xml.gz'))
const result = pako.ungzip(
fs.readFileSync('tests/__data__/expected/epg_grab/gzip.guide.xml.gz')
const outputString = pako.ungzip(
fs.readFileSync('tests/__data__/output/guides/gzip.guide.xml.gz'),
{ to: 'string' }
)
expect(expected).toEqual(result)
const expectedString = pako.ungzip(
fs.readFileSync('tests/__data__/expected/epg_grab/gzip.guide.xml.gz'),
{ to: 'string' }
)
const output = new Set(outputString.split('\r\n'))
const expected = new Set(expectedString.split('\r\n'))
expect(output).toEqual(expected)
})
})
function content(filepath: string) {
return fs.readFileSync(pathToFileURL(filepath), {
encoding: 'utf8'
})
const string = fs.readFileSync(pathToFileURL(filepath), 'utf8')
return new Set(string.split('\r\n'))
}