Merge pull request #37426 from iptv-org/patch-2026.05.1

Patch 2026.05.1
This commit is contained in:
Pecaquito
2026-05-02 09:47:28 -04:00
committed by GitHub
19 changed files with 353 additions and 229 deletions

View File

@@ -27,8 +27,10 @@ jobs:
- name: Install dependencies
run: npm install
- name: Update internal playlists
run: npm run playlist:update --silent >> $GITHUB_OUTPUT
id: playlist-update
run: |
npm run playlist:update
echo "processed_issues=$(cat temp/logs/playlist_update.log)" >> $GITHUB_OUTPUT
- name: Check internal playlists
run: |
npm run playlist:lint
@@ -44,10 +46,11 @@ jobs:
git config user.name "iptv-bot[bot]"
git config user.email "84861620+iptv-bot[bot]@users.noreply.github.com"
- name: Commit changes to /streams
if: steps.playlist-update.outputs.processed_issues != 0
run: |
git add streams
git status
git commit --allow-empty -m "[Bot] Update /streams" -m "Committed by [iptv-bot](https://github.com/apps/iptv-bot) via [update](https://github.com/iptv-org/iptv/actions/runs/${{ github.run_id }}) workflow." -m "${{ steps.playlist-update.outputs.OUTPUT }}" --no-verify
git commit --allow-empty -m "[Bot] Update /streams" -m "Committed by [iptv-bot](https://github.com/apps/iptv-bot) via [update](https://github.com/iptv-org/iptv/actions/runs/${{ github.run_id }}) workflow." -m "${{ steps.playlist-update.outputs.processed_issues }}" --no-verify
- name: Commit changes to PLAYLIST.md
run: |
git add PLAYLISTS.md

View File

@@ -121,7 +121,7 @@ STREAM_URL
| `STREAM_TITLE` | Stream title consisting of channel name and feed name. May contain any characters except: `,`, `[`, `]`. | Required | - |
| `QUALITY` | Maximum stream quality. | Optional | `2160p`, `1080p`, `720p`, `480p`, `360p` etc |
| `LABEL` | Specified in cases where the broadcast may, for some reason, be unavailable to some users. | Optional | `Geo-blocked` or `Not 24/7` |
| `STREAM_URL` | Stream URL. | Required | - |
| `STREAM_URL` | Stream URL. The following protocols are supported: `HTTPS`, `HTTP`, `MMS`, `MMSH`, `RTSP`, `RTMP`, `SRT`, `RTP`, `UDP`. | Required | - |
Example:

56
package-lock.json generated
View File

@@ -14,7 +14,7 @@
"@freearhey/core": "^0.14.3",
"@freearhey/storage-js": "^0.1.0",
"@inquirer/prompts": "^7.8.0",
"@iptv-org/sdk": "^1.1.4",
"@iptv-org/sdk": "^1.4.0",
"@octokit/core": "^7.0.3",
"@octokit/plugin-paginate-graphql": "^6.0.0",
"@octokit/plugin-paginate-rest": "^13.1.1",
@@ -46,7 +46,7 @@
"jest": "^30.0.5",
"jest-expect-message": "^1.1.3",
"lodash.uniqueid": "^4.0.1",
"m3u-linter": "^0.4.2",
"m3u-linter": "^0.4.3",
"mediainfo.js": "^0.3.6",
"mpd-parser": "^1.3.1",
"node-cleanup": "^2.1.2",
@@ -726,15 +726,12 @@
}
},
"node_modules/@freearhey/search-js": {
"version": "0.2.1",
"resolved": "https://registry.npmjs.org/@freearhey/search-js/-/search-js-0.2.1.tgz",
"integrity": "sha512-RXVJ2AaXjnrLPpLHCOWrdgtYc4SZplYl905INFmhL6V8jcyIrX+qrjkAjwAHqWDTnJSYfSG9D9Xr+EyKx/eXng==",
"version": "0.3.0",
"resolved": "https://registry.npmjs.org/@freearhey/search-js/-/search-js-0.3.0.tgz",
"integrity": "sha512-wsUBM1vA+fRqGZ+G5EnR7v52KxomoCsVOnlv43kHWt2G/LNs5pGM+aLAyx0MO/sDyBo3ADgw1h30Ydi6+1CMeQ==",
"license": "MIT",
"dependencies": {
"lodash": "^4.17.21"
},
"engines": {
"node": ">=16.0.0"
"node": ">=16.6.0"
}
},
"node_modules/@freearhey/storage-js": {
@@ -1126,15 +1123,15 @@
}
},
"node_modules/@iptv-org/sdk": {
"version": "1.1.4",
"resolved": "https://registry.npmjs.org/@iptv-org/sdk/-/sdk-1.1.4.tgz",
"integrity": "sha512-NFviCin8V9rKPP+GG7xZ0mVi+WCwxqTFzjRH8or9KVLemFXrGI+ibGQ1PgiGUadRFSxTejXo2Dvwjwdwr2NTiQ==",
"version": "1.4.0",
"resolved": "https://registry.npmjs.org/@iptv-org/sdk/-/sdk-1.4.0.tgz",
"integrity": "sha512-elnrVBBaZAGKlcb/2+5M1BuHSjD10z5yUsPn3sRY/9a9kwgbcsZ2h6ylh0QON0FY54xy8Ee1Y5ORuc45o3VLVw==",
"license": "UNLICENSED",
"dependencies": {
"@freearhey/core": "^0.15.1",
"@freearhey/search-js": "^0.2.1",
"@freearhey/search-js": "^0.3.0",
"@ntlab/sfetch": "^1.2.0",
"axios": "^1.11.0",
"axios": "^1.15.2",
"dayjs": "^1.11.18"
}
},
@@ -3169,9 +3166,6 @@
"cpu": [
"arm64"
],
"libc": [
"glibc"
],
"license": "MIT",
"optional": true,
"os": [
@@ -3185,9 +3179,6 @@
"cpu": [
"arm64"
],
"libc": [
"musl"
],
"license": "MIT",
"optional": true,
"os": [
@@ -3201,9 +3192,6 @@
"cpu": [
"ppc64"
],
"libc": [
"glibc"
],
"license": "MIT",
"optional": true,
"os": [
@@ -3217,9 +3205,6 @@
"cpu": [
"riscv64"
],
"libc": [
"glibc"
],
"license": "MIT",
"optional": true,
"os": [
@@ -3233,9 +3218,6 @@
"cpu": [
"riscv64"
],
"libc": [
"musl"
],
"license": "MIT",
"optional": true,
"os": [
@@ -3249,9 +3231,6 @@
"cpu": [
"s390x"
],
"libc": [
"glibc"
],
"license": "MIT",
"optional": true,
"os": [
@@ -3265,9 +3244,6 @@
"cpu": [
"x64"
],
"libc": [
"glibc"
],
"license": "MIT",
"optional": true,
"os": [
@@ -3281,9 +3257,6 @@
"cpu": [
"x64"
],
"libc": [
"musl"
],
"license": "MIT",
"optional": true,
"os": [
@@ -6676,9 +6649,10 @@
}
},
"node_modules/m3u-linter": {
"version": "0.4.2",
"resolved": "https://registry.npmjs.org/m3u-linter/-/m3u-linter-0.4.2.tgz",
"integrity": "sha512-KeUirh5JnGRuFDb+7tsc8Dkb+q/slco3hszKE7lwbmGKKo+QnSr7198e1ATYZ9HkY8sbvEQBuTj2hPsv9o3gGw==",
"version": "0.4.3",
"resolved": "https://registry.npmjs.org/m3u-linter/-/m3u-linter-0.4.3.tgz",
"integrity": "sha512-rdB8cGbJWiobTmhHhOiXV30j8mT67K4P85/Rv3NkhOQ9cPnlLeOkRmKPMWoTmDSyPtii/nD1Wd9WlsNlPk8akg==",
"license": "MIT",
"dependencies": {
"chalk": "^4.1.1",
"commander": "^7.2.0",

View File

@@ -38,7 +38,7 @@
"@freearhey/core": "^0.14.3",
"@freearhey/storage-js": "^0.1.0",
"@inquirer/prompts": "^7.8.0",
"@iptv-org/sdk": "^1.1.4",
"@iptv-org/sdk": "^1.4.0",
"@octokit/core": "^7.0.3",
"@octokit/plugin-paginate-graphql": "^6.0.0",
"@octokit/plugin-paginate-rest": "^13.1.1",
@@ -70,7 +70,7 @@
"jest": "^30.0.5",
"jest-expect-message": "^1.1.3",
"lodash.uniqueid": "^4.0.1",
"m3u-linter": "^0.4.2",
"m3u-linter": "^0.4.3",
"mediainfo.js": "^0.3.6",
"mpd-parser": "^1.3.1",
"node-cleanup": "^2.1.2",

View File

@@ -1,186 +1,280 @@
import { isURI, getStreamInfo, loadIssues } from '../../utils'
import { getStreamInfo, loadIssues, createThread } from '../../utils'
import { STREAMS_DIR, LOGS_DIR } from '../../constants'
import { Playlist, Issue, Stream } from '../../models'
import { loadData, data as apiData } from '../../api'
import { Logger, Collection } from '@freearhey/core'
import { Storage } from '@freearhey/storage-js'
import { STREAMS_DIR } from '../../constants'
import { PlaylistParser } from '../../core'
import * as sdk from '@iptv-org/sdk'
const processedIssues = new Collection()
const processedIssues = new Collection<Issue>()
const skippedIssues = new Collection<Issue>()
const logger = new Logger({ level: 5 })
let streams = new Collection<Stream>()
let cache = new Collection<Stream>()
// Snapshots the current in-memory stream collection so that a failed
// edit/add operation can be rolled back with resetData().
function cacheData() {
  cache = streams.clone()
}
// Restores the stream collection from the snapshot taken by cacheData(),
// discarding any changes made since the snapshot.
function resetData() {
  streams = cache
}
async function main() {
const logger = new Logger({ level: -999 })
logger.info('loading data from api...')
await loadData()
logger.info('loading issues...')
const issues = await loadIssues()
logger.info('loading data from api...')
await loadData()
logger.info('loading streams...')
await loadStreams()
logger.info('processing issues...')
await processIssues(issues)
logger.info('saving streams...')
await saveStreams()
logger.info('saving logs...')
await saveLogs()
logger.info(
`skipped ${skippedIssues.count()} issue(s): ${skippedIssues
.map((issue: Issue) => `#${issue.number}`)
.join(', ')}`
)
logger.info(
`processed ${processedIssues.count()} issue(s): ${processedIssues
.map((issue: Issue) => `#${issue.number}`)
.join(', ')}`
)
}
main()
/**
 * Persists the list of processed issues as a single
 * "closes #N, closes #N, ..." line to playlist_update.log inside LOGS_DIR.
 */
async function saveLogs() {
  const storage = new Storage(LOGS_DIR)
  const lines = processedIssues.map((issue: Issue) => `closes #${issue.number}`)
  await storage.save('playlist_update.log', lines.join(', '))
}
/**
 * Writes the in-memory streams back to disk under STREAMS_DIR: one playlist
 * file per distinct stream filepath, excluding streams flagged as removed.
 */
async function saveStreams() {
  const storage = new Storage(STREAMS_DIR)
  const byFilepath = streams.groupBy((stream: Stream) => stream.getFilepath())
  for (const filepath of byFilepath.keys()) {
    // Drop streams marked for removal before serializing the playlist.
    const kept = new Collection<Stream>(byFilepath.get(filepath)).filter(
      (stream: Stream) => stream.removed === false
    )
    const playlist = new Playlist(kept, { public: false })
    await storage.save(filepath, playlist.toString())
  }
}
async function loadStreams() {
const streamsStorage = new Storage(STREAMS_DIR)
const parser = new PlaylistParser({
storage: streamsStorage
})
const files = await streamsStorage.list('**/*.m3u')
const streams = await parser.parse(files)
logger.info('removing streams...')
await removeStreams({ streams, issues })
streams = await parser.parse(files)
}
logger.info('edit stream description...')
await editStreams({
streams,
issues
})
async function processIssues(issues: Collection<Issue>) {
const requests = issues.filter((issue: Issue) => issue.labels.includes('approved')).all()
logger.info('add new streams...')
await addStreams({
streams,
issues
})
for (const issue of requests) {
switch (true) {
case issue.labels.includes('streams:remove'):
await removeStream(issue)
break
case issue.labels.includes('streams:edit'):
await editStream(issue)
break
case issue.labels.includes('streams:add'):
await addStream(issue)
break
}
}
}
logger.info('saving...')
const groupedStreams = streams.groupBy((stream: Stream) => stream.getFilepath())
for (const filepath of groupedStreams.keys()) {
let streams = new Collection(groupedStreams.get(filepath))
streams = streams.filter((stream: Stream) => stream.removed === false)
async function removeStream(issue: Issue) {
const log = createThread(issue, 'streams/remove')
log.start()
const playlist = new Playlist(streams, { public: false })
await streamsStorage.save(filepath, playlist.toString())
const data = issue.data
if (data.missing('stream_url')) {
log.error('The request is missing the "Stream URL"')
skippedIssues.add(issue)
return
}
const output = processedIssues.map(issue_number => `closes #${issue_number}`).join(', ')
console.log(`OUTPUT=${output}`)
}
const streamUrls = data.getString('stream_url') || ''
main()
async function removeStreams({
streams,
issues
}: {
streams: Collection<Stream>
issues: Collection<Issue>
}) {
const requests = issues.filter(
issue => issue.labels.includes('streams:remove') && issue.labels.includes('approved')
)
requests.forEach((issue: Issue) => {
const data = issue.data
if (data.missing('stream_url')) return
const streamUrls = data.getString('stream_url') || ''
let changed = false
streamUrls
.split(/\r?\n/)
.filter(Boolean)
.forEach(link => {
const found: Stream = streams.first((_stream: Stream) => _stream.url === link.trim())
if (found) {
found.removed = true
changed = true
}
})
if (changed) processedIssues.add(issue.number)
})
}
async function editStreams({
streams,
issues
}: {
streams: Collection<Stream>
issues: Collection<Issue>
}) {
const requests = issues.filter(
issue => issue.labels.includes('streams:edit') && issue.labels.includes('approved')
)
requests.forEach((issue: Issue) => {
const data = issue.data
if (data.missing('stream_url')) return
const stream: Stream = streams.first(
(_stream: Stream) => _stream.url === data.getString('stream_url')
)
if (!stream) return
const streamId = data.getString('stream_id') || ''
const [channelId, feedId] = streamId.split('@')
if (channelId) {
stream.channel = channelId
stream.feed = feedId
stream.updateTvgId().updateTitle().updateFilepath()
}
stream.updateWithIssue(data)
processedIssues.add(issue.number)
})
}
async function addStreams({
streams,
issues
}: {
streams: Collection<Stream>
issues: Collection<Issue>
}) {
const requests = issues.filter(
issue => issue.labels.includes('streams:add') && issue.labels.includes('approved')
)
for (const issue of requests.all()) {
const data = issue.data
if (data.missing('stream_id') || data.missing('stream_url')) continue
if (streams.includes((_stream: Stream) => _stream.url === data.getString('stream_url')))
continue
const streamUrl = data.getString('stream_url') || ''
if (!isURI(streamUrl)) continue
const streamId = data.getString('stream_id') || ''
const [channelId, feedId] = streamId.split('@')
const channel: sdk.Models.Channel | undefined = apiData.channelsKeyById.get(channelId)
if (!channel) continue
const label = data.getString('label') || ''
const httpUserAgent = data.getString('http_user_agent') || null
const httpReferrer = data.getString('http_referrer') || null
let quality = data.getString('quality') || null
if (!quality) {
const streamInfo = await getStreamInfo(streamUrl, { httpUserAgent, httpReferrer })
if (streamInfo) {
const height = streamInfo?.resolution?.height
if (height) {
quality = `${height}p`
}
let changed = false
streamUrls
.split(/\r?\n/)
.filter(Boolean)
.forEach((link: string) => {
const found: Stream = streams.first((_stream: Stream) => _stream.url === link.trim())
if (found) {
found.removed = true
changed = true
log.info(`The stream with the URL "${link}" has been removed from the playlists`)
} else {
log.error(`The stream with the URL "${link}" is missing from the playlists`)
}
}
const stream = new Stream({
channel: channelId,
feed: feedId,
title: channel.name,
url: streamUrl,
user_agent: httpUserAgent,
referrer: httpReferrer,
quality
})
stream.label = label
stream.updateTitle().updateFilepath()
streams.add(stream)
processedIssues.add(issue.number)
if (changed) {
processedIssues.add(issue)
} else {
log.error(`None of the URLs specified in the request were found in the playlists`)
skippedIssues.add(issue)
}
}
/**
 * Processes a "streams:edit" request: looks up the stream referenced by the
 * issue's "Stream URL" field and applies the requested changes to it.
 * Invalid or unmatched requests are logged and added to skippedIssues;
 * successful edits are added to processedIssues.
 */
async function editStream(issue: Issue) {
  const log = createThread(issue, 'streams/edit')
  log.start()

  const data = issue.data
  const streamUrl = data.getString('stream_url')
  if (!streamUrl) {
    log.error('The request is missing the "Stream URL"')
    skippedIssues.add(issue)
    return
  }

  const stream: Stream = streams.first((_stream: Stream) => _stream.url === streamUrl)
  if (!stream) {
    // This branch fires when the stream was NOT found, so report it as
    // missing (the previous message wrongly said "is already in the playlists").
    log.error(`The stream with the URL "${streamUrl}" is missing from the playlists`)
    skippedIssues.add(issue)
    return
  }

  // Snapshot the current state so the edit can be rolled back if validation fails.
  cacheData()
  stream.updateWithIssue(data)

  // Use validate()'s return value directly: the previous
  // `errors.concat(stream.validate())` discards the result if concat returns
  // a new collection (standard JS concat semantics), which would silently
  // drop every validation error.
  const errors: Collection<Error> = stream.validate()
  if (errors.isNotEmpty()) {
    errors.forEach((err: Error) => {
      log.error(err.message)
    })
    skippedIssues.add(issue)
    resetData()
    log.info('All changes have been reverted')
    return
  }

  log.info('The stream description has been updated')
  processedIssues.add(issue)
}
/**
 * Processes a "streams:add" request: validates the issue's "Stream ID" and
 * "Stream URL" fields, rejects duplicates and blocklisted channels,
 * determines the stream quality (probing the stream when not specified),
 * and appends the new stream to the in-memory playlist collection.
 * Rejected requests go to skippedIssues; successful ones to processedIssues.
 */
async function addStream(issue: Issue) {
  const log = createThread(issue, 'streams/add')
  log.start()

  const data = issue.data
  if (data.missing('stream_id')) {
    log.error('The request is missing the "Stream ID"')
    skippedIssues.add(issue)
    return
  }

  const streamUrl = data.getString('stream_url')
  if (!streamUrl) {
    log.error('The request is missing the "Stream URL"')
    skippedIssues.add(issue)
    return
  }

  if (streams.includes((_stream: Stream) => _stream.url === streamUrl)) {
    log.error(`The stream with the URL "${streamUrl}" is already included in the playlists`)
    skippedIssues.add(issue)
    return
  }

  const streamId = data.getString('stream_id') || ''
  const [channelId, feedId] = streamId.split('@')
  const channel: sdk.Models.Channel | undefined = apiData.channelsKeyById.get(channelId)
  if (!channel) {
    log.error(`There is no channel with the ID "${channelId}" in the database`)
    skippedIssues.add(issue)
    return
  }

  // Any blocklist record for the channel rejects the request; only "dmca"
  // and "nsfw" reasons produce a detailed message.
  const blocklistRecords: sdk.Models.BlocklistRecord[] | undefined =
    apiData.blocklistRecordsGroupedByChannel.get(channelId)
  if (blocklistRecords) {
    blocklistRecords.forEach((record: sdk.Models.BlocklistRecord) => {
      if (record.reason === 'dmca') {
        log.error(
          `The channel has been added to our blocklist due to the claims of the copyright holder: ${record.ref}`
        )
      } else if (record.reason === 'nsfw') {
        log.error(`The channel has been added to our blocklist due to NSFW content: ${record.ref}`)
      }
    })
    skippedIssues.add(issue)
    return
  }

  // Snapshot the current state so the addition can be rolled back if validation fails.
  cacheData()

  const httpUserAgent = data.getString('http_user_agent') || null
  const httpReferrer = data.getString('http_referrer') || null

  // When the request omits a quality, probe the stream and derive it from the
  // reported vertical resolution (e.g. 1080 -> "1080p").
  let quality = data.getString('quality') || null
  if (!quality) {
    const streamInfo = await getStreamInfo(streamUrl, { httpUserAgent, httpReferrer })
    if (streamInfo) {
      const height = streamInfo?.resolution?.height
      if (height) {
        quality = `${height}p`
      }
    }
  }

  const stream = new Stream({
    channel: channelId,
    feed: feedId,
    title: channel.name,
    url: streamUrl,
    user_agent: httpUserAgent,
    referrer: httpReferrer,
    quality,
    label: data.getString('label') || ''
  })
  stream.updateTitle().updateFilepath()
  streams.add(stream)

  // Use validate()'s return value directly: the previous
  // `errors.concat(stream.validate())` discards the result if concat returns
  // a new collection (standard JS concat semantics), which would silently
  // drop every validation error.
  const errors: Collection<Error> = stream.validate()
  if (errors.isNotEmpty()) {
    errors.forEach((err: Error) => {
      log.error(err.message)
    })
    skippedIssues.add(issue)
    resetData()
    log.info('All changes have been reverted')
    return
  }

  log.info('The stream has been added to playlists')
  processedIssues.add(issue)
}

View File

@@ -39,7 +39,7 @@ async function main() {
const streams = await parser.parse(files)
const buffer = new Dictionary<Stream>()
streams.forEach((stream: Stream) => {
if (!selectedFiles.includes(stream.filepath)) {
if (!stream.filepath || !selectedFiles.includes(stream.filepath)) {
buffer.set(stream.url, stream)
}
})
@@ -70,7 +70,7 @@ async function main() {
log.add({
type: 'error',
line: stream.getLine(),
message: `"${stream.url}" is already in the "${origin.filepath}"`
message: `"${stream.url}" is already in the "${origin?.filepath}"`
})
} else {
buffer.set(stream.url, stream)

View File

@@ -1,15 +1,15 @@
import { IssueData } from '../core'
import { DataSet } from '../core'
type IssueProps = {
number: number
labels: string[]
data: IssueData
data: DataSet
}
export class Issue {
number: number
labels: string[]
data: IssueData
data: DataSet
constructor({ number, labels, data }: IssueProps) {
this.number = number

View File

@@ -1,8 +1,8 @@
import { normalizeURL, isURI } from '../utils'
import { Collection } from '@freearhey/core'
import parser from 'iptv-playlist-parser'
import { normalizeURL } from '../utils'
import * as sdk from '@iptv-org/sdk'
import { IssueData } from '../core'
import { DataSet } from '../core'
import { data } from '../api'
import path from 'node:path'
@@ -12,15 +12,32 @@ export class Stream extends sdk.Models.Stream {
groupTitle: string = 'Undefined'
removed: boolean = false
tvgId?: string
label: string | null
statusCode?: string
updateWithIssue(issueData: IssueData): this {
/**
 * Checks the stream for problems and returns them as a collection of Errors.
 * Currently the only check is that the URL parses and uses a supported protocol.
 */
validate(): Collection<Error> {
  const problems = new Collection<Error>()
  if (!isURI(this.url)) {
    problems.add(new Error(`The stream URL "${this.url}" is invalid`))
  }
  return problems
}
updateWithIssue(dataSet: DataSet): this {
const streamId = dataSet.getString('stream_id') || ''
const [channelId, feedId] = streamId.split('@')
if (channelId) {
this.channel = channelId
this.feed = feedId
this.updateTvgId().updateTitle().updateFilepath()
}
const data = {
label: issueData.getString('label'),
quality: issueData.getString('quality'),
httpUserAgent: issueData.getString('http_user_agent'),
httpReferrer: issueData.getString('http_referrer')
label: dataSet.getString('label'),
quality: dataSet.getString('quality'),
httpUserAgent: dataSet.getString('http_user_agent'),
httpReferrer: dataSet.getString('http_referrer')
}
if (data.label !== undefined) this.label = data.label

View File

@@ -18,7 +18,7 @@ import fs from 'node:fs'
export function isURI(string: string): boolean {
try {
const url = new URL(string)
return /^(http:|https:|mmsh:|rtsp:|rtmp:)/.test(url.protocol)
return /^(http:|https:|mms:|mmsh:|rtsp:|rtmp:|srt:|rtp:|udp:)/.test(url.protocol)
} catch {
return false
}
@@ -167,6 +167,7 @@ export async function loadIssues(props?: { labels: string | string[] }) {
per_page: 100,
labels,
status: 'open',
direction: 'asc',
headers: {
'X-GitHub-Api-Version': '2022-11-28'
}
@@ -293,3 +294,33 @@ function parseDiscussion(discussion: {
data: new DataSet(data)
})
}
/**
 * Per-issue console logger. Every message is prefixed with the issue number
 * and the request type (e.g. "streams/add") so interleaved output from
 * multiple issues remains readable.
 */
class LogThread {
  issue: Issue
  type: string

  constructor(issue: Issue, type: string) {
    this.issue = issue
    this.type = type
  }

  // Shared formatter: "[#<number>] <type>: <message>".
  private write(message: string) {
    console.log(`[#${this.issue.number}] ${this.type}: ${message}`)
  }

  // Opening line of the thread.
  start() {
    this.write(`Issue #${this.issue.number}`)
  }

  warn(message: string) {
    this.write(`└── WARNING: ${message}`)
  }

  error(message: string) {
    this.write(`└── ERROR: ${message}`)
  }

  info(message: string) {
    this.write(`└── INFO: ${message}`)
  }
}
/**
 * Creates a LogThread scoped to the given issue and request type.
 *
 * @param issue - the GitHub issue being processed
 * @param type - request-type label used as the log prefix (e.g. "streams/edit")
 * @returns a LogThread ready for start()/info()/warn()/error() calls
 */
export function createThread(issue: Issue, type: string): LogThread {
  return new LogThread(issue, type)
}

View File

@@ -1,5 +0,0 @@
#EXTM3U
#EXTINF:-1 tvg-id="TFX.fr@SD",TFX (720p)
#EXTVLCOPT:http-referrer=https://pkpakiplay.xyz/
#EXTVLCOPT:http-user-agent=Mozilla/5.0 (iPhone; CPU iPhone OS 17_7 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/18.0 Mobile/15E148 Safari/604.1
https://stitcher-ipv4.pluto.tv/v1/stitch/embed/hls/channel/64c109a4798def0008a6e03e/master.mpd?advertisingId={PSID}&appVersion=unknown&deviceDNT={TARGETOPT}&deviceId={PSID}&deviceLat=0&deviceLon=0&deviceMake=samsung&deviceModel=samsung&deviceType=samsung-tvplus&deviceVersion=unknown&embedPartner=samsung-tvplus&profileFloor=&profileLimit=&samsung_app_domain={APP_DOMAIN}&samsung_app_name={APP_NAME}&us_privacy=1YNY

View File

@@ -0,0 +1 @@
closes #14175, closes #14105, closes #14104, closes #14057, closes #14034, closes #13964, closes #13893, closes #13881, closes #13793, closes #13751, closes #13715, closes #14110, closes #14120, closes #14151, closes #14150

View File

@@ -0,0 +1,5 @@
#EXTM3U
#EXTINF:-1 tvg-id="TFX.fr@SD",TFX
#EXTVLCOPT:http-referrer=https://pkpakiplay.xyz/
#EXTVLCOPT:http-user-agent=Mozilla/5.0 (iPhone; CPU iPhone OS 17_7 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/18.0 Mobile/15E148 Safari/604.1
srt://stream.alabbassia.com:8890?mode=caller&latency=200&streamid=read:live/alabbassia

View File

@@ -538,7 +538,7 @@ module.exports = [
closed_at: null,
author_association: 'COLLABORATOR',
active_lock_reason: null,
body: '### Stream ID\n\nTFX.fr\n\n### Stream URL\n\nhttps://stitcher-ipv4.pluto.tv/v1/stitch/embed/hls/channel/64c109a4798def0008a6e03e/master.mpd?advertisingId={PSID}&appVersion=unknown&deviceDNT={TARGETOPT}&deviceId={PSID}&deviceLat=0&deviceLon=0&deviceMake=samsung&deviceModel=samsung&deviceType=samsung-tvplus&deviceVersion=unknown&embedPartner=samsung-tvplus&profileFloor=&profileLimit=&samsung_app_domain={APP_DOMAIN}&samsung_app_name={APP_NAME}&us_privacy=1YNY\n\n### Label\n\nNone\n\n### HTTP User Agent\n\nMozilla/5.0 (iPhone; CPU iPhone OS 17_7 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/18.0 Mobile/15E148 Safari/604.1\n\n### HTTP Referrer\n\nhttps://pkpakiplay.xyz/\n\n### Notes (optional)\n\nSource: https://github.com/iptv-org/iptv-org.github.io/issues/1381\n\n### Contributing Guide\n\n- [X] I have read [Contributing Guide](https://github.com/iptv-org/iptv/blob/master/CONTRIBUTING.md)',
body: '### Stream ID\n\nTFX.fr\n\n### Stream URL\n\nsrt://stream.alabbassia.com:8890?mode=caller&latency=200&streamid=read:live/alabbassia\n\n### Label\n\nNone\n\n### HTTP User Agent\n\nMozilla/5.0 (iPhone; CPU iPhone OS 17_7 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/18.0 Mobile/15E148 Safari/604.1\n\n### HTTP Referrer\n\nhttps://pkpakiplay.xyz/\n\n### Notes (optional)\n\nSource: https://github.com/iptv-org/iptv-org.github.io/issues/1381\n\n### Contributing Guide\n\n- [X] I have read [Contributing Guide](https://github.com/iptv-org/iptv/blob/master/CONTRIBUTING.md)',
reactions: {
url: 'https://api.github.com/repos/iptv-org/iptv/issues/14175/reactions',
total_count: 0,

View File

@@ -4,7 +4,7 @@ import * as fs from 'fs-extra'
import { glob } from 'glob'
const ENV_VAR =
'cross-env DATA_DIR=tests/__data__/input/data STREAMS_DIR=tests/__data__/output/streams'
'cross-env DATA_DIR=tests/__data__/input/data STREAMS_DIR=tests/__data__/output/streams LOGS_DIR=tests/__data__/output/logs'
beforeEach(() => {
fs.emptyDirSync('tests/__data__/output')
@@ -19,21 +19,25 @@ describe('playlist:update', () => {
const stdout = execSync(cmd, { encoding: 'utf8' })
if (process.env.DEBUG === 'true') console.log(cmd, stdout)
const files = glob.sync('tests/__data__/expected/playlist_update/*.m3u').map(filepath => {
const fileUrl = pathToFileURL(filepath).toString()
const pathToRemove = pathToFileURL('tests/__data__/expected/playlist_update/').toString()
const files = glob
.sync('tests/__data__/expected/playlist_update/streams/*.m3u')
.map(filepath => {
const fileUrl = pathToFileURL(filepath).toString()
const pathToRemove = pathToFileURL(
'tests/__data__/expected/playlist_update/streams/'
).toString()
return fileUrl.replace(pathToRemove, '')
})
return fileUrl.replace(pathToRemove, '')
})
files.forEach(filepath => {
expect(content(`tests/__data__/output/streams/${filepath}`)).toBe(
content(`tests/__data__/expected/playlist_update/${filepath}`)
content(`tests/__data__/expected/playlist_update/streams/${filepath}`)
)
})
expect(stdout).toBe(
'OUTPUT=closes #14151, closes #14150, closes #14110, closes #14120, closes #14175, closes #14105, closes #14104, closes #14057, closes #14034, closes #13964, closes #13893, closes #13881, closes #13793, closes #13751, closes #13715\n'
expect(content('tests/__data__/output/logs/playlist_update.log')).toBe(
content('tests/__data__/expected/playlist_update/playlist_update.log')
)
done()