Update tests/

Aleksandr Statciuk
2023-04-27 17:41:38 +03:00
parent ef25745f52
commit 77a620cd48
86 changed files with 126 additions and 459 deletions

View File

@@ -1,35 +0,0 @@
const { execSync } = require('child_process')
const fs = require('fs-extra')
const path = require('path')

beforeEach(() => {
  fs.emptyDirSync('tests/__data__/output')
  fs.copyFileSync(
    'tests/__data__/input/database/cluster_load.streams.db',
    'tests/__data__/output/streams.db'
  )

  const stdout = execSync(
    'DB_DIR=tests/__data__/output LOGS_DIR=tests/__data__/output/logs/cluster/load npm run cluster:load -- --cluster-id=1 --timeout=1',
    { encoding: 'utf8' }
  )
})

it('return results', () => {
  expect(content('tests/__data__/output/logs/cluster/load/cluster_1.log')).toEqual(
    content('tests/__data__/expected/logs/cluster/load/cluster_1.log')
  )
})

function content(filepath) {
  const data = fs.readFileSync(path.resolve(filepath), {
    encoding: 'utf8'
  })

  return data
    .split('\n')
    .filter(l => l)
    .map(l => {
      return JSON.parse(l)
    })
}
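
The removed test sets DB_DIR and LOGS_DIR by prefixing the shell command, which relies on POSIX shell syntax. For reference, a minimal sketch of the same invocation that passes the variables through execSync's env option instead (paths and script name copied from the test above; not necessarily how the project chooses to do it):

const { execSync } = require('child_process')

// Same command as in the removed test, but with DB_DIR and LOGS_DIR passed
// through execSync's env option rather than an inline shell prefix.
const stdout = execSync('npm run cluster:load -- --cluster-id=1 --timeout=1', {
  encoding: 'utf8',
  env: {
    ...process.env,
    DB_DIR: 'tests/__data__/output',
    LOGS_DIR: 'tests/__data__/output/logs/cluster/load'
  }
})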

View File

@@ -1,48 +0,0 @@
const fs = require('fs-extra')
const path = require('path')
const { execSync } = require('child_process')

beforeEach(() => {
  fs.emptyDirSync('tests/__data__/output')
  fs.mkdirSync('tests/__data__/output/database')
  fs.copyFileSync(
    'tests/__data__/input/database/db_clear.streams.db',
    'tests/__data__/output/database/streams.db'
  )

  const stdout = execSync(
    'DB_DIR=tests/__data__/output/database npm run db:clear -- --threshold 7',
    {
      encoding: 'utf8'
    }
  )
})

it('can clear database', () => {
  let output = content('tests/__data__/output/database/streams.db')
  let expected = content('tests/__data__/expected/database/db_clear.streams.db')

  output = output.map(i => {
    i._id = null
    return i
  })
  expected = expected.map(i => {
    i._id = null
    return i
  })

  expect(output).toMatchObject(expected)
})

function content(filepath) {
  const data = fs.readFileSync(path.resolve(filepath), {
    encoding: 'utf8'
  })

  return data
    .split('\n')
    .filter(l => l)
    .map(l => {
      return JSON.parse(l)
    })
}
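
The removed db:clear test nulls out the NeDB-generated _id on both sides before comparing. A sketch of the same idea written with lodash's omit (lodash already appears elsewhere in these tests; the helper name here is made up for illustration):

const _ = require('lodash')

// Drop fields whose values are generated at runtime (e.g. the NeDB _id)
// so that two record sets can be compared structurally.
function withoutVolatileFields(records, fields = ['_id']) {
  return records.map(record => _.omit(record, fields))
}

// expect(withoutVolatileFields(output)).toMatchObject(withoutVolatileFields(expected))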

View File

@@ -7,7 +7,7 @@ beforeEach(() => {
  fs.mkdirSync('tests/__data__/output/database')
  const stdout = execSync(
-    'DB_DIR=tests/__data__/output/database DATA_DIR=tests/__data__/input/data npm run db:create -- --input-dir=tests/__data__/input/streams --max-clusters=1',
+    'DB_DIR=tests/__data__/output/database DATA_DIR=tests/__data__/input/data npm run db:create -- --input-dir=tests/__data__/input/streams',
    { encoding: 'utf8' }
  )
})
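
The flag dropped above (--max-clusters=1) reaches the script through npm's `--` separator. As a reminder of how such flags arrive, a minimal sketch that reads --input-dir straight from process.argv (the actual db:create script may parse its arguments differently):

// Flags after `--` in `npm run db:create -- --input-dir=...` are appended
// to the underlying node invocation and show up in process.argv.
const args = process.argv.slice(2)

const inputDir = args
  .filter(arg => arg.startsWith('--input-dir='))
  .map(arg => arg.split('=')[1])[0]

console.log(inputDir) // e.g. tests/__data__/input/streams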

View File

@@ -1,7 +1,5 @@
const { execSync } = require('child_process')
const fs = require('fs-extra')
-const _ = require('lodash')
-const dayjs = require('dayjs')
beforeEach(() => {
  fs.emptyDirSync('tests/__data__/output')
@@ -18,45 +16,7 @@ beforeEach(() => {
})
it('can create streams.json', () => {
-  let api = content('input/data/streams.json')
-  let output = content(`output/.api/streams.json`)
-  let expected = content(`expected/.api/streams.json`)
-  const samples = {
-    unchanged_online: 'https://master.starmena-cloud.com/hls/libyas.m3u8',
-    unchanged_error: 'https://iptv-all.lanesh4d0w.repl.co/andorra/atv',
-    updated_error: 'http://46.46.143.222:1935/live/mp4:ldpr.stream/playlist.m3u8',
-    added_online: 'https://master.starmena-cloud.com/hls/bbc.m3u8'
-  }
-  let outputData, savedData
-  outputData = output.find(i => i.url === samples['unchanged_online'])
-  savedData = api.find(i => i.url === samples['unchanged_online'])
-  expect(outputData.added_at).toBe(savedData.added_at)
-  expect(outputData.updated_at).toBe(savedData.updated_at)
-  expect(dayjs().diff(outputData.checked_at, 'h')).toBe(0)
-  outputData = output.find(i => i.url === samples['unchanged_error'])
-  savedData = api.find(i => i.url === samples['unchanged_error'])
-  expect(outputData.added_at).toBe(savedData.added_at)
-  expect(outputData.updated_at).toBe(savedData.updated_at)
-  expect(dayjs().diff(outputData.checked_at, 'h')).toBe(0)
-  outputData = output.find(i => i.url === samples['updated_error'])
-  savedData = api.find(i => i.url === samples['unchanged_error'])
-  expect(outputData.added_at).toBe(savedData.added_at)
-  expect(dayjs().diff(outputData.updated_at, 'h')).toBe(0)
-  expect(dayjs().diff(outputData.checked_at, 'h')).toBe(0)
-  outputData = output.find(i => i.url === samples['added_online'])
-  expect(dayjs().diff(outputData.added_at, 'h')).toBe(0)
-  expect(dayjs().diff(outputData.updated_at, 'h')).toBe(0)
-  expect(dayjs().diff(outputData.checked_at, 'h')).toBe(0)
-  expect(output.map(item => _.omit(item, ['added_at', 'updated_at', 'checked_at']))).toMatchObject(
-    expected.map(item => _.omit(item, ['added_at', 'updated_at', 'checked_at']))
-  )
+  expect(content(`output/.api/streams.json`)).toMatchObject(content(`expected/.api/streams.json`))
})
function content(filepath) {
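
The assertions removed above checked with dayjs that added_at, updated_at and checked_at were set within the last hour. A sketch of that pattern as a small reusable helper, in case it is ever needed again (the helper name is illustrative):

const dayjs = require('dayjs')

// Passes while the given timestamp is less than an hour old; this is how
// the removed test tolerated values generated at run time.
function expectRecent(date) {
  expect(dayjs().diff(date, 'h')).toBe(0)
}

// expectRecent(outputData.checked_at)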

View File

@@ -1,21 +0,0 @@
const fs = require('fs-extra')
const path = require('path')
const { execSync } = require('child_process')

beforeEach(() => {
  fs.emptyDirSync('tests/__data__/output')
  fs.mkdirSync('tests/__data__/output/database')
  fs.copyFileSync(
    'tests/__data__/input/database/db_matrix.streams.db',
    'tests/__data__/output/database/streams.db'
  )
})

it('can create valid matrix', () => {
  const result = execSync('DB_DIR=tests/__data__/output/database npm run db:matrix', {
    encoding: 'utf8'
  })

  expect(result).toBe(
    '\n> db:matrix\n> node scripts/commands/database/matrix.js\n\nMATRIX={"cluster_id":[1,3]}\n'
  )
})
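
The removed db:matrix test pins the entire npm banner, so any change to how npm echoes the command breaks it. A sketch of a looser assertion that only extracts and parses the MATRIX line (same command and expected value as above):

const { execSync } = require('child_process')

it('can create valid matrix', () => {
  const result = execSync('DB_DIR=tests/__data__/output/database npm run db:matrix', {
    encoding: 'utf8'
  })

  // Compare only the MATRIX=... payload, ignoring whatever npm prints around it.
  const match = result.match(/MATRIX=(\{.*\})/)
  expect(match).not.toBeNull()
  expect(JSON.parse(match[1])).toEqual({ cluster_id: [1, 3] })
})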

View File

@@ -1,61 +0,0 @@
const { execSync } = require('child_process')
const fs = require('fs-extra')
const path = require('path')

beforeEach(() => {
  fs.emptyDirSync('tests/__data__/output')
  fs.mkdirSync('tests/__data__/output/database')
  fs.copyFileSync(
    'tests/__data__/input/database/db_update.streams.db',
    'tests/__data__/output/database/streams.db'
  )
})

it('can save results', () => {
  const stdout = execSync(
    'DB_DIR=tests/__data__/output/database LOGS_DIR=tests/__data__/input/logs/cluster/load npm run db:update',
    { encoding: 'utf8' }
  )

  expect(stdout).toEqual(`
> db:update
> node scripts/commands/database/update.js
loading streams...
found 10 streams
loading check results...
found 6 results
loading origins...
found 2 origins
updating streams...
updated 6 streams
done
`)

  let input = content('tests/__data__/input/database/db_update.streams.db')
  let output = content('tests/__data__/output/database/streams.db')
  let expected = content('tests/__data__/expected/database/db_update.streams.db')

  let inputDate = input.find(i => i._id === '2ST8btby3mmsgPF0')['updated_at']
  let outputDate = output.find(i => i._id === '2ST8btby3mmsgPF0')['updated_at']
  expect(outputDate).not.toEqual(inputDate)

  output = output.map(i => {
    delete i['updated_at']
    return i
  })

  expect(output).toEqual(expected)
})

function content(filepath) {
  const data = fs.readFileSync(path.resolve(filepath), {
    encoding: 'utf8'
  })

  return data
    .split('\n')
    .filter(l => l)
    .map(l => {
      return JSON.parse(l)
    })
}
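
The content() helper that parses these line-delimited .db files is repeated in almost every file removed by this commit. A sketch of the same logic as a single shared module (the tests/__helpers__ path is hypothetical):

// tests/__helpers__/ndjson.js (hypothetical location)
const fs = require('fs-extra')
const path = require('path')

// Each non-empty line of an NeDB .db file is a standalone JSON document;
// parse them all and return the resulting records.
function readNdjson(filepath) {
  return fs
    .readFileSync(path.resolve(filepath), { encoding: 'utf8' })
    .split('\n')
    .filter(line => line)
    .map(line => JSON.parse(line))
}

module.exports = { readNdjson }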

View File

@@ -10,7 +10,7 @@ beforeEach(() => {
    'tests/__data__/output/streams.db'
  )
-  const stdout = execSync(
+  execSync(
    'DB_DIR=tests/__data__/output DATA_DIR=tests/__data__/input/data PUBLIC_DIR=tests/__data__/output/.gh-pages LOGS_DIR=tests/__data__/output/logs/generators npm run playlist:generate',
    { encoding: 'utf8' }
  )

View File

@@ -1,32 +0,0 @@
const { execSync } = require('child_process')
const fs = require('fs-extra')
const path = require('path')
const glob = require('glob')

beforeEach(() => {
  fs.emptyDirSync('tests/__data__/output')
  fs.copyFileSync(
    'tests/__data__/input/database/playlist_update.streams.db',
    'tests/__data__/output/streams.db'
  )

  const stdout = execSync('DB_DIR=tests/__data__/output npm run playlist:update', {
    encoding: 'utf8'
  })
})

it('can update playlists', () => {
  const files = glob
    .sync('tests/__data__/expected/streams/*.m3u')
    .map(f => f.replace('tests/__data__/expected/', ''))

  files.forEach(filepath => {
    expect(content(`output/${filepath}`), filepath).toBe(content(`expected/${filepath}`))
  })
})

function content(filepath) {
  return fs.readFileSync(`tests/__data__/${filepath}`, {
    encoding: 'utf8'
  })
}
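
The removed playlist:update test compares every expected .m3u file inside a single it block, so the first mismatching file aborts the rest of the loop. A sketch of the same comparison with one test case per playlist via Jest's it.each, reusing the content() helper defined in the file above:

const glob = require('glob')

const files = glob
  .sync('tests/__data__/expected/streams/*.m3u')
  .map(f => f.replace('tests/__data__/expected/', ''))

// One named test per playlist, so a mismatching file is reported on its own.
it.each(files)('%s matches the expected output', filepath => {
  expect(content(`output/${filepath}`)).toBe(content(`expected/${filepath}`))
})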

View File

@@ -13,7 +13,7 @@ it('show an error if channel name in the blocklist', () => {
  } catch (err) {
    expect(err.status).toBe(1)
    expect(err.stdout).toBe(
-      `\n> playlist:validate\n> node scripts/commands/playlist/validate.js\n\nloading blocklist...\nfound 4 records\n\ntests/__data__/input/streams/us_blocked.m3u\n 2 error "Fox Sports 2 Asia" is on the blocklist due to claims of copyright holders (https://github.com/iptv-org/iptv/issues/0000)\n\n1 problems (1 errors, 0 warnings)\n`
+      `\n> playlist:validate\n> node scripts/commands/playlist/validate.js tests/__data__/input/streams/us_blocked.m3u\n\nloading blocklist...\nfound 4 records\n\ntests/__data__/input/streams/us_blocked.m3u\n 2 error "Fox Sports 2 Asia" is on the blocklist due to claims of copyright holders (https://github.com/iptv-org/iptv/issues/0000)\n\n1 problems (1 errors, 0 warnings)\n`
    )
  }
})
@@ -27,6 +27,6 @@ it('show a warning if channel has wrong id', () => {
  )
  expect(stdout).toBe(
-    `\n> playlist:validate\n> node scripts/commands/playlist/validate.js\n\nloading blocklist...\nfound 4 records\n\ntests/__data__/input/streams/wrong_id.m3u\n 2 warning "qib22lAq1L.us" is not in the database\n\n1 problems (0 errors, 1 warnings)\n`
+    `\n> playlist:validate\n> node scripts/commands/playlist/validate.js tests/__data__/input/streams/wrong_id.m3u\n\nloading blocklist...\nfound 4 records\n\ntests/__data__/input/streams/wrong_id.m3u\n 2 warning "qib22lAq1L.us" is not in the database\n\n1 problems (0 errors, 1 warnings)\n`
  )
})
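
The two expected strings above had to change only because the echoed command now includes the playlist path. A sketch of a less brittle check that asserts on the validator's own output rather than the full npm banner (shown for the blocklist case; it would sit inside the catch block of that test):

// Inside the catch block of the blocklist test above.
expect(err.stdout).toContain(
  '"Fox Sports 2 Asia" is on the blocklist due to claims of copyright holders'
)
expect(err.stdout).toContain('1 problems (1 errors, 0 warnings)')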