Update commands/

Aleksandr Statciuk
2023-04-27 17:41:54 +03:00
parent ea4592aff2
commit a752e6a5fd
9 changed files with 14 additions and 403 deletions

View File

@@ -1,47 +0,0 @@
const { logger, parser, db, date } = require('../../core')
const { program } = require('commander')

const options = program
  .option(
    '-t, --threshold <threshold>',
    'Number of days after which the stream should be deleted',
    parser.parseNumber,
    7
  )
  .option('--input-dir <input-dir>', 'Set path to input directory', 'streams')
  .parse(process.argv)
  .opts()

async function main() {
  await db.streams.load()
  const streams = await db.streams.all()

  let buffer = {}
  let removed = 0
  logger.info('searching...')
  for (const stream of streams) {
    if (
      stream.status === 'error' &&
      date.utc().diff(stream.updated_at, 'day') >= options.threshold
    ) {
      logger.info(`${stream.url} (offline)`)
      removed += await db.streams.remove({ url: stream.url }, { multi: true })
    }

    const key = stream.url.toLowerCase()
    if (buffer[key]) {
      logger.info(`${stream.url} (duplicate)`)
      await db.streams.remove({ _id: stream._id })
      removed++
    } else {
      buffer[key] = true
    }
  }

  await db.streams.compact()

  logger.info(`removed ${removed} streams`)
}

main()
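
Not part of the diff: a minimal standalone sketch of the threshold check the removed cleanup command performed. It assumes the core date helper wraps dayjs with the UTC plugin; the stream record below is invented.

const dayjs = require('dayjs')
const utc = require('dayjs/plugin/utc')
dayjs.extend(utc)

// hypothetical stream record last updated 10 days ago and marked as broken
const stream = { status: 'error', updated_at: dayjs.utc().subtract(10, 'day').format() }
const threshold = 7 // default value of the removed --threshold option

// same test as in the removed command: prints true, so the stream would be deleted
console.log(stream.status === 'error' && dayjs.utc().diff(stream.updated_at, 'day') >= threshold)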

View File

@@ -1,34 +1,25 @@
const { db, file, parser, store, logger, id, api } = require('../../core')
const { db, file, parser, store, logger, api } = require('../../core')
const { program } = require('commander')
const _ = require('lodash')

const options = program
  .option(
    '--max-clusters <max-clusters>',
    'Set maximum number of clusters',
    parser.parseNumber,
    256
  )
  .option('--input-dir <input-dir>', 'Set path to input directory', 'streams')
  .parse(process.argv)
  .opts()

async function main() {
  logger.info('starting...')
  logger.info(`number of clusters: ${options.maxClusters}`)

  await saveToDatabase(await findStreams())

  logger.info('done')
}

main()

async function findStreams() {
  logger.info(`looking for streams...`)
  logger.info(`loading channels...`)
  await api.channels.load()
  await api.streams.load()

  logger.info(`looking for streams...`)
  await db.streams.load()

  const streams = []
@@ -40,7 +31,6 @@ async function findStreams() {
      const stream = store.create()
      const channel = await api.channels.find({ id: item.tvg.id })
      const cached = (await api.streams.find({ url: item.url })) || {}

      stream.set('channel', { channel: channel ? channel.id : null })
      stream.set('title', { title: item.name })
@@ -48,14 +38,6 @@ async function findStreams() {
      stream.set('url', { url: item.url })
      stream.set('http_referrer', { http_referrer: item.http.referrer })
      stream.set('user_agent', { user_agent: item.http['user-agent'] })
      stream.set('status', { status: cached.status || 'online' })
      stream.set('width', { width: cached.width || 0 })
      stream.set('height', { height: cached.height || 0 })
      stream.set('bitrate', { bitrate: cached.bitrate || 0 })
      stream.set('frame_rate', { frame_rate: cached.frame_rate || 0 })
      stream.set('added_at', { added_at: cached.added_at })
      stream.set('updated_at', { updated_at: cached.updated_at })
      stream.set('checked_at', { checked_at: cached.checked_at })

      streams.push(stream)
    }
@@ -69,20 +51,7 @@ async function saveToDatabase(streams = []) {
  logger.info('saving to the database...')
  await db.streams.reset()

  const chunks = split(_.shuffle(streams), options.maxClusters)
  for (const [i, chunk] of chunks.entries()) {
    for (const stream of chunk) {
      stream.set('cluster_id', { cluster_id: i + 1 })
      await db.streams.insert(stream.data())
    }
  for (const stream of streams) {
    await db.streams.insert(stream.data())
  }
}

function split(arr, n) {
  let result = []
  for (let i = n; i > 0; i--) {
    result.push(arr.splice(0, Math.ceil(arr.length / i)))
  }
  return result
}
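
Not part of the diff: a standalone example of how the removed split() helper divided the shuffled streams into clusters. The helper is copied verbatim from above; the input array is made up.

function split(arr, n) {
  let result = []
  for (let i = n; i > 0; i--) {
    result.push(arr.splice(0, Math.ceil(arr.length / i)))
  }
  return result
}

// 10 streams into 3 clusters: earlier chunks take the ceiling, so sizes stay balanced
console.log(split([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 3)) // [ [ 1, 2, 3, 4 ], [ 5, 6, 7 ], [ 8, 9, 10 ] ]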

View File

@@ -1,15 +1,12 @@
const { logger, db, api, file } = require('../../core')
const { logger, db, file } = require('../../core')
const _ = require('lodash')
const dayjs = require('dayjs')
const utc = require('dayjs/plugin/utc')
dayjs.extend(utc)

const PUBLIC_DIR = process.env.PUBLIC_DIR || '.api'

async function main() {
  await api.streams.load()
  logger.info(`loading streams...`)
  await db.streams.load()

  const now = dayjs.utc().format()
  let streams = await db.streams.find({})
  streams = _.sortBy(streams, 'channel')
  streams = streams.map(stream => {
@@ -17,36 +14,14 @@ async function main() {
      channel: stream.channel,
      url: stream.url,
      http_referrer: stream.http_referrer,
      user_agent: stream.user_agent,
      status: stream.status,
      width: stream.width,
      height: stream.height,
      bitrate: stream.bitrate,
      frame_rate: stream.frame_rate
      user_agent: stream.user_agent
    }

    let addedAt = now
    let updatedAt = now
    let found = api.streams.find({ url: stream.url })
    if (found) {
      data = JSON.parse(JSON.stringify(data))
      normalized = _.omit(found, ['added_at', 'updated_at', 'checked_at'])
      if (_.isEqual(data, normalized)) {
        addedAt = found.added_at || now
        updatedAt = found.updated_at || now
      } else {
        addedAt = found.added_at || now
        updatedAt = now
      }
    }

    data.added_at = addedAt
    data.updated_at = updatedAt
    data.checked_at = now

    return data
  })

  logger.info(`found ${streams.length} streams`)

  logger.info('saving to .api/streams.json...')
  await file.create(`${PUBLIC_DIR}/streams.json`, JSON.stringify(streams))
}
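
Not part of the diff: a small sketch of the added_at/updated_at logic above, using invented records. The cached entry from the API is compared to the freshly built object with the timestamp fields stripped, and updated_at is only reset to now when some other field actually changed.

const _ = require('lodash')

// hypothetical cached record from api.streams and the freshly built data object
const found = {
  channel: 'Example.us',
  url: 'https://example.com/stream.m3u8',
  http_referrer: null,
  user_agent: null,
  added_at: '2023-01-01T00:00:00Z',
  updated_at: '2023-02-01T00:00:00Z',
  checked_at: '2023-04-01T00:00:00Z'
}
const data = {
  channel: 'Example.us',
  url: 'https://example.com/stream.m3u8',
  http_referrer: null,
  user_agent: null
}

const normalized = _.omit(found, ['added_at', 'updated_at', 'checked_at'])
console.log(_.isEqual(data, normalized)) // true, so found.updated_at is kept instead of now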

View File

@@ -1,16 +0,0 @@
const { logger, db } = require('../../core')

async function main() {
  await db.streams.load()
  const docs = await db.streams.find({}).sort({ cluster_id: 1 })
  const cluster_id = docs.reduce((acc, curr) => {
    if (!acc.includes(curr.cluster_id)) acc.push(curr.cluster_id)

    return acc
  }, [])

  const matrix = { cluster_id }
  const output = `MATRIX=${JSON.stringify(matrix)}`
  logger.info(output)
}

main()
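
Not part of the diff: what the removed command printed, assuming the database held streams spread across clusters 1 to 3 (the ids are made up). The MATRIX line was presumably consumed by a CI workflow to run the checks per cluster.

const cluster_id = [1, 2, 3] // hypothetical list of distinct cluster ids
const matrix = { cluster_id }
console.log(`MATRIX=${JSON.stringify(matrix)}`) // MATRIX={"cluster_id":[1,2,3]}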

View File

@@ -1,161 +0,0 @@
const { db, store, parser, file, logger } = require('../../core')
const _ = require('lodash')
const dayjs = require('dayjs')
const utc = require('dayjs/plugin/utc')

dayjs.extend(utc)

const LOGS_DIR = process.env.LOGS_DIR || 'scripts/logs/cluster/load'

async function main() {
  const streams = await loadStreams()
  const results = await loadResults()
  const origins = await loadOrigins(results)

  await updateStreams(streams, results, origins)
}

main()

async function updateStreams(items = [], results = {}, origins = {}) {
  logger.info('updating streams...')

  let updated = 0
  const now = dayjs.utc().format()
  for (const item of items) {
    const stream = store.create(item)

    const result = results[item._id]
    if (result) {
      const status = parseStatus(result.error)
      if (status) {
        stream.set('status', { status })
      }

      if (result.streams.length) {
        const { width, height, bitrate, frame_rate } = parseMediaInfo(result.streams)
        stream.set('width', { width })
        stream.set('height', { height })
        stream.set('bitrate', { bitrate })
        stream.set('frame_rate', { frame_rate })
      }

      if (result.requests.length) {
        const origin = findOrigin(result.requests, origins)
        if (origin) {
          stream.set('url', { url: origin })
        }
      }
    }

    if (stream.changed) {
      stream.set('updated_at', { updated_at: now })
      await db.streams.update({ _id: stream.get('_id') }, stream.data())
      updated++
    }
  }

  db.streams.compact()

  logger.info(`updated ${updated} streams`)
  logger.info('done')
}
async function loadStreams() {
  logger.info('loading streams...')

  await db.streams.load()
  const streams = await db.streams.find({})

  logger.info(`found ${streams.length} streams`)

  return streams
}

async function loadResults() {
  logger.info('loading check results...')

  const results = {}
  const files = await file.list(`${LOGS_DIR}/cluster_*.log`)
  for (const filepath of files) {
    const parsed = await parser.parseLogs(filepath)
    for (const item of parsed) {
      results[item._id] = item
    }
  }

  logger.info(`found ${Object.values(results).length} results`)

  return results
}
async function loadOrigins(results = {}) {
  logger.info('loading origins...')

  const origins = {}
  for (const { error, requests } of Object.values(results)) {
    if (error || !Array.isArray(requests) || !requests.length) continue

    let origin = requests.shift()
    origin = new URL(origin.url)
    for (const request of requests) {
      const curr = new URL(request.url)
      const key = curr.href.replace(/(^\w+:|^)/, '')
      if (!origins[key] && curr.host === origin.host) {
        origins[key] = origin.href
      }
    }
  }

  logger.info(`found ${_.uniq(Object.values(origins)).length} origins`)

  return origins
}

function findOrigin(requests = [], origins = {}) {
  if (origins && Array.isArray(requests)) {
    requests = requests.map(r => r.url.replace(/(^\w+:|^)/, ''))
    for (const url of requests) {
      if (origins[url]) {
        return origins[url]
      }
    }
  }

  return null
}
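
Not part of the diff: a sketch of the origin lookup with invented URLs. loadOrigins keys each follow-up request URL, protocol stripped and same host only, to the first request's href; findOrigin strips the protocol from a stream's request URLs the same way and returns the first match.

// mapping as loadOrigins would build it for a playlist that redirects to a variant
const origins = {
  '//example.com/live/chunks_720p.m3u8': 'https://example.com/live/playlist.m3u8'
}
// requests recorded for some stream during the check
const requests = [{ url: 'http://example.com/live/chunks_720p.m3u8' }]

const stripped = requests.map(r => r.url.replace(/(^\w+:|^)/, ''))
console.log(origins[stripped[0]]) // 'https://example.com/live/playlist.m3u8'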
function parseMediaInfo(streams) {
  streams = streams.filter(s => s.codec_type === 'video')
  streams = streams.map(s => {
    s.bitrate = s.tags && s.tags.variant_bitrate ? parseInt(s.tags.variant_bitrate) : 0
    s.frame_rate = parseFrameRate(s.avg_frame_rate)

    return s
  })
  streams = _.orderBy(streams, ['height', 'bitrate'], ['desc', 'desc'])

  return _.head(streams) || {}
}

function parseFrameRate(frame_rate = '0/0') {
  const parts = frame_rate.split('/')
  const number = parseInt(parts[0]) / parseInt(parts[1])

  return number > 0 ? Math.round(number * 100) / 100 : 0
}
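
Not part of the diff: parseFrameRate applied to a typical ffprobe avg_frame_rate fraction and to the '0/0' fallback. The helper is copied verbatim from above so the snippet runs on its own; parseMediaInfo then attaches the result to the highest-resolution video stream.

function parseFrameRate(frame_rate = '0/0') {
  const parts = frame_rate.split('/')
  const number = parseInt(parts[0]) / parseInt(parts[1])
  return number > 0 ? Math.round(number * 100) / 100 : 0
}

console.log(parseFrameRate('30000/1001')) // 29.97
console.log(parseFrameRate('0/0')) // 0, since NaN fails the > 0 check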
function parseStatus(error) {
  if (!error) return 'online'

  switch (error.code) {
    case 'FFMPEG_UNDEFINED':
      return null
    case 'HTTP_REQUEST_TIMEOUT':
      return 'timeout'
    case 'HTTP_FORBIDDEN':
    case 'HTTP_UNAUTHORIZED':
    case 'HTTP_UNAVAILABLE_FOR_LEGAL_REASONS':
      return 'blocked'
    default:
      return 'error'
  }
}
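
Not part of the diff: how the removed parseStatus mapped checker errors to stream statuses. The helper is copied verbatim from above so the snippet runs on its own; the error codes are the ones handled by the switch.

function parseStatus(error) {
  if (!error) return 'online'
  switch (error.code) {
    case 'FFMPEG_UNDEFINED':
      return null
    case 'HTTP_REQUEST_TIMEOUT':
      return 'timeout'
    case 'HTTP_FORBIDDEN':
    case 'HTTP_UNAUTHORIZED':
    case 'HTTP_UNAVAILABLE_FOR_LEGAL_REASONS':
      return 'blocked'
    default:
      return 'error'
  }
}

console.log(parseStatus(null)) // 'online'
console.log(parseStatus({ code: 'HTTP_REQUEST_TIMEOUT' })) // 'timeout'
console.log(parseStatus({ code: 'HTTP_UNAUTHORIZED' })) // 'blocked'
console.log(parseStatus({ code: 'FFMPEG_UNDEFINED' })) // null, the status is left unchanged
console.log(parseStatus({ code: 'ANY_OTHER_CODE' })) // 'error'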