mirror of https://github.com/iptv-org/iptv

Update scripts

parent 89b87ad5c8
commit bb2935878d
@@ -1,33 +0,0 @@
-import { Storage, Logger, PlaylistParser, Collection, Database } from '../../core'
-import { Stream, Playlist } from '../../models'
-import { STREAMS_DIR, DB_DIR } from '../../constants'
-
-async function main() {
-  const logger = new Logger()
-
-  logger.info(`looking for streams...`)
-  const storage = new Storage(STREAMS_DIR)
-  const parser = new PlaylistParser({
-    storage
-  })
-  const files = await storage.list(`**/*.m3u`)
-  let streams = new Collection()
-  for (let filepath of files) {
-    const playlist: Playlist = await parser.parse(filepath)
-    streams = streams.concat(playlist.streams)
-  }
-
-  logger.info(`found ${streams.count()} streams`)
-
-  logger.info('clean up the storage...')
-  const dbStorage = new Storage(DB_DIR)
-  await dbStorage.clear('streams.db')
-
-  logger.info('saving streams to the database...')
-  const db = new Database(DB_DIR)
-  const dbStreams = await db.load('streams.db')
-  const data = streams.map((stream: Stream) => stream.data()).all()
-  await dbStreams.insert(data)
-}
-
-main()
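The removed script above persisted one nedb document per stream via stream.data(). For reference, a minimal sketch of reading those documents back with @seald-io/nedb's promise API; the file path is illustrative, not taken from this diff:

import Datastore from '@seald-io/nedb'

// Open the datastore the removed script wrote (path is hypothetical)
async function readStreams(dbPath: string) {
  const db = new Datastore({ filename: dbPath, autoload: true })
  // findAsync({}) resolves with every document in the datastore
  return db.findAsync({})
}

// e.g. const docs = await readStreams('database/streams.db')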
@@ -0,0 +1,67 @@
+import { STREAMS_DIR, DATA_DIR } from '../../constants'
+import { Storage, Logger, PlaylistParser, Collection } from '../../core'
+import { Stream, Playlist, Channel } from '../../models'
+import { program } from 'commander'
+
+program.argument('[filepath]', 'Path to file to validate').parse(process.argv)
+
+async function main() {
+  const storage = new Storage(STREAMS_DIR)
+  const logger = new Logger()
+
+  logger.info('loading channels from api...')
+  const dataStorage = new Storage(DATA_DIR)
+  const channelsContent = await dataStorage.json('channels.json')
+  const groupedChannels = new Collection(channelsContent)
+    .map(data => new Channel(data))
+    .keyBy((channel: Channel) => channel.id)
+
+  logger.info('loading streams...')
+  const parser = new PlaylistParser({ storage })
+  const files = program.args.length ? program.args : await storage.list('**/*.m3u')
+  let streams = await parser.parse(files)
+
+  logger.info(`found ${streams.count()} streams`)
+
+  logger.info('normalizing links...')
+  streams = streams.map(stream => {
+    stream.normalizeURL()
+    return stream
+  })
+
+  logger.info('removing duplicates...')
+  streams = streams.uniqBy(stream => stream.url)
+
+  logger.info('removing wrong id...')
+  streams = streams.map((stream: Stream) => {
+    if (groupedChannels.missing(stream.channel)) {
+      stream.channel = ''
+    }
+
+    return stream
+  })
+
+  logger.info('sorting links...')
+  streams = streams.orderBy(
+    [
+      (stream: Stream) => stream.name,
+      (stream: Stream) => parseInt(stream.quality.replace('p', '')),
+      (stream: Stream) => stream.label,
+      (stream: Stream) => stream.url
+    ],
+    ['asc', 'desc', 'asc', 'asc']
+  )
+
+  logger.info('saving...')
+  const groupedStreams = streams.groupBy((stream: Stream) => stream.filepath)
+  for (let filepath of groupedStreams.keys()) {
+    const streams = groupedStreams.get(filepath) || []
+
+    if (!streams.length) return
+
+    const playlist = new Playlist(streams, { public: false })
+    await storage.save(filepath, playlist.toString())
+  }
+}
+
+main()
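Because program.args is only consulted when non-empty, the script falls back to sweeping every playlist. A hypothetical invocation; the script path and ts-node runner are assumptions, neither appears in this diff:

npx ts-node scripts/commands/playlist/format.ts streams/us.m3u   # process a single playlist
npx ts-node scripts/commands/playlist/format.ts                  # process every **/*.m3u under STREAMS_DIR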
@@ -1,22 +0,0 @@
-import Datastore from '@seald-io/nedb'
-import * as path from 'path'
-
-export class Database {
-  rootDir: string
-
-  constructor(rootDir: string) {
-    this.rootDir = rootDir
-  }
-
-  async load(filepath: string) {
-    const absFilepath = path.join(this.rootDir, filepath)
-
-    return new Datastore({
-      filename: path.resolve(absFilepath),
-      autoload: true,
-      onload: (error: Error): any => {
-        if (error) console.error(error.message)
-      }
-    })
-  }
-}
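The onload callback in the removed class only logs a load failure. With @seald-io/nedb the same load can be written against its promise API, which surfaces the error to the caller instead; a minimal sketch, not code from this repository:

import Datastore from '@seald-io/nedb'
import * as path from 'path'

async function load(rootDir: string, filepath: string) {
  const datastore = new Datastore({ filename: path.resolve(path.join(rootDir, filepath)) })
  // loadDatabaseAsync() rejects on a corrupt or unreadable file instead of swallowing the error
  await datastore.loadDatabaseAsync()
  return datastore
}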
@@ -0,0 +1,16 @@
+import { Dictionary } from '../core'
+
+type IssueProps = {
+  number: number
+  data: Dictionary
+}
+
+export class Issue {
+  number: number
+  data: Dictionary
+
+  constructor({ number, data }: IssueProps) {
+    this.number = number
+    this.data = data
+  }
+}
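A hypothetical construction of the new model; the diff only fixes data's type as Dictionary, so the payload shape and import paths below are assumptions:

import { Dictionary } from '../core'
import { Issue } from '../models' // hypothetical import path

const issue = new Issue({
  number: 1234, // hypothetical GitHub issue number
  data: new Dictionary({ streamUrl: 'https://example.com/stream.m3u8' }) // assumes Dictionary wraps a plain object
})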