Compare commits
No commits in common. "7030c15b655824f106e97f42db8b9d3e22e1817f" and "68bf2ec2e86597bf570e7aa3195c8b72bc45db5c" have entirely different histories.
7030c15b65...68bf2ec2e8
3 changed files with 7 additions and 73 deletions
init.sh (2 changes)
```diff
@@ -11,8 +11,8 @@ mkdir -v -p data/restreamer/config
 mkdir -v -p data/restreamer/data
 
 # scheduler
-mkdir -v -p data/recorder/live
 mkdir -v -p data/recorder/vod
+mkdir -v -p data/recorder/live
 mkdir -v -p data/recorder/thumb
 
 # icecast
```
```diff
@@ -1,7 +1,6 @@
-import time
 import sys
 import os
-import subprocess
+import time
 import logging
 import json
 import requests
```
```diff
@@ -26,12 +25,10 @@ logger_job.setLevel(log_level)
 logger_content = logging.getLogger('content')
 
 # Variables
+core_sync_period = int(os.environ.get('CORE_SYNC_PERIOD', 15))
 core_hostname = os.environ.get('CORE_API_HOSTNAME', 'stream.example.com')
 core_username = os.environ.get('CORE_API_AUTH_USERNAME', 'admin')
 core_password = os.environ.get('CORE_API_AUTH_PASSWORD', 'pass')
-core_sync_period = int(os.environ.get('CORE_SYNC_PERIOD', 15))
-hls_converter_period = 180
 
 rec_path = "/recordings"
 enable_delay = 24
```
```diff
@@ -55,6 +52,7 @@ def get_core_process_details(client, process_id):
 
 # Process a running channel
 def process_running_channel(database, scheduler, stream_id, stream_name, stream_description, stream_hls_url):
+    global recording
     if stream_id in database:
        # Skip learned channels
        return
```
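The one-line addition here is load-bearing: later in `process_running_channel()` (outside this hunk) the function evidently assigns to a module-level `recording` object, and without the `global` declaration that assignment would silently bind a new local name instead. A minimal sketch of the pattern, with a hypothetical `recording` value, since its real shape isn't visible in the diff:

```python
# Why `global` is needed: assignment inside a function otherwise creates a
# local variable and leaves the module-level one untouched.
recording = {}

def start_recording(stream_id):
    global recording                      # hypothetical stand-in for the real usage
    recording = {stream_id: 'started'}    # rebinding requires the global declaration

start_recording('ch1')
print(recording)  # -> {'ch1': 'started'}
```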
```diff
@@ -247,67 +245,6 @@ def exec_recorder(stream_id, stream_name, stream_hls_url):
     rechead = {}
     logger_job.warning(f'Rechead reset.')
 
-# HLS Converter
-def hls_converter():
-    directory = f'{rec_path}/vod/'
-    try:
-        # Check if the directory exists
-        if not os.path.exists(directory):
-            raise FileNotFoundError(f"The directory '{directory}' does not exist.")
-
-        # Iterate through all entries in the directory
-        for entry in os.listdir(directory):
-            file_path = os.path.join(directory, entry)
-            if entry.lower().endswith('.mp4'):
-                input_file = file_path
-                break
-        logger_job.warning(f'{input_file} found. Converting to HLS...')
-
-        ffmpeg -i input_video.mp4 \
-        -filter_complex \
-        "[0:v]split=3[v1][v2][v3]; \
-        [v1]scale=w=1920:h=1080[v1out]; \
-        [v2]scale=w=1280:h=720[v2out]; \
-        [v3]scale=w=854:h=480[v3out]" \
-        -map "[v1out]" -c:v:0 libx264 -b:v:0 5000k -maxrate:v:0 5350k -bufsize:v:0 7500k \
-        -map "[v2out]" -c:v:1 libx264 -b:v:1 2800k -maxrate:v:1 2996k -bufsize:v:1 4200k \
-        -map "[v3out]" -c:v:2 libx264 -b:v:2 1400k -maxrate:v:2 1498k -bufsize:v:2 2100k \
-        -map a:0 -c:a aac -b:a:0 192k -ac 2 \
-        -map a:0 -c:a aac -b:a:1 128k -ac 2 \
-        -map a:0 -c:a aac -b:a:2 96k -ac 2 \
-        -f hls \
-        -hls_time 10 \
-        -hls_playlist_type vod \
-        -hls_flags independent_segments \
-        -hls_segment_type mpegts \
-        -hls_segment_filename stream_%v/data%03d.ts \
-        -master_pl_name master.m3u8 \
-        -var_stream_map "v:0,a:0 v:1,a:1 v:2,a:2" stream_%v/playlist.m3u8
-
-
-        ffmpeg -i brooklynsfinest_clip_1080p.mp4 \
-        -filter_complex \
-        "[0:v]split=3[v1][v2][v3]; \
-        [v1]copy[v1out]; [v2]scale=w=1280:h=720[v2out]; [v3]scale=w=640:h=360[v3out]" \
-        -map "[v1out]" -c:v:0 libx264 -x264-params "nal-hrd=cbr:force-cfr=1" -b:v:0 5M -maxrate:v:0 5M -minrate:v:0 5M -bufsize:v:0 10M -preset slow -g 48 -sc_threshold 0 -keyint_min 48 \
-        -map "[v2out]" -c:v:1 libx264 -x264-params "nal-hrd=cbr:force-cfr=1" -b:v:1 3M -maxrate:v:1 3M -minrate:v:1 3M -bufsize:v:1 3M -preset slow -g 48 -sc_threshold 0 -keyint_min 48 \
-        -map "[v3out]" -c:v:2 libx264 -x264-params "nal-hrd=cbr:force-cfr=1" -b:v:2 1M -maxrate:v:2 1M -minrate:v:2 1M -bufsize:v:2 1M -preset slow -g 48 -sc_threshold 0 -keyint_min 48 \
-        -map a:0 -c:a:0 aac -b:a:0 96k -ac 2 \
-        -map a:0 -c:a:1 aac -b:a:1 96k -ac 2 \
-        -map a:0 -c:a:2 aac -b:a:2 48k -ac 2 \
-        -f hls \
-        -hls_time 2 \
-        -hls_playlist_type vod \
-        -hls_flags independent_segments \
-        -hls_segment_type mpegts \
-        -hls_segment_filename stream_%v/data%02d.ts \
-        -master_pl_name master.m3u8 \
-        -var_stream_map "v:0,a:0 v:1,a:1 v:2,a:2" stream_%v.m3u8
-
-
-    except Exception as e:
-        logger_job.error(e)
-
 # Datarhei CORE API sync
 def core_api_sync():
     global database
```
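The deleted `hls_converter()` was never valid Python: both multi-line ffmpeg invocations sit in the function body as bare shell text, so the module could not even parse, which likely explains why the converter was removed outright (together with `import subprocess` and `hls_converter_period` in the earlier hunks) rather than patched. Had it been kept, one plausible wiring is to hand the first command's argument list to `subprocess.run`. The following is a sketch under that assumption, not code from either revision; the function name, paths, and trimmed bitrate flags are illustrative:

```python
import subprocess

def convert_to_hls(input_file: str, out_dir: str) -> None:
    """Transcode one MP4 into a three-rendition HLS ladder (illustrative only)."""
    cmd = [
        'ffmpeg', '-i', input_file,
        '-filter_complex',
        '[0:v]split=3[v1][v2][v3];'
        '[v1]scale=w=1920:h=1080[v1out];'
        '[v2]scale=w=1280:h=720[v2out];'
        '[v3]scale=w=854:h=480[v3out]',
        '-map', '[v1out]', '-c:v:0', 'libx264', '-b:v:0', '5000k',
        '-map', '[v2out]', '-c:v:1', 'libx264', '-b:v:1', '2800k',
        '-map', '[v3out]', '-c:v:2', 'libx264', '-b:v:2', '1400k',
        # three audio renditions, all fed from the first audio stream
        '-map', 'a:0', '-map', 'a:0', '-map', 'a:0',
        '-c:a', 'aac', '-ac', '2',
        '-b:a:0', '192k', '-b:a:1', '128k', '-b:a:2', '96k',
        '-f', 'hls',
        '-hls_time', '10',
        '-hls_playlist_type', 'vod',
        '-hls_flags', 'independent_segments',
        '-hls_segment_type', 'mpegts',
        '-hls_segment_filename', f'{out_dir}/stream_%v/data%03d.ts',
        '-master_pl_name', 'master.m3u8',
        '-var_stream_map', 'v:0,a:0 v:1,a:1 v:2,a:2',
        f'{out_dir}/stream_%v/playlist.m3u8',
    ]
    subprocess.run(cmd, check=True)  # raises CalledProcessError on a non-zero exit
```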
```diff
@@ -366,13 +303,10 @@ except Exception as err:
     logger_api.error('Restarting...')
     sys.exit(1)
 
-# Schedule API sync job
+# Schedule sync jobs
 scheduler.add_job(func=core_api_sync, trigger='interval', seconds=core_sync_period, id='core_api_sync')
 scheduler.get_job('core_api_sync').modify(next_run_time=datetime.now())
 
-# Schedule HLS converter job
-scheduler.add_job(func=hls_converter, trigger='interval', seconds=hls_converter_period, id='hls_converter')
-
 # Start the scheduler
 scheduler.start()
 
```
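What survives here is the standard APScheduler idiom: register an interval job, then pull its `next_run_time` forward so the first sync fires immediately instead of one full interval after startup. A self-contained sketch of that idiom, assuming a `BackgroundScheduler` (consistent with the `add_job`/`get_job`/`start` calls above; the job body is a placeholder):

```python
import time
from datetime import datetime

from apscheduler.schedulers.background import BackgroundScheduler

def core_api_sync():
    print('sync tick at', datetime.now())  # placeholder for the real API poll

scheduler = BackgroundScheduler()
# 15 s mirrors CORE_SYNC_PERIOD's default from the variables hunk above
scheduler.add_job(func=core_api_sync, trigger='interval', seconds=15, id='core_api_sync')
# An interval job normally first fires one interval after start();
# moving next_run_time up makes the first run immediate.
scheduler.get_job('core_api_sync').modify(next_run_time=datetime.now())
scheduler.start()

time.sleep(35)        # let a couple of ticks happen
scheduler.shutdown()  # stop the background thread cleanly
```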
```diff
@@ -152,7 +152,7 @@ async def query_database():
     rechead = {}
 
     # Creating an embed
-    img_url = f'https://{scheduler_hostname/static/images'
+    img_url = f'https://{scheduler_hostname}/img'
     thumb_url = f'https://{scheduler_hostname}/thumb/{thumb_filename}'
     video_download_url = f'https://{scheduler_hostname}/video/download/{video_filename}'
     video_filename_no_extension = video_filename.split('.')[0]
```
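Two things change in that one line: the original interpolation was missing its closing brace (`{scheduler_hostname/static/images`), which makes the f-string a syntax error, and the static-asset path is shortened to `/img`, matching the `img_url` usages in the footer below.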
```diff
@@ -168,7 +168,7 @@ async def query_database():
     embed.add_field(name="Watch",
                     value=f'[plyr.js player]({video_watch_url}) :]',
                     inline=True)
-    embed.set_image(url=thumb_url)
+    #embed.set_image(url=thumb_url)
     #embed.set_thumbnail(url=f'{img_url}/logo-96.png')
     embed.set_footer(text="DeflaxTV",
                      icon_url=f'{img_url}/logo-96.png')
```
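For context, these lines use discord.py's `Embed` API; the change simply comments out the full-width preview image while keeping the footer. A minimal sketch of how such an embed is assembled (constructor values and URLs are placeholders, since the hunk starts mid-function):

```python
import discord

# Placeholder values; in the bot they come from the scheduler's database query.
video_watch_url = 'https://stream.example.com/video/watch/demo'
img_url = 'https://stream.example.com/img'

embed = discord.Embed(title='New VOD', description='demo')
embed.add_field(name='Watch',
                value=f'[plyr.js player]({video_watch_url}) :]',
                inline=True)
# set_image(url=...) would show the thumbnail full-width; this revision disables it.
embed.set_footer(text='DeflaxTV', icon_url=f'{img_url}/logo-96.png')
```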