migrate scheduler and archive services
This commit is contained in:
parent
bb9789fd54
commit
1f46c23fd8
8 changed files with 156 additions and 1 deletions
3
.gitignore
vendored
3
.gitignore
vendored
|
@ -160,5 +160,6 @@ cython_debug/
|
||||||
#.idea/
|
#.idea/
|
||||||
|
|
||||||
|
|
||||||
# tv
|
# television
|
||||||
certbot.env
|
certbot.env
|
||||||
|
logs/
|
||||||
|
|
2
src/archive/Dockerfile
Normal file
2
src/archive/Dockerfile
Normal file
|
@ -0,0 +1,2 @@
|
||||||
|
# Archive service: a static landing page served by stock nginx.
FROM nginx

# nginx's default server root — index.html becomes the site's only page.
COPY index.html /usr/share/nginx/html
|
1
src/archive/index.html
Normal file
1
src/archive/index.html
Normal file
|
@ -0,0 +1 @@
|
||||||
|
hi from the archive :)
|
16
src/scheduler/Dockerfile
Normal file
16
src/scheduler/Dockerfile
Normal file
|
@ -0,0 +1,16 @@
|
||||||
|
# Scheduler service image: Flask app served by waitress on port 8080.
FROM python:3-bookworm

WORKDIR /python-docker

# Install dependencies before copying the source tree so Docker's layer
# cache skips the pip installs when only application code changes.
COPY requirements.txt requirements.txt
RUN pip3 install -r requirements.txt

# Pydantic is pinned in its own file (presumably for core-client
# compatibility — confirm against core-client-python 1.1.0's requirements).
COPY requirements-pydantic-pin.txt requirements-pydantic-pin.txt
RUN pip3 install -r requirements-pydantic-pin.txt
# The Datarhei Core API client is installed straight from a tagged GitHub tarball.
RUN pip3 install https://github.com/datarhei/core-client-python/archive/refs/tags/1.1.0.tar.gz

COPY . .

EXPOSE 8080

# Production WSGI entry point; app:create_app is the Flask factory in app.py.
CMD [ "waitress-serve", "--port=8080", "--call", "app:create_app"]
|
113
src/scheduler/app.py
Normal file
113
src/scheduler/app.py
Normal file
|
@ -0,0 +1,113 @@
|
||||||
|
"""Scheduler service: mirrors Datarhei Core processes into an in-memory channel database."""
import os
import logging
import asyncio
from datetime import datetime
from schedule_manager import ScheduleManager
from flask import Flask, render_template, jsonify, request
from core_client import Client

app = Flask(__name__)
manager = ScheduleManager()
logger = logging.getLogger('waitress')
logger.setLevel(os.environ.get('LOG_LEVEL', 'INFO').upper())

# In-memory channel registry, keyed by stream id; maintained by core_api_sync().
database = {}

# Environment
api_hostname = os.environ.get('CORE_API_HOSTNAME')
api_port = os.environ.get('CORE_API_PORT')
api_username = os.environ.get('CORE_API_USERNAME')
api_password = os.environ.get('CORE_API_PASSWORD')  # fixed: PEP 8 spacing around '='

# Static HLS playheads (fixed stream URLs on the Core server).
iway = {
    "head": "https://stream.deflax.net/memfs/ac2172fa-d8d5-4487-8bc6-5347dcf7c0dc.m3u8"
}

cam = {
    "head": "https://stream.deflax.net/memfs/37890510-5ff7-4387-866f-516468cea43f.m3u8"
}

jt = {
    "head": "https://stream.deflax.net/memfs/6e5b4949-910d-4ec9-8ed8-1a3d8bb5138e.m3u8"
}

# Datarhei Core API integration
SYNC_PERIOD = 30
try:
    client = Client(base_url='https://' + api_hostname, username=api_username, password=api_password)
    logger.info('Logging in to Datarhei Core API ' + api_username + '@' + api_hostname)
    client.login()
except Exception as err:
    # NOTE(review): on login failure `client` is left undefined, so every later
    # core_api_sync() run raises NameError — consider failing fast here instead.
    logger.error(err)
||||||
|
def core_api_sync():
    """Synchronize the Core API process list into the in-memory `database`.

    Registers each running process that carries restreamer-ui metadata as a
    channel, removes channels whose process is no longer running, and finally
    drops database entries whose process disappeared entirely.

    Returns:
        True (the schedule-manager job contract expects a return value).
    """
    new_ids = []
    try:
        process_list = client.v3_process_get_list()
    except Exception as err:
        # Include the underlying error so failed syncs are diagnosable
        # (previously only a fixed string was logged and `err` was discarded).
        logger.error('process_get_list error: {}'.format(err))
        return True

    for process in process_list:
        get_process = client.v3_process_get(id=process.id)
        stream_id = get_process.reference
        meta = get_process.metadata
        state = get_process.state

        # Guard clauses: skip processes without usable restreamer-ui metadata.
        if meta is None:
            continue
        ui = meta.get('restreamer-ui')
        if ui is None or ui.get('meta') is None:
            # Robustness fix: a missing 'restreamer-ui' key previously raised
            # KeyError and aborted the whole sync run.
            #logger.warn('{} does not have a meta key'.format(stream_id))
            continue

        new_ids.append(stream_id)
        stream_name = ui['meta']['name']
        stream_description = ui['meta']['description']
        stream_storage_type = ui['control']['hls']['storage']
        stream_hls_url = 'https://{}/{}/{}.m3u8'.format(api_hostname, stream_storage_type, stream_id)

        payload = { stream_id: { 'name': stream_name, 'meta': stream_description, 'src': stream_hls_url } }

        if state.exec == "running":
            # Register a running channel, but never overwrite an existing entry.
            if stream_id in database:
                continue
            logger.info('{} ({}) has been registered to the database'.format(stream_id, stream_name))
            database.update(payload)
        else:
            # Process exists but is not running: drop it from the database.
            if stream_id in database:
                logger.info('{} ({}) has been removed from the database. Reason: {}'.format(stream_id, stream_name, state.exec))
                database.pop(stream_id)
                new_ids.remove(stream_id)

    # Cleanup orphaned references (entries whose process vanished from the API).
    marked_keys = [key for key in database if key not in new_ids]
    for marked_key in marked_keys:
        logger.info('Key {} is an orphan. Removing.'.format(marked_key))
        database.pop(marked_key)
|
||||||
|
|
||||||
|
def analyze_db():
    """Periodic debug hook over the in-memory database (currently a no-op).

    Returns True to satisfy the schedule-manager job contract.
    """
    # Debug aid: uncomment to dump the current channel registry.
    #logger.info(database)
    return True
|
||||||
|
|
||||||
|
# Periodic task: keep the channel database in sync with the Datarhei Core API.
sync_task = manager.register_task(name="core_api_sync", job=core_api_sync)
sync_task.period(SYNC_PERIOD).start()

# Periodic task: read the memory state.
read_task = manager.register_task(name="read_database", job=analyze_db)
read_task.period(35).start()
|
||||||
|
|
||||||
|
@app.route('/', methods=['GET'])
def root_query():
    """Serve the current playhead as JSON (hard-wired to the `jt` stream)."""
    return jsonify(jt)
|
||||||
|
|
||||||
|
def create_app():
    """Application factory used by waitress (see Dockerfile: --call app:create_app)."""
    return app
|
2
src/scheduler/requirements-pydantic-pin.txt
Normal file
2
src/scheduler/requirements-pydantic-pin.txt
Normal file
|
@ -0,0 +1,2 @@
|
||||||
|
pydantic==1.10.2
|
||||||
|
pydantic-collections==0.3.0
|
9
src/scheduler/requirements.txt
Normal file
9
src/scheduler/requirements.txt
Normal file
|
@ -0,0 +1,9 @@
|
||||||
|
blinker==1.7.0
|
||||||
|
click==8.1.7
|
||||||
|
Flask==3.0.0
|
||||||
|
itsdangerous==2.1.2
|
||||||
|
Jinja2==3.1.2
|
||||||
|
MarkupSafe==2.1.3
|
||||||
|
schedule-manager==0.1.1
|
||||||
|
waitress==2.1.2
|
||||||
|
Werkzeug==3.0.1
|
11
src/scheduler/templates/index.html
Normal file
11
src/scheduler/templates/index.html
Normal file
|
@ -0,0 +1,11 @@
|
||||||
|
<!DOCTYPE html>
|
||||||
|
|
||||||
|
<html lang="en">
|
||||||
|
<head>
|
||||||
|
<meta charset="UTF-8">
|
||||||
|
<title>scheduler api</title>
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<h1>hi</h1>
|
||||||
|
</body>
|
||||||
|
</html>
|
Loading…
Reference in a new issue