mirror of
https://github.com/fHDHR/fHDHR_NextPVR.git
synced 2025-12-06 12:56:57 -05:00
commit
8a78c11709
@ -1,9 +1,41 @@
|
|||||||
[main]
|
[main]
|
||||||
# uuid =
|
# uuid =
|
||||||
# cache_dir =
|
# cache_dir =
|
||||||
|
# servicename = NextPVR
|
||||||
|
# reponame = fHDHR_NextPVR
|
||||||
|
|
||||||
[blocks]
|
[fhdhr]
|
||||||
# epg_update_frequency =
|
# address = 0.0.0.0
|
||||||
|
# discovery_address = 0.0.0.0
|
||||||
|
# port = 5004
|
||||||
|
# stream_type = direct
|
||||||
|
# tuner_count = 4
|
||||||
|
# friendlyname = fHDHR-NextPVR
|
||||||
|
# reporting_firmware_name = fHDHR_NextPVR
|
||||||
|
# reporting_manufacturer = BoronDust
|
||||||
|
# reporting_model = fHDHR
|
||||||
|
# reporting_firmware_ver = 20201001
|
||||||
|
# reporting_tuner_type = Antenna
|
||||||
|
# device_auth = fHDHR
|
||||||
|
|
||||||
|
[epg]
|
||||||
|
# images = pass
|
||||||
|
# method = origin
|
||||||
|
# update_frequency = 43200
|
||||||
|
|
||||||
|
[ffmpeg]
|
||||||
|
# ffmpeg_path = ffmpeg
|
||||||
|
# bytes_per_read = 1152000
|
||||||
|
|
||||||
|
[direct_stream]
|
||||||
|
# chunksize = 1048576
|
||||||
|
|
||||||
|
[logging]
|
||||||
|
# level = WARNING
|
||||||
|
|
||||||
|
[database]
|
||||||
|
# type = sqlite
|
||||||
|
# driver = None
|
||||||
|
|
||||||
[nextpvr]
|
[nextpvr]
|
||||||
# address = localhost
|
# address = localhost
|
||||||
@ -11,16 +43,6 @@
|
|||||||
# ssl =
|
# ssl =
|
||||||
# pin =
|
# pin =
|
||||||
# weight = 300
|
# weight = 300
|
||||||
# epg_update_frequency = 43200
|
|
||||||
|
|
||||||
[fhdhr]
|
|
||||||
# address = 0.0.0.0
|
|
||||||
# port = 5004
|
|
||||||
# discovery_address = 0.0.0.0
|
|
||||||
# tuner_count = 3
|
|
||||||
# friendlyname = fHDHR-Locast
|
|
||||||
# stream_type = ffmpeg
|
|
||||||
# epg_method = origin
|
|
||||||
|
|
||||||
[zap2it]
|
[zap2it]
|
||||||
# delay = 5
|
# delay = 5
|
||||||
@ -35,18 +57,3 @@
|
|||||||
# timespan = 6
|
# timespan = 6
|
||||||
# timezone =
|
# timezone =
|
||||||
# userid = -
|
# userid = -
|
||||||
# epg_update_frequency = 43200
|
|
||||||
|
|
||||||
[ffmpeg]
|
|
||||||
# ffmpeg_path = ffmpeg
|
|
||||||
# bytes_per_read = 1152000
|
|
||||||
|
|
||||||
[direct_stream]
|
|
||||||
# chunksize = 1024*1024
|
|
||||||
|
|
||||||
[dev]
|
|
||||||
# reporting_manufacturer = BoronDust
|
|
||||||
# reporting_model = fHDHR
|
|
||||||
# reporting_firmware_ver = 20201001
|
|
||||||
# reporting_tuner_type = Antenna
|
|
||||||
# device_auth = fHDHR
|
|
||||||
|
|||||||
@ -1,7 +1,7 @@
|
|||||||
[nextpvr]
|
[nextpvr]
|
||||||
address = localhost
|
# address = localhost
|
||||||
port = 8866
|
# port = 8866
|
||||||
pin =
|
# pin =
|
||||||
|
|
||||||
[fhdhr]
|
[fhdhr]
|
||||||
# address = 0.0.0.0
|
# address = 0.0.0.0
|
||||||
|
|||||||
@ -2,13 +2,19 @@
|
|||||||
uuid =
|
uuid =
|
||||||
cache_dir =
|
cache_dir =
|
||||||
|
|
||||||
[blocks]
|
|
||||||
epg_update_frequency = 43200
|
|
||||||
|
|
||||||
[fhdhr]
|
[fhdhr]
|
||||||
address = 0.0.0.0
|
address = 0.0.0.0
|
||||||
port = 5004
|
|
||||||
discovery_address = 0.0.0.0
|
discovery_address = 0.0.0.0
|
||||||
|
port = 5004
|
||||||
|
reporting_manufacturer = BoronDust
|
||||||
|
reporting_model = fHDHR
|
||||||
|
reporting_firmware_ver = 20201001
|
||||||
|
reporting_tuner_type = Antenna
|
||||||
|
device_auth = fHDHR
|
||||||
|
require_auth = False
|
||||||
|
|
||||||
|
[epg]
|
||||||
|
images = pass
|
||||||
|
|
||||||
[ffmpeg]
|
[ffmpeg]
|
||||||
ffmpeg_path = ffmpeg
|
ffmpeg_path = ffmpeg
|
||||||
@ -17,9 +23,9 @@ bytes_per_read = 1152000
|
|||||||
[direct_stream]
|
[direct_stream]
|
||||||
chunksize = 1048576
|
chunksize = 1048576
|
||||||
|
|
||||||
[dev]
|
[logging]
|
||||||
reporting_manufacturer = BoronDust
|
level = WARNING
|
||||||
reporting_model = fHDHR
|
|
||||||
reporting_firmware_ver = 20201001
|
[database]
|
||||||
reporting_tuner_type = Antenna
|
type = sqlite
|
||||||
device_auth = fHDHR
|
driver = None
|
||||||
|
|||||||
@ -1,6 +1,3 @@
|
|||||||
[dev]
|
|
||||||
reporting_firmware_name = fHDHR_NextPVR
|
|
||||||
|
|
||||||
[main]
|
[main]
|
||||||
servicename = NextPVR
|
servicename = NextPVR
|
||||||
dictpopname = nextpvr
|
dictpopname = nextpvr
|
||||||
@ -11,8 +8,12 @@ valid_epg_methods = None,blocks,origin,zap2it
|
|||||||
[fhdhr]
|
[fhdhr]
|
||||||
friendlyname = fHDHR-NextPVR
|
friendlyname = fHDHR-NextPVR
|
||||||
stream_type = direct
|
stream_type = direct
|
||||||
epg_method = origin
|
|
||||||
tuner_count = 4
|
tuner_count = 4
|
||||||
|
reporting_firmware_name = fHDHR_NextPVR
|
||||||
|
|
||||||
|
[epg]
|
||||||
|
method = origin
|
||||||
|
update_frequency = 43200
|
||||||
|
|
||||||
[nextpvr]
|
[nextpvr]
|
||||||
address = localhost
|
address = localhost
|
||||||
@ -22,18 +23,3 @@ pin =
|
|||||||
weight = 300
|
weight = 300
|
||||||
epg_update_frequency = 43200
|
epg_update_frequency = 43200
|
||||||
sid =
|
sid =
|
||||||
|
|
||||||
[zap2it]
|
|
||||||
delay = 5
|
|
||||||
postalcode =
|
|
||||||
affiliate_id = gapzap
|
|
||||||
country = USA
|
|
||||||
device = -
|
|
||||||
headendid = lineupId
|
|
||||||
isoverride = True
|
|
||||||
languagecode = en
|
|
||||||
pref =
|
|
||||||
timespan = 6
|
|
||||||
timezone =
|
|
||||||
userid = -
|
|
||||||
epg_update_frequency = 43200
|
|
||||||
|
|||||||
13
data/internal_config/zap2it.ini
Normal file
13
data/internal_config/zap2it.ini
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
[zap2it]
|
||||||
|
delay = 5
|
||||||
|
postalcode =
|
||||||
|
affiliate_id = gapzap
|
||||||
|
country = USA
|
||||||
|
device = -
|
||||||
|
headendid = lineupId
|
||||||
|
isoverride = True
|
||||||
|
languagecode = en
|
||||||
|
pref =
|
||||||
|
timespan = 6
|
||||||
|
timezone =
|
||||||
|
userid = -
|
||||||
@ -1,2 +1,23 @@
|
|||||||
# coding=utf-8
|
# coding=utf-8
|
||||||
fHDHR_VERSION = "v0.3.0-beta"
|
|
||||||
|
from .origin import OriginServiceWrapper
|
||||||
|
from .device import fHDHR_Device
|
||||||
|
|
||||||
|
import fHDHR.tools
|
||||||
|
|
||||||
|
fHDHR_VERSION = "v0.4.0-beta"
|
||||||
|
|
||||||
|
|
||||||
|
class fHDHR_OBJ():
|
||||||
|
|
||||||
|
def __init__(self, settings, logger, db):
|
||||||
|
self.version = fHDHR_VERSION
|
||||||
|
self.config = settings
|
||||||
|
self.logger = logger
|
||||||
|
self.db = db
|
||||||
|
|
||||||
|
self.web = fHDHR.tools.WebReq()
|
||||||
|
|
||||||
|
self.origin = OriginServiceWrapper(settings, logger, self.web, db)
|
||||||
|
|
||||||
|
self.device = fHDHR_Device(settings, self.version, self.origin, logger, self.web, db)
|
||||||
|
|||||||
@ -1,234 +0,0 @@
|
|||||||
from gevent.pywsgi import WSGIServer
|
|
||||||
from flask import (Flask, send_from_directory, request,
|
|
||||||
abort, Response, stream_with_context, redirect)
|
|
||||||
|
|
||||||
from . import hub
|
|
||||||
|
|
||||||
|
|
||||||
fhdhrhub = hub.fHDHR_Hub()
|
|
||||||
|
|
||||||
|
|
||||||
class HDHR_HTTP_Server():
|
|
||||||
app = Flask(__name__,)
|
|
||||||
|
|
||||||
@app.route('/')
|
|
||||||
def root_path():
|
|
||||||
base_url = request.headers["host"]
|
|
||||||
return fhdhrhub.get_index_html(base_url)
|
|
||||||
|
|
||||||
@app.route('/guide')
|
|
||||||
def channel_guide_html():
|
|
||||||
return fhdhrhub.get_channel_guide_html()
|
|
||||||
|
|
||||||
@app.route('/origin')
|
|
||||||
def origin_html():
|
|
||||||
base_url = request.headers["host"]
|
|
||||||
return fhdhrhub.get_origin_html(base_url)
|
|
||||||
|
|
||||||
@app.route('/cluster')
|
|
||||||
def cluster_html():
|
|
||||||
method = request.args.get('method', default=None, type=str)
|
|
||||||
|
|
||||||
if method == "scan":
|
|
||||||
fhdhrhub.m_search()
|
|
||||||
|
|
||||||
elif method == 'add':
|
|
||||||
fhdhrhub.cluster_add(request.args.get("location", default=None, type=str))
|
|
||||||
elif method == 'del':
|
|
||||||
fhdhrhub.cluster_del(request.args.get("location", default=None, type=str))
|
|
||||||
|
|
||||||
elif method == 'sync':
|
|
||||||
fhdhrhub.cluster_sync(request.args.get("location", default=None, type=str))
|
|
||||||
|
|
||||||
elif method == 'leave':
|
|
||||||
fhdhrhub.cluster_leave()
|
|
||||||
elif method == 'disconnect':
|
|
||||||
fhdhrhub.cluster_disconnect()
|
|
||||||
|
|
||||||
if method:
|
|
||||||
return redirect('/cluster')
|
|
||||||
|
|
||||||
base_url = request.headers["host"]
|
|
||||||
return fhdhrhub.get_cluster_html(base_url)
|
|
||||||
|
|
||||||
@app.route('/style.css', methods=['GET'])
|
|
||||||
def style_css():
|
|
||||||
return send_from_directory(fhdhrhub.config.dict["filedir"]["www_dir"], 'style.css')
|
|
||||||
|
|
||||||
@app.route('/favicon.ico', methods=['GET'])
|
|
||||||
def favicon():
|
|
||||||
return send_from_directory(fhdhrhub.config.dict["filedir"]["www_dir"],
|
|
||||||
'favicon.ico',
|
|
||||||
mimetype='image/vnd.microsoft.icon')
|
|
||||||
|
|
||||||
@app.route('/device.xml', methods=['GET'])
|
|
||||||
def device_xml():
|
|
||||||
base_url = request.headers["host"]
|
|
||||||
device_xml = fhdhrhub.get_device_xml(base_url)
|
|
||||||
return Response(status=200,
|
|
||||||
response=device_xml,
|
|
||||||
mimetype='application/xml')
|
|
||||||
|
|
||||||
@app.route('/discover.json', methods=['GET'])
|
|
||||||
def discover_json():
|
|
||||||
base_url = request.headers["host"]
|
|
||||||
discover_json = fhdhrhub.get_discover_json(base_url)
|
|
||||||
return Response(status=200,
|
|
||||||
response=discover_json,
|
|
||||||
mimetype='application/json')
|
|
||||||
|
|
||||||
@app.route('/lineup_status.json', methods=['GET'])
|
|
||||||
def lineup_status_json():
|
|
||||||
linup_status_json = fhdhrhub.get_lineup_status_json()
|
|
||||||
return Response(status=200,
|
|
||||||
response=linup_status_json,
|
|
||||||
mimetype='application/json')
|
|
||||||
|
|
||||||
@app.route('/lineup.xml', methods=['GET'])
|
|
||||||
def lineup_xml():
|
|
||||||
base_url = request.headers["host"]
|
|
||||||
lineupxml = fhdhrhub.get_lineup_xml(base_url)
|
|
||||||
return Response(status=200,
|
|
||||||
response=lineupxml,
|
|
||||||
mimetype='application/xml')
|
|
||||||
|
|
||||||
@app.route('/lineup.json', methods=['GET'])
|
|
||||||
def lineup_json():
|
|
||||||
base_url = request.headers["host"]
|
|
||||||
station_list = fhdhrhub.get_lineup_json(base_url)
|
|
||||||
return Response(status=200,
|
|
||||||
response=station_list,
|
|
||||||
mimetype='application/json')
|
|
||||||
|
|
||||||
@app.route('/cluster.json', methods=['GET'])
|
|
||||||
def cluster_json():
|
|
||||||
base_url = request.headers["host"]
|
|
||||||
cluster_list = fhdhrhub.get_cluster_json(base_url)
|
|
||||||
return Response(status=200,
|
|
||||||
response=cluster_list,
|
|
||||||
mimetype='application/json')
|
|
||||||
|
|
||||||
@app.route('/xmltv.xml', methods=['GET'])
|
|
||||||
def xmltv_xml():
|
|
||||||
base_url = request.headers["host"]
|
|
||||||
xmltv = fhdhrhub.get_xmltv(base_url)
|
|
||||||
return Response(status=200,
|
|
||||||
response=xmltv,
|
|
||||||
mimetype='application/xml')
|
|
||||||
|
|
||||||
@app.route('/api/xmltv')
|
|
||||||
def api_xmltv():
|
|
||||||
DeviceAuth = request.args.get('DeviceAuth', default=None, type=str)
|
|
||||||
if DeviceAuth == fhdhrhub.config.dict["dev"]["device_auth"]:
|
|
||||||
base_url = request.headers["host"]
|
|
||||||
xmltv = fhdhrhub.get_xmltv(base_url)
|
|
||||||
return Response(status=200,
|
|
||||||
response=xmltv,
|
|
||||||
mimetype='application/xml')
|
|
||||||
return "not subscribed"
|
|
||||||
|
|
||||||
@app.route('/diagnostics', methods=['GET'])
|
|
||||||
def debug_html():
|
|
||||||
base_url = request.headers["host"]
|
|
||||||
return fhdhrhub.get_diagnostics_html(base_url)
|
|
||||||
|
|
||||||
@app.route('/streams', methods=['GET'])
|
|
||||||
def streams_html():
|
|
||||||
base_url = request.headers["host"]
|
|
||||||
return fhdhrhub.get_streams_html(base_url)
|
|
||||||
|
|
||||||
@app.route('/version', methods=['GET'])
|
|
||||||
def version_html():
|
|
||||||
base_url = request.headers["host"]
|
|
||||||
return fhdhrhub.get_version_html(base_url)
|
|
||||||
|
|
||||||
@app.route('/debug.json', methods=['GET'])
|
|
||||||
def debug_json():
|
|
||||||
base_url = request.headers["host"]
|
|
||||||
debugreport = fhdhrhub.get_debug_json(base_url)
|
|
||||||
return Response(status=200,
|
|
||||||
response=debugreport,
|
|
||||||
mimetype='application/json')
|
|
||||||
|
|
||||||
@app.route('/api/channels.m3u', methods=['GET'])
|
|
||||||
@app.route('/channels.m3u', methods=['GET'])
|
|
||||||
def channels_m3u():
|
|
||||||
base_url = request.headers["host"]
|
|
||||||
channels_m3u = fhdhrhub.get_channels_m3u(base_url)
|
|
||||||
return Response(status=200,
|
|
||||||
response=channels_m3u,
|
|
||||||
mimetype='text/plain')
|
|
||||||
|
|
||||||
@app.route('/<channel>.m3u', methods=['GET'])
|
|
||||||
def channel_m3u(channel):
|
|
||||||
base_url = request.headers["host"]
|
|
||||||
channel_m3u = fhdhrhub.get_channel_m3u(base_url, channel)
|
|
||||||
return Response(status=200,
|
|
||||||
response=channel_m3u,
|
|
||||||
mimetype='text/plain')
|
|
||||||
|
|
||||||
@app.route('/images', methods=['GET'])
|
|
||||||
def images():
|
|
||||||
image, imagetype = fhdhrhub.get_image(request.args)
|
|
||||||
return Response(image, content_type=imagetype, direct_passthrough=True)
|
|
||||||
|
|
||||||
@app.route('/auto/<channel>')
|
|
||||||
def auto(channel):
|
|
||||||
base_url = request.headers["host"]
|
|
||||||
stream_args = {
|
|
||||||
"channel": channel.replace('v', ''),
|
|
||||||
"method": request.args.get('method', default=fhdhrhub.config.dict["fhdhr"]["stream_type"], type=str),
|
|
||||||
"duration": request.args.get('duration', default=0, type=int),
|
|
||||||
"accessed": fhdhrhub.device.channels.get_fhdhr_stream_url(base_url, channel.replace('v', '')),
|
|
||||||
}
|
|
||||||
stream_args = fhdhrhub.get_stream_info(stream_args)
|
|
||||||
if stream_args["channelUri"]:
|
|
||||||
if stream_args["method"] == "direct":
|
|
||||||
return Response(fhdhrhub.get_stream(stream_args), content_type=stream_args["content_type"], direct_passthrough=True)
|
|
||||||
elif stream_args["method"] == "ffmpeg":
|
|
||||||
return Response(stream_with_context(fhdhrhub.get_stream(stream_args)), mimetype="video/mpeg")
|
|
||||||
abort(503)
|
|
||||||
|
|
||||||
@app.route('/chanscan', methods=['GET'])
|
|
||||||
def chanscan():
|
|
||||||
fhdhrhub.post_lineup_scan_start()
|
|
||||||
linup_status_json = fhdhrhub.get_lineup_status_json()
|
|
||||||
return Response(status=200,
|
|
||||||
response=linup_status_json,
|
|
||||||
mimetype='application/json')
|
|
||||||
|
|
||||||
@app.route('/lineup.post', methods=['POST'])
|
|
||||||
def lineup_post():
|
|
||||||
if 'scan' in list(request.args.keys()):
|
|
||||||
if request.args['scan'] == 'start':
|
|
||||||
fhdhrhub.post_lineup_scan_start()
|
|
||||||
return Response(status=200, mimetype='text/html')
|
|
||||||
elif request.args['scan'] == 'abort':
|
|
||||||
return Response(status=200, mimetype='text/html')
|
|
||||||
else:
|
|
||||||
print("Unknown scan command " + request.args['scan'])
|
|
||||||
currenthtmlerror = fhdhrhub.get_html_error("501 - " + request.args['scan'] + " is not a valid scan command")
|
|
||||||
return Response(status=200, response=currenthtmlerror, mimetype='text/html')
|
|
||||||
else:
|
|
||||||
currenthtmlerror = fhdhrhub.get_html_error("501 - not a valid command")
|
|
||||||
return Response(status=200, response=currenthtmlerror, mimetype='text/html')
|
|
||||||
|
|
||||||
def __init__(self, settings):
|
|
||||||
self.config = settings
|
|
||||||
|
|
||||||
def run(self):
|
|
||||||
self.http = WSGIServer((
|
|
||||||
self.config.dict["fhdhr"]["address"],
|
|
||||||
int(self.config.dict["fhdhr"]["port"])
|
|
||||||
), self.app.wsgi_app)
|
|
||||||
try:
|
|
||||||
self.http.serve_forever()
|
|
||||||
except KeyboardInterrupt:
|
|
||||||
self.http.stop()
|
|
||||||
|
|
||||||
|
|
||||||
def interface_start(settings, origin):
|
|
||||||
print("Starting fHDHR Web Interface")
|
|
||||||
fhdhrhub.setup(settings, origin)
|
|
||||||
fakhdhrserver = HDHR_HTTP_Server(settings)
|
|
||||||
fakhdhrserver.run()
|
|
||||||
@ -1,102 +0,0 @@
|
|||||||
from . import device, pages, files
|
|
||||||
|
|
||||||
|
|
||||||
class fHDHR_Hub():
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def setup(self, settings, origin):
|
|
||||||
self.config = settings
|
|
||||||
|
|
||||||
self.origin = origin
|
|
||||||
|
|
||||||
self.device = device.fHDHR_Device(settings, origin)
|
|
||||||
|
|
||||||
self.pages = pages.fHDHR_Pages(settings, self.device)
|
|
||||||
|
|
||||||
self.files = files.fHDHR_Files(settings, self.device)
|
|
||||||
|
|
||||||
def get_xmltv(self, base_url):
|
|
||||||
return self.files.xmltv.get_xmltv_xml(base_url)
|
|
||||||
|
|
||||||
def get_device_xml(self, base_url):
|
|
||||||
return self.files.devicexml.get_device_xml(base_url)
|
|
||||||
|
|
||||||
def get_discover_json(self, base_url):
|
|
||||||
return self.files.discoverjson.get_discover_json(base_url)
|
|
||||||
|
|
||||||
def get_lineup_status_json(self):
|
|
||||||
return self.files.lineupstatusjson.get_lineup_status_json()
|
|
||||||
|
|
||||||
def get_lineup_xml(self, base_url):
|
|
||||||
return self.files.lineupxml.get_lineup_xml(base_url)
|
|
||||||
|
|
||||||
def get_lineup_json(self, base_url):
|
|
||||||
return self.files.lineupjson.get_lineup_json(base_url)
|
|
||||||
|
|
||||||
def get_debug_json(self, base_url):
|
|
||||||
return self.files.debug.get_debug_json(base_url)
|
|
||||||
|
|
||||||
def get_cluster_json(self, base_url):
|
|
||||||
return self.files.cluster.get_cluster_json(base_url)
|
|
||||||
|
|
||||||
def get_html_error(self, message):
|
|
||||||
return self.pages.htmlerror.get_html_error(message)
|
|
||||||
|
|
||||||
def post_lineup_scan_start(self):
|
|
||||||
self.device.station_scan.scan()
|
|
||||||
|
|
||||||
def get_image(self, request_args):
|
|
||||||
return self.device.images.get_image(request_args)
|
|
||||||
|
|
||||||
def get_channels_m3u(self, base_url):
|
|
||||||
return self.files.m3u.get_channels_m3u(base_url)
|
|
||||||
|
|
||||||
def get_channel_m3u(self, base_url, channel_number):
|
|
||||||
return self.files.m3u.get_channel_m3u(base_url, channel_number)
|
|
||||||
|
|
||||||
def get_stream_info(self, stream_args):
|
|
||||||
return self.device.watch.get_stream_info(stream_args)
|
|
||||||
|
|
||||||
def get_stream(self, stream_args):
|
|
||||||
return self.device.watch.get_stream(stream_args)
|
|
||||||
|
|
||||||
def get_index_html(self, base_url):
|
|
||||||
return self.pages.index.get_index_html(base_url)
|
|
||||||
|
|
||||||
def get_channel_guide_html(self):
|
|
||||||
return self.pages.channel_guide.get_channel_guide_html()
|
|
||||||
|
|
||||||
def get_diagnostics_html(self, base_url):
|
|
||||||
return self.pages.diagnostics.get_diagnostics_html(base_url)
|
|
||||||
|
|
||||||
def get_streams_html(self, base_url):
|
|
||||||
return self.pages.streams.get_streams_html(base_url)
|
|
||||||
|
|
||||||
def get_version_html(self, base_url):
|
|
||||||
return self.pages.version.get_version_html(base_url)
|
|
||||||
|
|
||||||
def get_origin_html(self, base_url):
|
|
||||||
return self.pages.origin.get_origin_html(base_url)
|
|
||||||
|
|
||||||
def get_cluster_html(self, base_url):
|
|
||||||
return self.pages.cluster.get_cluster_html(base_url)
|
|
||||||
|
|
||||||
def m_search(self):
|
|
||||||
self.device.ssdp.m_search()
|
|
||||||
|
|
||||||
def cluster_add(self, location):
|
|
||||||
self.device.cluster.add(location)
|
|
||||||
|
|
||||||
def cluster_del(self, location):
|
|
||||||
self.device.cluster.remove(location)
|
|
||||||
|
|
||||||
def cluster_sync(self, location):
|
|
||||||
self.device.cluster.sync(location)
|
|
||||||
|
|
||||||
def cluster_leave(self):
|
|
||||||
self.device.cluster.leave()
|
|
||||||
|
|
||||||
def cluster_disconnect(self):
|
|
||||||
self.device.cluster.disconnect()
|
|
||||||
@ -1,29 +0,0 @@
|
|||||||
from . import channels, epg
|
|
||||||
from .tuners import Tuners
|
|
||||||
from .watch import WatchStream
|
|
||||||
from .images import imageHandler
|
|
||||||
from .station_scan import Station_Scan
|
|
||||||
from .ssdp import SSDPServer
|
|
||||||
from .cluster import fHDHR_Cluster
|
|
||||||
|
|
||||||
|
|
||||||
class fHDHR_Device():
|
|
||||||
|
|
||||||
def __init__(self, settings, origin):
|
|
||||||
self.config = settings
|
|
||||||
|
|
||||||
self.channels = channels.Channels(settings, origin)
|
|
||||||
|
|
||||||
self.epg = epg.EPG(settings, self.channels)
|
|
||||||
|
|
||||||
self.tuners = Tuners(settings, self.epg)
|
|
||||||
|
|
||||||
self.watch = WatchStream(settings, self.channels, self.tuners)
|
|
||||||
|
|
||||||
self.images = imageHandler(settings, self.epg)
|
|
||||||
|
|
||||||
self.station_scan = Station_Scan(settings, self.channels)
|
|
||||||
|
|
||||||
self.ssdp = SSDPServer(settings)
|
|
||||||
|
|
||||||
self.cluster = fHDHR_Cluster(settings, self.ssdp)
|
|
||||||
@ -1,142 +0,0 @@
|
|||||||
import os
|
|
||||||
import json
|
|
||||||
import urllib.parse
|
|
||||||
import requests
|
|
||||||
from collections import OrderedDict
|
|
||||||
|
|
||||||
|
|
||||||
class fHDHR_Cluster():
|
|
||||||
|
|
||||||
def __init__(self, settings, ssdp):
|
|
||||||
self.config = settings
|
|
||||||
self.ssdp = ssdp
|
|
||||||
self.cluster_file = self.config.dict["main"]["cluster"]
|
|
||||||
self.friendlyname = self.config.dict["fhdhr"]["friendlyname"]
|
|
||||||
self.location = ('http://' + settings.dict["fhdhr"]["discovery_address"] + ':' +
|
|
||||||
str(settings.dict["fhdhr"]["port"]))
|
|
||||||
self.location_url = urllib.parse.quote(self.location)
|
|
||||||
self.cluster = self.default_cluster()
|
|
||||||
self.load_cluster()
|
|
||||||
self.startup_sync()
|
|
||||||
|
|
||||||
def get_list(self):
|
|
||||||
return_dict = {}
|
|
||||||
for location in list(self.cluster.keys()):
|
|
||||||
if location != self.location:
|
|
||||||
return_dict[location] = {
|
|
||||||
"Joined": True
|
|
||||||
}
|
|
||||||
|
|
||||||
detected_list = self.ssdp.detect_method.get()
|
|
||||||
for location in detected_list:
|
|
||||||
if location not in list(self.cluster.keys()):
|
|
||||||
return_dict[location] = {
|
|
||||||
"Joined": False
|
|
||||||
}
|
|
||||||
return_dict = OrderedDict(sorted(return_dict.items()))
|
|
||||||
return return_dict
|
|
||||||
|
|
||||||
def default_cluster(self):
|
|
||||||
defdict = {}
|
|
||||||
defdict[self.location] = {
|
|
||||||
"base_url": self.location,
|
|
||||||
"name": self.friendlyname
|
|
||||||
}
|
|
||||||
return defdict
|
|
||||||
|
|
||||||
def load_cluster(self):
|
|
||||||
if os.path.isfile(self.cluster_file):
|
|
||||||
with open(self.cluster_file, 'r') as clusterfile:
|
|
||||||
self.cluster = json.load(clusterfile)
|
|
||||||
if self.location not in list(self.cluster.keys()):
|
|
||||||
self.cluster[self.location] = self.default_cluster()[self.location]
|
|
||||||
else:
|
|
||||||
self.cluster = self.default_cluster()
|
|
||||||
|
|
||||||
def startup_sync(self):
|
|
||||||
for location in list(self.cluster.keys()):
|
|
||||||
if location != self.location:
|
|
||||||
sync_url = location + "/cluster.json"
|
|
||||||
try:
|
|
||||||
sync_open = requests.get(sync_url)
|
|
||||||
retrieved_cluster = sync_open.json()
|
|
||||||
if self.location not in list(retrieved_cluster.keys()):
|
|
||||||
return self.leave()
|
|
||||||
except requests.exceptions.ConnectionError:
|
|
||||||
print("Unreachable: " + location)
|
|
||||||
|
|
||||||
def save_cluster(self):
|
|
||||||
with open(self.cluster_file, 'w') as clusterfile:
|
|
||||||
clusterfile.write(json.dumps(self.cluster, indent=4))
|
|
||||||
|
|
||||||
def leave(self):
|
|
||||||
self.cluster = self.default_cluster()
|
|
||||||
self.save_cluster()
|
|
||||||
|
|
||||||
def disconnect(self):
|
|
||||||
for location in list(self.cluster.keys()):
|
|
||||||
if location != self.location:
|
|
||||||
sync_url = location + "/cluster?method=del&location=" + self.location
|
|
||||||
try:
|
|
||||||
requests.get(sync_url)
|
|
||||||
except requests.exceptions.ConnectionError:
|
|
||||||
print("Unreachable: " + location)
|
|
||||||
self.leave()
|
|
||||||
|
|
||||||
def sync(self, location):
|
|
||||||
sync_url = location + "/cluster.json"
|
|
||||||
try:
|
|
||||||
sync_open = requests.get(sync_url)
|
|
||||||
self.cluster = sync_open.json()
|
|
||||||
self.save_cluster()
|
|
||||||
except requests.exceptions.ConnectionError:
|
|
||||||
print("Unreachable: " + location)
|
|
||||||
|
|
||||||
def push_sync(self):
|
|
||||||
for location in list(self.cluster.keys()):
|
|
||||||
if location != self.location:
|
|
||||||
sync_url = location + "/cluster?method=sync&location=" + self.location_url
|
|
||||||
try:
|
|
||||||
requests.get(sync_url)
|
|
||||||
except requests.exceptions.ConnectionError:
|
|
||||||
print("Unreachable: " + location)
|
|
||||||
|
|
||||||
def add(self, location):
|
|
||||||
if location not in list(self.cluster.keys()):
|
|
||||||
self.cluster[location] = {"base_url": location}
|
|
||||||
|
|
||||||
location_info_url = location + "/discover.json"
|
|
||||||
try:
|
|
||||||
location_info_req = requests.get(location_info_url)
|
|
||||||
except requests.exceptions.ConnectionError:
|
|
||||||
print("Unreachable: " + location)
|
|
||||||
del self.cluster[location]
|
|
||||||
return
|
|
||||||
location_info = location_info_req.json()
|
|
||||||
self.cluster[location]["name"] = location_info["FriendlyName"]
|
|
||||||
|
|
||||||
cluster_info_url = location + "/cluster.json"
|
|
||||||
try:
|
|
||||||
cluster_info_req = requests.get(cluster_info_url)
|
|
||||||
except requests.exceptions.ConnectionError:
|
|
||||||
print("Unreachable: " + location)
|
|
||||||
del self.cluster[location]
|
|
||||||
return
|
|
||||||
cluster_info = cluster_info_req.json()
|
|
||||||
for cluster_key in list(cluster_info.keys()):
|
|
||||||
if cluster_key not in list(self.cluster.keys()):
|
|
||||||
self.cluster[cluster_key] = cluster_info[cluster_key]
|
|
||||||
|
|
||||||
self.push_sync()
|
|
||||||
self.save_cluster()
|
|
||||||
|
|
||||||
def remove(self, location):
|
|
||||||
if location in list(self.cluster.keys()):
|
|
||||||
del self.cluster[location]
|
|
||||||
sync_url = location + "/cluster?method=leave"
|
|
||||||
try:
|
|
||||||
requests.get(sync_url)
|
|
||||||
except requests.exceptions.ConnectionError:
|
|
||||||
print("Unreachable: " + location)
|
|
||||||
self.push_sync()
|
|
||||||
self.save_cluster()
|
|
||||||
@ -1,121 +0,0 @@
|
|||||||
import os
|
|
||||||
import json
|
|
||||||
import time
|
|
||||||
import datetime
|
|
||||||
from collections import OrderedDict
|
|
||||||
from multiprocessing import Process
|
|
||||||
|
|
||||||
from fHDHR.origin import origin_epg
|
|
||||||
from .epgtypes import blocks, zap2it
|
|
||||||
|
|
||||||
|
|
||||||
class EPG():
|
|
||||||
|
|
||||||
def __init__(self, settings, channels):
|
|
||||||
self.config = settings
|
|
||||||
self.channels = channels
|
|
||||||
|
|
||||||
self.origin = origin_epg.originEPG(settings, channels)
|
|
||||||
|
|
||||||
self.epgdict = None
|
|
||||||
|
|
||||||
self.epg_method_selfadd()
|
|
||||||
|
|
||||||
self.epg_method = self.config.dict["fhdhr"]["epg_method"]
|
|
||||||
if self.epg_method:
|
|
||||||
self.sleeptime = self.config.dict[self.epg_method]["epg_update_frequency"]
|
|
||||||
|
|
||||||
self.epg_cache_file = self.config.dict["filedir"]["epg_cache"][self.epg_method]["epg_json"]
|
|
||||||
|
|
||||||
self.epgtypename = self.epg_method
|
|
||||||
if self.epg_method in [self.config.dict["main"]["dictpopname"], "origin"]:
|
|
||||||
self.epgtypename = self.config.dict["main"]["dictpopname"]
|
|
||||||
|
|
||||||
self.epgscan = Process(target=self.epgServerProcess)
|
|
||||||
self.epgscan.start()
|
|
||||||
|
|
||||||
def whats_on_now(self, channel):
|
|
||||||
epgdict = self.get_epg()
|
|
||||||
listings = epgdict[channel]["listing"]
|
|
||||||
for listing in listings:
|
|
||||||
nowtime = datetime.datetime.utcnow()
|
|
||||||
start_time = datetime.datetime.strptime(listing["time_start"], '%Y%m%d%H%M%S +0000')
|
|
||||||
end_time = datetime.datetime.strptime(listing["time_end"], '%Y%m%d%H%M%S +0000')
|
|
||||||
if start_time <= nowtime <= end_time:
|
|
||||||
epgitem = epgdict[channel].copy()
|
|
||||||
epgitem["listing"] = [listing]
|
|
||||||
return epgitem
|
|
||||||
return None
|
|
||||||
|
|
||||||
def whats_on_allchans(self):
|
|
||||||
channel_guide_list = []
|
|
||||||
for channel in self.channels.get_channels():
|
|
||||||
channel_guide_list.append(self.whats_on_now(channel["number"]))
|
|
||||||
return channel_guide_list
|
|
||||||
|
|
||||||
def get_epg(self):
|
|
||||||
if not self.epgdict:
|
|
||||||
if os.path.isfile(self.epg_cache_file):
|
|
||||||
with open(self.epg_cache_file, 'r') as epgfile:
|
|
||||||
self.epgdict = json.load(epgfile)
|
|
||||||
return self.epgdict
|
|
||||||
|
|
||||||
def get_thumbnail(self, itemtype, itemid):
|
|
||||||
if itemtype == "channel":
|
|
||||||
chandict = self.find_channel_dict(itemid)
|
|
||||||
return chandict["thumbnail"]
|
|
||||||
elif itemtype == "content":
|
|
||||||
progdict = self.find_program_dict(itemid)
|
|
||||||
return progdict["thumbnail"]
|
|
||||||
return None
|
|
||||||
|
|
||||||
def find_channel_dict(self, channel_id):
|
|
||||||
epgdict = self.get_epg()
|
|
||||||
channel_list = []
|
|
||||||
for channel in list(epgdict.keys()):
|
|
||||||
channel_list.append(epgdict[channel])
|
|
||||||
return next(item for item in channel_list if item["id"] == channel_id)
|
|
||||||
|
|
||||||
def find_program_dict(self, event_id):
|
|
||||||
epgdict = self.get_epg()
|
|
||||||
event_list = []
|
|
||||||
for channel in list(epgdict.keys()):
|
|
||||||
event_list.extend(epgdict[channel]["listing"])
|
|
||||||
return next(item for item in event_list if item["id"] == event_id)
|
|
||||||
|
|
||||||
def epg_method_selfadd(self):
|
|
||||||
for method in self.config.dict["main"]["valid_epg_methods"]:
|
|
||||||
if method not in [None, "None", "origin", self.config.dict["main"]["dictpopname"]]:
|
|
||||||
exec("%s = %s" % ("self." + str(method), str(method) + "." + str(method) + "EPG(self.config, self.channels)"))
|
|
||||||
|
|
||||||
def update(self):
|
|
||||||
|
|
||||||
print("Updating " + self.epgtypename + " EPG cache file.")
|
|
||||||
method_to_call = getattr(self, self.epg_method)
|
|
||||||
func_to_call = getattr(method_to_call, 'update_epg')
|
|
||||||
programguide = func_to_call()
|
|
||||||
|
|
||||||
for chan in list(programguide.keys()):
|
|
||||||
floatnum = str(float(chan))
|
|
||||||
programguide[floatnum] = programguide.pop(chan)
|
|
||||||
programguide[floatnum]["number"] = floatnum
|
|
||||||
|
|
||||||
programguide = OrderedDict(sorted(programguide.items()))
|
|
||||||
|
|
||||||
for cnum in programguide:
|
|
||||||
programguide[cnum]["listing"] = sorted(programguide[cnum]["listing"], key=lambda i: i['time_start'])
|
|
||||||
|
|
||||||
with open(self.epg_cache_file, 'w') as epgfile:
|
|
||||||
epgfile.write(json.dumps(programguide, indent=4))
|
|
||||||
print("Wrote " + self.epgtypename + " EPG cache file.")
|
|
||||||
self.epgdict = programguide
|
|
||||||
|
|
||||||
def epgServerProcess(self):
|
|
||||||
print("Starting EPG thread...")
|
|
||||||
|
|
||||||
try:
|
|
||||||
while True:
|
|
||||||
self.update()
|
|
||||||
time.sleep(self.sleeptime)
|
|
||||||
except KeyboardInterrupt:
|
|
||||||
pass
|
|
||||||
@ -1,27 +0,0 @@
|
|||||||
from multiprocessing import Process
|
|
||||||
|
|
||||||
|
|
||||||
class Station_Scan():
    """Runs channel scans in a background process and reports scan state."""

    def __init__(self, settings, channels):
        self.config = settings
        self.channels = channels
        self.chanscan = Process(target=self.runscan)

    def scan(self):
        """Kick off a background channel scan, unless one is already running."""
        print("Channel Scan Requested by Client.")
        # A multiprocessing.Process may only be started once; if a previous
        # scan already ran to completion (exitcode is set), build a fresh
        # Process so repeat scans keep working instead of failing forever.
        if self.chanscan.exitcode is not None:
            self.chanscan = Process(target=self.runscan)
        try:
            self.chanscan.start()
        except AssertionError:
            print("Channel Scan Already In Progress!")

    def runscan(self):
        """Worker body: force a channel refresh. Runs in the child process."""
        self.channels.get_channels(forceupdate=True)
        print("Requested Channel Scan Complete.")

    def scanning(self):
        """Return True while a scan process is alive, False otherwise."""
        try:
            self.chanscan.join(timeout=0)
            return self.chanscan.is_alive()
        except AssertionError:
            # join() on a never-started process raises AssertionError.
            return False
|
|
||||||
@ -1,30 +0,0 @@
|
|||||||
# pylama:ignore=W0611
|
|
||||||
from .discover_json import Discover_JSON
|
|
||||||
from .device_xml import Device_XML
|
|
||||||
from .lineup_xml import Lineup_XML
|
|
||||||
from .lineup_json import Lineup_JSON
|
|
||||||
from .debug_json import Debug_JSON
|
|
||||||
from .lineup_status_json import Lineup_Status_JSON
|
|
||||||
from .xmltv_xml import xmlTV_XML
|
|
||||||
from .m3u import channels_M3U
|
|
||||||
from .cluster_json import Cluster_JSON
|
|
||||||
|
|
||||||
|
|
||||||
class fHDHR_Files():
    """Aggregates every fHDHR output-file generator behind one object."""

    def __init__(self, settings, device):
        self.config = settings
        self.device = device

        # Device/discovery endpoints only need configuration.
        self.discoverjson = Discover_JSON(settings)
        self.devicexml = Device_XML(settings)

        # Lineup endpoints additionally need channel data from the device.
        self.lineupxml = Lineup_XML(settings, device)
        self.lineupjson = Lineup_JSON(settings, device)
        self.lineupstatusjson = Lineup_Status_JSON(settings, device)

        # Guide and playlist outputs.
        self.xmltv = xmlTV_XML(settings, device)
        self.m3u = channels_M3U(settings, device)

        # Diagnostics and clustering.
        self.debug = Debug_JSON(settings, device)
        self.cluster = Cluster_JSON(settings, device)
|
|
||||||
@ -1,13 +0,0 @@
|
|||||||
import json
|
|
||||||
|
|
||||||
|
|
||||||
class Cluster_JSON():
    """Serializes the device's cluster map for the /cluster.json endpoint."""

    def __init__(self, settings, device):
        self.config = settings
        self.device = device

    def get_cluster_json(self, base_url, force_update=False):
        """Return the current cluster membership as pretty-printed JSON."""
        return json.dumps(self.device.cluster.cluster, indent=4)
|
|
||||||
@ -1,16 +0,0 @@
|
|||||||
import json
|
|
||||||
|
|
||||||
|
|
||||||
class Debug_JSON():
    """Builds the /debug.json diagnostics payload."""

    def __init__(self, settings, device):
        self.config = settings
        self.device = device

    def get_debug_json(self, base_url):
        """Return server diagnostics (URL, channel count, tuner state) as JSON."""
        payload = {
            "base_url": base_url,
            "total channels": self.device.channels.get_station_total(),
            "tuner status": self.device.tuners.status(),
        }
        return json.dumps(payload, indent=4)
|
|
||||||
@ -1,38 +0,0 @@
|
|||||||
import xml.etree.ElementTree
|
|
||||||
from io import BytesIO
|
|
||||||
|
|
||||||
from fHDHR.tools import sub_el
|
|
||||||
|
|
||||||
|
|
||||||
class Device_XML():
    """Builds (and caches) the UPnP device description document."""

    # Cached rendered document; rebuilt on demand.
    device_xml = None

    def __init__(self, settings):
        self.config = settings

    def get_device_xml(self, base_url, force_update=False):
        """Return device.xml bytes, rendering on first use.

        Passing force_update=True discards the cached copy and rebuilds.
        """
        if not self.device_xml or force_update:
            root = xml.etree.ElementTree.Element('root')
            root.set('xmlns', "urn:schemas-upnp-org:device-1-0")

            sub_el(root, 'URLBase', "http://" + base_url)

            spec = sub_el(root, 'specVersion')
            sub_el(spec, 'major', "1")
            sub_el(spec, 'minor', "0")

            device = sub_el(root, 'device')
            sub_el(device, 'deviceType', "urn:schemas-upnp-org:device:MediaServer:1")
            sub_el(device, 'friendlyName', self.config.dict["fhdhr"]["friendlyname"])
            sub_el(device, 'manufacturer', self.config.dict["dev"]["reporting_manufacturer"])
            sub_el(device, 'modelName', self.config.dict["dev"]["reporting_model"])
            sub_el(device, 'modelNumber', self.config.dict["dev"]["reporting_model"])
            sub_el(device, 'serialNumber')
            sub_el(device, 'UDN', "uuid:" + self.config.dict["main"]["uuid"])

            buffer = BytesIO()
            buffer.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
            buffer.write(xml.etree.ElementTree.tostring(root, encoding='UTF-8'))
            self.device_xml = buffer.getvalue()

        return self.device_xml
|
|
||||||
@ -1,26 +0,0 @@
|
|||||||
import json
|
|
||||||
|
|
||||||
|
|
||||||
class Discover_JSON():
    """Builds (and caches) the HDHomeRun-style discover.json payload."""

    # Cached rendered payload; rebuilt on demand.
    discover_json = None

    def __init__(self, settings):
        self.config = settings

    def get_discover_json(self, base_url, force_update=False):
        """Return discover.json, rendering on first use.

        Passing force_update=True discards the cached copy and rebuilds.
        """
        if not self.discover_json or force_update:
            fhdhr_conf = self.config.dict["fhdhr"]
            dev_conf = self.config.dict["dev"]
            payload = {
                "FriendlyName": fhdhr_conf["friendlyname"],
                "Manufacturer": dev_conf["reporting_manufacturer"],
                "ModelNumber": dev_conf["reporting_model"],
                "FirmwareName": dev_conf["reporting_firmware_name"],
                "TunerCount": fhdhr_conf["tuner_count"],
                "FirmwareVersion": dev_conf["reporting_firmware_ver"],
                "DeviceID": self.config.dict["main"]["uuid"],
                "DeviceAuth": dev_conf["device_auth"],
                "BaseURL": "http://" + base_url,
                "LineupURL": "http://" + base_url + "/lineup.json"
            }
            self.discover_json = json.dumps(payload, indent=4)

        return self.discover_json
|
|
||||||
@ -1,16 +0,0 @@
|
|||||||
import json
|
|
||||||
|
|
||||||
|
|
||||||
class Lineup_JSON():
    """Builds (and caches) the lineup.json channel listing."""

    # Cached rendered payload; rebuilt on demand.
    lineup_json = None

    def __init__(self, settings, device):
        self.config = settings
        self.device = device

    def get_lineup_json(self, base_url, force_update=False):
        """Return lineup.json, rendering on first use; force_update rebuilds."""
        if not self.lineup_json or force_update:
            stations = self.device.channels.get_station_list(base_url)
            self.lineup_json = json.dumps(stations, indent=4)

        return self.lineup_json
|
|
||||||
@ -1,36 +0,0 @@
|
|||||||
import json
|
|
||||||
|
|
||||||
|
|
||||||
class Lineup_Status_JSON():
    """Builds lineup_status.json reflecting the current scan state."""

    def __init__(self, settings, device):
        self.config = settings
        self.device = device

    def get_lineup_status_json(self):
        """Return scan status JSON.

        Reports "in progress" while a scan is running, and also while no
        stations are known yet; otherwise the idle payload.
        """
        if self.device.station_scan.scanning():
            payload = self.scan_in_progress()
        elif not self.device.channels.get_station_total():
            # No stations yet: report as scanning so clients keep polling.
            payload = self.scan_in_progress()
        else:
            payload = self.not_scanning()
        return json.dumps(payload, indent=4)

    def scan_in_progress(self):
        """Status payload used while a scan is (or appears to be) running."""
        return {
            "ScanInProgress": "true",
            "Progress": 99,
            "Found": self.device.channels.get_station_total()
        }

    def not_scanning(self):
        """Idle status payload advertising the configured tuner source."""
        tuner_type = self.config.dict["dev"]["reporting_tuner_type"]
        return {
            "ScanInProgress": "false",
            "ScanPossible": "true",
            "Source": tuner_type,
            "SourceList": [tuner_type],
        }
|
|
||||||
@ -1,29 +0,0 @@
|
|||||||
import xml.etree.ElementTree
|
|
||||||
from io import BytesIO
|
|
||||||
|
|
||||||
from fHDHR.tools import sub_el
|
|
||||||
|
|
||||||
|
|
||||||
class Lineup_XML():
    """Builds (and caches) the lineup.xml channel listing."""

    # Cached rendered document (attribute name kept from the original code).
    device_xml = None

    def __init__(self, settings, device):
        self.config = settings
        self.device = device

    def get_lineup_xml(self, base_url, force_update=False):
        """Return lineup.xml bytes, rendering on first use; force_update rebuilds."""
        if not self.device_xml or force_update:
            lineup = xml.etree.ElementTree.Element('Lineup')
            for station in self.device.channels.get_station_list(base_url):
                program = sub_el(lineup, 'Program')
                sub_el(program, 'GuideNumber', station['GuideNumber'])
                sub_el(program, 'GuideName', station['GuideName'])
                sub_el(program, 'URL', station['URL'])

            buffer = BytesIO()
            buffer.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
            buffer.write(xml.etree.ElementTree.tostring(lineup, encoding='UTF-8'))
            self.device_xml = buffer.getvalue()

        return self.device_xml
|
|
||||||
@ -1,102 +0,0 @@
|
|||||||
from io import StringIO
|
|
||||||
|
|
||||||
|
|
||||||
class channels_M3U():
    """Renders M3U playlists for all channels or for a single channel.

    The header and per-channel rendering were previously duplicated verbatim
    between get_channels_m3u and get_channel_m3u; both now share the private
    helpers below. Public method signatures are unchanged.
    """

    def __init__(self, settings, device):
        self.config = settings
        self.device = device

    def _write_header(self, fakefile, base_url):
        """Write the #EXTM3U header, pointing tvg URLs at our xmltv feed."""
        xmltvurl = ('%s%s/xmltv.xml' % ("http://", base_url))
        fakefile.write(
            "%s\n" % (
                "#EXTM3U" + " " +
                "url-tvg=\"" + xmltvurl + "\"" + " " +
                "x-tvg-url=\"" + xmltvurl + "\"")
            )

    def _write_channel(self, fakefile, base_url, channel):
        """Write one channel's #EXTINF metadata line plus its stream URL."""
        logourl = ('%s%s/images?source=epg&type=channel&id=%s' %
                   ("http://", base_url, str(channel['id'])))
        fakefile.write(
            "%s\n" % (
                "#EXTINF" + ":0" + " " +
                "channelID=\"" + str(channel['id']) + "\" " +
                "tvg-chno=\"" + str(channel['number']) + "\" " +
                "tvg-name=\"" + str(channel['name']) + "\" " +
                "tvg-id=\"" + str(channel['number']) + "\" " +
                "tvg-logo=\"" + logourl + "\" " +
                "group-title=\"" + self.config.dict["fhdhr"]["friendlyname"] + "," + str(channel['name']))
            )
        fakefile.write(
            "%s\n" % (
                ('%s%s/auto/v%s' % ("http://", base_url, str(channel['number']))))
            )

    def get_channels_m3u(self, base_url):
        """Return an M3U playlist covering every known channel."""
        fakefile = StringIO()
        self._write_header(fakefile, base_url)
        for channel in self.device.channels.get_channels():
            self._write_channel(fakefile, base_url, channel)
        return fakefile.getvalue()

    def get_channel_m3u(self, base_url, channel_number):
        """Return an M3U playlist for the single channel with this number."""
        fakefile = StringIO()
        self._write_header(fakefile, base_url)
        channel = self.device.channels.get_channel_dict("number", channel_number)
        self._write_channel(fakefile, base_url, channel)
        return fakefile.getvalue()
|
|
||||||
@ -1,80 +0,0 @@
|
|||||||
from io import StringIO
|
|
||||||
import urllib.parse
|
|
||||||
import requests
|
|
||||||
|
|
||||||
|
|
||||||
class Cluster_HTML():
    """Renders the cluster-management HTML page."""

    def __init__(self, settings, device, page_elements):
        self.config = settings
        self.device = device
        self.page_elements = page_elements

    def get_cluster_html(self, base_url, force_update=False):
        """Return the cluster page: action buttons plus a table of known peers.

        For peers not in our cluster map, their name is fetched from their
        /discover.json; unreachable peers fall back to showing the location.
        """

        fakefile = StringIO()
        page_elements = self.page_elements.get()

        for line in page_elements["top"]:
            fakefile.write(line + "\n")

        fakefile.write("<h4 style=\"text-align: center;\">Cluster</h4>")
        fakefile.write("\n")

        fakefile.write("<div style=\"text-align: center;\">\n")
        fakefile.write(" <button onclick=\"OpenLink('%s')\">%s</a></button>\n" % ("/cluster?method=scan", "Force Scan"))
        fakefile.write(" <button onclick=\"OpenLink('%s')\">%s</a></button>\n" % ("/cluster?method=disconnect", "Disconnect"))
        fakefile.write("</div><br>\n")

        fakefile.write("<table class=\"center\" style=\"width:50%\">\n")
        fakefile.write(" <tr>\n")
        fakefile.write(" <th>Name</th>\n")
        fakefile.write(" <th>Location</th>\n")
        fakefile.write(" <th>Joined</th>\n")
        fakefile.write(" <th>Options</th>\n")
        fakefile.write(" </tr>\n")

        fhdhr_list = self.device.cluster.get_list()
        for location in list(fhdhr_list.keys()):
            fakefile.write(" <tr>\n")

            if location in list(self.device.cluster.cluster.keys()):
                location_name = self.device.cluster.cluster[location]["name"]
            else:
                # Bind a fallback BEFORE the network call: previously
                # location_name was left unbound when requests raised
                # ConnectionError, and the write below hit a NameError.
                location_name = location
                try:
                    location_info_url = location + "/discover.json"
                    location_info_req = requests.get(location_info_url)
                    location_info = location_info_req.json()
                    location_name = location_info["FriendlyName"]
                except requests.exceptions.ConnectionError:
                    print("Unreachable: " + location)
            fakefile.write(" <td>%s</td>\n" % (str(location_name)))

            fakefile.write(" <td>%s</td>\n" % (str(location)))

            fakefile.write(" <td>%s</td>\n" % (str(fhdhr_list[location]["Joined"])))

            fakefile.write(" <td>\n")
            fakefile.write(" <div>\n")
            location_url_query = urllib.parse.quote(location)
            fakefile.write(
                " <button onclick=\"OpenLink('%s')\">%s</a></button>\n" %
                (location, "Visit"))
            if not fhdhr_list[location]["Joined"]:
                fakefile.write(
                    " <button onclick=\"OpenLink('%s')\">%s</a></button>\n" %
                    ("/cluster?method=add&location=" + location_url_query, "Add"))
            else:
                fakefile.write(
                    " <button onclick=\"OpenLink('%s')\">%s</a></button>\n" %
                    ("/cluster?method=del&location=" + location_url_query, "Remove"))
            fakefile.write(" </div>\n")
            fakefile.write(" </td>\n")

            fakefile.write(" </tr>\n")

        for line in page_elements["end"]:
            fakefile.write(line + "\n")

        return fakefile.getvalue()
|
|
||||||
@ -1,45 +0,0 @@
|
|||||||
from io import StringIO
|
|
||||||
|
|
||||||
|
|
||||||
class Diagnostics_HTML():
    """Renders (and caches) the diagnostics page of endpoint buttons."""

    def __init__(self, settings, device, page_elements):
        self.config = settings
        self.device = device
        self.diagnostics_html = None
        self.page_elements = page_elements

    def get_diagnostics_html(self, base_url, force_update=False):
        """Return the diagnostics page, rendering on first use."""
        if not self.diagnostics_html or force_update:

            page = StringIO()
            elements = self.page_elements.get()

            for line in elements["top"]:
                page.write(line + "\n")

            # (label, path) pairs: one button per diagnostic endpoint.
            buttons = [
                ("Force Channel Update", "chanscan"),
                ("debug", "debug.json"),
                ("device.xml", "device.xml"),
                ("discover.json", "discover.json"),
                ("lineup.json", "lineup.json"),
                ("lineup_status.json", "lineup_status.json"),
                ("cluster.json", "cluster.json"),
            ]

            for button_label, button_path in buttons:
                page.write("<div style=\"text-align: center;\">\n")
                page.write(" <p><button onclick=\"OpenLink('%s')\">%s</a></button></p>\n" % (button_path, button_label))
                page.write("</div>\n")
                page.write("\n")

            for line in elements["end"]:
                page.write(line + "\n")

            self.diagnostics_html = page.getvalue()

        return self.diagnostics_html
|
|
||||||
@ -1,40 +0,0 @@
|
|||||||
from io import StringIO
|
|
||||||
|
|
||||||
|
|
||||||
class Origin_HTML():
    """Renders the origin-service status HTML page."""

    def __init__(self, settings, device, page_elements):
        self.config = settings
        self.device = device
        self.page_elements = page_elements

    def get_origin_html(self, base_url, force_update=False):
        """Return the status page: shared chrome wrapping a key/value table of
        the origin service's reported status."""

        servicename = str(self.config.dict["main"]["servicename"])

        page = StringIO()
        elements = self.page_elements.get()

        for line in elements["top"]:
            page.write(line + "\n")

        page.write("<h4 style=\"text-align: center;\">%s Status</h4>" % (servicename))
        page.write("\n")

        page.write("<table class=\"center\" style=\"width:50%\">\n")
        page.write(" <tr>\n")
        page.write(" <th></th>\n")
        page.write(" <th></th>\n")
        page.write(" </tr>\n")

        status = self.device.channels.get_origin_status()
        for key in list(status.keys()):
            page.write(" <tr>\n")
            page.write(" <td>%s</td>\n" % (str(key)))
            page.write(" <td>%s</td>\n" % (str(status[key])))
            page.write(" </tr>\n")

        for line in elements["end"]:
            page.write(line + "\n")

        return page.getvalue()
|
|
||||||
@ -1,35 +0,0 @@
|
|||||||
from io import StringIO
|
|
||||||
|
|
||||||
from fHDHR import fHDHR_VERSION
|
|
||||||
|
|
||||||
|
|
||||||
class Version_HTML():
    """Renders the version-information HTML page."""

    def __init__(self, settings, device, page_elements):
        self.config = settings
        self.device = device
        self.page_elements = page_elements

    def get_version_html(self, base_url, force_update=False):
        """Return the version page: shared chrome around a two-column table."""

        page = StringIO()
        elements = self.page_elements.get()

        for line in elements["top"]:
            page.write(line + "\n")

        page.write("<table class=\"center\" style=\"width:50%\">\n")
        page.write(" <tr>\n")
        page.write(" <th></th>\n")
        page.write(" <th></th>\n")
        page.write(" </tr>\n")

        page.write(" <tr>\n")
        page.write(" <td>%s</td>\n" % ("fHDHR"))
        page.write(" <td>%s</td>\n" % (str(fHDHR_VERSION)))
        page.write(" </tr>\n")

        for line in elements["end"]:
            page.write(line + "\n")

        return page.getvalue()
|
|
||||||
@ -1,15 +1,14 @@
|
|||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import time
|
|
||||||
import argparse
|
import argparse
|
||||||
from multiprocessing import Process
|
import time
|
||||||
|
import multiprocessing
|
||||||
|
|
||||||
from fHDHR import fHDHR_VERSION
|
from fHDHR import fHDHR_VERSION, fHDHR_OBJ
|
||||||
import fHDHR.exceptions
|
import fHDHR.exceptions
|
||||||
import fHDHR.config
|
import fHDHR.config
|
||||||
|
from fHDHR.http import fHDHR_HTTP_Server
|
||||||
import fHDHR.origin
|
from fHDHR.db import fHDHRdb
|
||||||
import fHDHR.api
|
|
||||||
|
|
||||||
ERR_CODE = 1
|
ERR_CODE = 1
|
||||||
ERR_CODE_NO_RESTART = 2
|
ERR_CODE_NO_RESTART = 2
|
||||||
@ -22,7 +21,6 @@ if sys.version_info.major == 2 or sys.version_info < (3, 3):
|
|||||||
|
|
||||||
def build_args_parser():
|
def build_args_parser():
|
||||||
"""Build argument parser for fHDHR"""
|
"""Build argument parser for fHDHR"""
|
||||||
print("Validating CLI Argument")
|
|
||||||
parser = argparse.ArgumentParser(description='fHDHR')
|
parser = argparse.ArgumentParser(description='fHDHR')
|
||||||
parser.add_argument('-c', '--config', dest='cfg', type=str, required=True, help='configuration file to load.')
|
parser.add_argument('-c', '--config', dest='cfg', type=str, required=True, help='configuration file to load.')
|
||||||
return parser.parse_args()
|
return parser.parse_args()
|
||||||
@ -34,16 +32,33 @@ def get_configuration(args, script_dir):
|
|||||||
return fHDHR.config.Config(args.cfg, script_dir)
|
return fHDHR.config.Config(args.cfg, script_dir)
|
||||||
|
|
||||||
|
|
||||||
def run(settings, origin):
|
def run(settings, logger, db):
|
||||||
|
|
||||||
fhdhrweb = Process(target=fHDHR.api.interface_start, args=(settings, origin))
|
fhdhr = fHDHR_OBJ(settings, logger, db)
|
||||||
fhdhrweb.start()
|
fhdhrweb = fHDHR_HTTP_Server(fhdhr)
|
||||||
|
|
||||||
print(settings.dict["fhdhr"]["friendlyname"] + " is now running!")
|
try:
|
||||||
|
|
||||||
# wait forever
|
print("HTTP Server Starting")
|
||||||
while True:
|
fhdhr_web = multiprocessing.Process(target=fhdhrweb.run)
|
||||||
time.sleep(3600)
|
fhdhr_web.start()
|
||||||
|
|
||||||
|
if settings.dict["fhdhr"]["discovery_address"]:
|
||||||
|
print("SSDP Server Starting")
|
||||||
|
fhdhr_ssdp = multiprocessing.Process(target=fhdhr.device.ssdp.run)
|
||||||
|
fhdhr_ssdp.start()
|
||||||
|
|
||||||
|
if settings.dict["epg"]["method"]:
|
||||||
|
print("EPG Update Starting")
|
||||||
|
fhdhr_epg = multiprocessing.Process(target=fhdhr.device.epg.run)
|
||||||
|
fhdhr_epg.start()
|
||||||
|
|
||||||
|
# wait forever
|
||||||
|
while True:
|
||||||
|
time.sleep(3600)
|
||||||
|
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
return ERR_CODE_NO_RESTART
|
||||||
|
|
||||||
return ERR_CODE
|
return ERR_CODE
|
||||||
|
|
||||||
@ -57,13 +72,11 @@ def start(args, script_dir):
|
|||||||
print(e)
|
print(e)
|
||||||
return ERR_CODE_NO_RESTART
|
return ERR_CODE_NO_RESTART
|
||||||
|
|
||||||
try:
|
logger = settings.logging_setup()
|
||||||
origin = fHDHR.origin.origin_channels.OriginService(settings)
|
|
||||||
except fHDHR.exceptions.OriginSetupError as e:
|
|
||||||
print(e)
|
|
||||||
return ERR_CODE_NO_RESTART
|
|
||||||
|
|
||||||
return run(settings, origin)
|
db = fHDHRdb(settings)
|
||||||
|
|
||||||
|
return run(settings, logger, db)
|
||||||
|
|
||||||
|
|
||||||
def main(script_dir):
|
def main(script_dir):
|
||||||
|
|||||||
@ -2,6 +2,8 @@ import os
|
|||||||
import random
|
import random
|
||||||
import configparser
|
import configparser
|
||||||
import pathlib
|
import pathlib
|
||||||
|
import logging
|
||||||
|
import subprocess
|
||||||
|
|
||||||
import fHDHR.exceptions
|
import fHDHR.exceptions
|
||||||
from fHDHR.tools import isint, isfloat, is_arithmetic
|
from fHDHR.tools import isint, isfloat, is_arithmetic
|
||||||
@ -19,13 +21,8 @@ class Config():
|
|||||||
print("Loading Configuration File: " + str(self.config_file))
|
print("Loading Configuration File: " + str(self.config_file))
|
||||||
self.read_config(self.config_file)
|
self.read_config(self.config_file)
|
||||||
|
|
||||||
print("Verifying Configuration settings.")
|
|
||||||
self.config_verification()
|
self.config_verification()
|
||||||
|
|
||||||
print("Server is set to run on " +
|
|
||||||
str(self.dict["fhdhr"]["address"]) + ":" +
|
|
||||||
str(self.dict["fhdhr"]["port"]))
|
|
||||||
|
|
||||||
def load_defaults(self, script_dir):
|
def load_defaults(self, script_dir):
|
||||||
|
|
||||||
data_dir = pathlib.Path(script_dir).joinpath('data')
|
data_dir = pathlib.Path(script_dir).joinpath('data')
|
||||||
@ -109,36 +106,40 @@ class Config():
|
|||||||
if isinstance(self.dict["main"]["valid_epg_methods"], str):
|
if isinstance(self.dict["main"]["valid_epg_methods"], str):
|
||||||
self.dict["main"]["valid_epg_methods"] = [self.dict["main"]["valid_epg_methods"]]
|
self.dict["main"]["valid_epg_methods"] = [self.dict["main"]["valid_epg_methods"]]
|
||||||
|
|
||||||
if self.dict["fhdhr"]["epg_method"] and self.dict["fhdhr"]["epg_method"] not in ["None"]:
|
if self.dict["epg"]["method"] and self.dict["epg"]["method"] not in ["None"]:
|
||||||
if self.dict["fhdhr"]["epg_method"] == self.dict["main"]["dictpopname"]:
|
if isinstance(self.dict["epg"]["method"], str):
|
||||||
self.dict["fhdhr"]["epg_method"] = "origin"
|
self.dict["epg"]["method"] = [self.dict["epg"]["method"]]
|
||||||
elif self.dict["fhdhr"]["epg_method"] not in self.dict["main"]["valid_epg_methods"]:
|
epg_methods = []
|
||||||
raise fHDHR.exceptions.ConfigurationError("Invalid EPG Method. Exiting...")
|
for epg_method in self.dict["epg"]["method"]:
|
||||||
else:
|
if epg_method == self.dict["main"]["dictpopname"] or epg_method == "origin":
|
||||||
print("EPG Method not set, will not create EPG/xmltv")
|
epg_methods.append("origin")
|
||||||
|
elif epg_method in ["None"]:
|
||||||
|
raise fHDHR.exceptions.ConfigurationError("Invalid EPG Method. Exiting...")
|
||||||
|
elif epg_method in self.dict["main"]["valid_epg_methods"]:
|
||||||
|
epg_methods.append(epg_method)
|
||||||
|
else:
|
||||||
|
raise fHDHR.exceptions.ConfigurationError("Invalid EPG Method. Exiting...")
|
||||||
|
self.dict["epg"]["def_method"] = self.dict["epg"]["method"][0]
|
||||||
|
|
||||||
# generate UUID here for when we are not using docker
|
# generate UUID here for when we are not using docker
|
||||||
if not self.dict["main"]["uuid"]:
|
if not self.dict["main"]["uuid"]:
|
||||||
print("No UUID found. Generating one now...")
|
|
||||||
# from https://pynative.com/python-generate-random-string/
|
# from https://pynative.com/python-generate-random-string/
|
||||||
# create a string that wouldn't be a real device uuid for
|
# create a string that wouldn't be a real device uuid for
|
||||||
self.dict["main"]["uuid"] = ''.join(random.choice("hijklmnopqrstuvwxyz") for i in range(8))
|
self.dict["main"]["uuid"] = ''.join(random.choice("hijklmnopqrstuvwxyz") for i in range(8))
|
||||||
self.write('main', 'uuid', self.dict["main"]["uuid"])
|
self.write('main', 'uuid', self.dict["main"]["uuid"])
|
||||||
print("UUID set to: " + str(self.dict["main"]["uuid"]) + "...")
|
|
||||||
else:
|
|
||||||
print("UUID read as: " + str(self.dict["main"]["uuid"]) + "...")
|
|
||||||
|
|
||||||
if self.dict["main"]["cache_dir"]:
|
if self.dict["main"]["cache_dir"]:
|
||||||
print("Verifying cache directory...")
|
|
||||||
if not pathlib.Path(self.dict["main"]["cache_dir"]).is_dir():
|
if not pathlib.Path(self.dict["main"]["cache_dir"]).is_dir():
|
||||||
raise fHDHR.exceptions.ConfigurationError("Invalid Cache Directory. Exiting...")
|
raise fHDHR.exceptions.ConfigurationError("Invalid Cache Directory. Exiting...")
|
||||||
self.dict["filedir"]["cache_dir"] = pathlib.Path(self.dict["main"]["cache_dir"])
|
self.dict["filedir"]["cache_dir"] = pathlib.Path(self.dict["main"]["cache_dir"])
|
||||||
print("Cache set to " + str(self.dict["filedir"]["cache_dir"]))
|
|
||||||
cache_dir = self.dict["filedir"]["cache_dir"]
|
cache_dir = self.dict["filedir"]["cache_dir"]
|
||||||
|
|
||||||
self.dict["main"]["channel_numbers"] = pathlib.Path(cache_dir).joinpath("cnumbers.json")
|
logs_dir = pathlib.Path(cache_dir).joinpath('logs')
|
||||||
self.dict["main"]["ssdp_detect"] = pathlib.Path(cache_dir).joinpath("ssdp_list.json")
|
self.dict["filedir"]["logs_dir"] = logs_dir
|
||||||
self.dict["main"]["cluster"] = pathlib.Path(cache_dir).joinpath("cluster.json")
|
if not logs_dir.is_dir():
|
||||||
|
logs_dir.mkdir()
|
||||||
|
|
||||||
|
self.dict["database"]["path"] = pathlib.Path(cache_dir).joinpath('fhdhr.db')
|
||||||
|
|
||||||
for epg_method in self.dict["main"]["valid_epg_methods"]:
|
for epg_method in self.dict["main"]["valid_epg_methods"]:
|
||||||
if epg_method and epg_method != "None":
|
if epg_method and epg_method != "None":
|
||||||
@ -157,8 +158,49 @@ class Config():
|
|||||||
if self.dict["fhdhr"]["stream_type"] not in ["direct", "ffmpeg"]:
|
if self.dict["fhdhr"]["stream_type"] not in ["direct", "ffmpeg"]:
|
||||||
raise fHDHR.exceptions.ConfigurationError("Invalid stream type. Exiting...")
|
raise fHDHR.exceptions.ConfigurationError("Invalid stream type. Exiting...")
|
||||||
|
|
||||||
|
if self.dict["fhdhr"]["stream_type"] == "ffmpeg":
|
||||||
|
try:
|
||||||
|
ffmpeg_command = [self.dict["ffmpeg"]["ffmpeg_path"],
|
||||||
|
"-version",
|
||||||
|
"pipe:stdout"
|
||||||
|
]
|
||||||
|
|
||||||
|
ffmpeg_proc = subprocess.Popen(ffmpeg_command, stdout=subprocess.PIPE)
|
||||||
|
ffmpeg_version = ffmpeg_proc.stdout.read()
|
||||||
|
ffmpeg_proc.terminate()
|
||||||
|
ffmpeg_proc.communicate()
|
||||||
|
ffmpeg_version = ffmpeg_version.decode().split("version ")[1].split(" ")[0]
|
||||||
|
except FileNotFoundError:
|
||||||
|
ffmpeg_version = None
|
||||||
|
self.dict["ffmpeg"]["version"] = ffmpeg_version
|
||||||
|
|
||||||
if not self.dict["fhdhr"]["discovery_address"] and self.dict["fhdhr"]["address"] != "0.0.0.0":
|
if not self.dict["fhdhr"]["discovery_address"] and self.dict["fhdhr"]["address"] != "0.0.0.0":
|
||||||
self.dict["fhdhr"]["discovery_address"] = self.dict["fhdhr"]["address"]
|
self.dict["fhdhr"]["discovery_address"] = self.dict["fhdhr"]["address"]
|
||||||
if not self.dict["fhdhr"]["discovery_address"] or self.dict["fhdhr"]["discovery_address"] == "0.0.0.0":
|
if not self.dict["fhdhr"]["discovery_address"] or self.dict["fhdhr"]["discovery_address"] == "0.0.0.0":
|
||||||
self.dict["fhdhr"]["discovery_address"] = None
|
self.dict["fhdhr"]["discovery_address"] = None
|
||||||
print("SSDP Server disabled.")
|
|
||||||
|
def logging_setup(self):
|
||||||
|
|
||||||
|
log_level = self.dict["logging"]["level"].upper()
|
||||||
|
|
||||||
|
# Create a custom logger
|
||||||
|
logging.basicConfig(format='%(name)s - %(levelname)s - %(message)s', level=log_level)
|
||||||
|
logger = logging.getLogger('fHDHR')
|
||||||
|
log_file = os.path.join(self.dict["filedir"]["logs_dir"], 'fHDHR.log')
|
||||||
|
|
||||||
|
# Create handlers
|
||||||
|
# c_handler = logging.StreamHandler()
|
||||||
|
f_handler = logging.FileHandler(log_file)
|
||||||
|
# c_handler.setLevel(log_level)
|
||||||
|
f_handler.setLevel(log_level)
|
||||||
|
|
||||||
|
# Create formatters and add it to handlers
|
||||||
|
# c_format = logging.Formatter('%(name)s - %(levelname)s - %(message)s')
|
||||||
|
f_format = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
|
||||||
|
# c_handler.setFormatter(c_format)
|
||||||
|
f_handler.setFormatter(f_format)
|
||||||
|
|
||||||
|
# Add handlers to the logger
|
||||||
|
# logger.addHandler(c_handler)
|
||||||
|
logger.addHandler(f_handler)
|
||||||
|
return logger
|
||||||
|
|||||||
405
fHDHR/db/__init__.py
Normal file
405
fHDHR/db/__init__.py
Normal file
@ -0,0 +1,405 @@
|
|||||||
|
# coding=utf-8
|
||||||
|
|
||||||
|
import json
|
||||||
|
import os.path
|
||||||
|
import traceback
|
||||||
|
|
||||||
|
from sqlalchemy import Column, create_engine, String, Text
|
||||||
|
from sqlalchemy.engine.url import URL
|
||||||
|
from sqlalchemy.exc import OperationalError, SQLAlchemyError
|
||||||
|
from sqlalchemy.ext.declarative import declarative_base
|
||||||
|
from sqlalchemy.orm import scoped_session, sessionmaker
|
||||||
|
|
||||||
|
|
||||||
|
def _deserialize(value):
|
||||||
|
if value is None:
|
||||||
|
return None
|
||||||
|
# sqlite likes to return ints for strings that look like ints, even though
|
||||||
|
# the column type is string. That's how you do dynamic typing wrong.
|
||||||
|
value = str(value)
|
||||||
|
# Just in case someone's mucking with the DB in a way we can't account for,
|
||||||
|
# ignore json parsing errors
|
||||||
|
try:
|
||||||
|
value = json.loads(value)
|
||||||
|
except ValueError:
|
||||||
|
pass
|
||||||
|
return value
|
||||||
|
|
||||||
|
|
||||||
|
# Declarative base shared by every ORM model class in this module.
BASE = declarative_base()
# Table options applied to each model so MySQL deployments get InnoDB
# with full-unicode utf8mb4 storage and collation.
MYSQL_TABLE_ARGS = {'mysql_engine': 'InnoDB',
                    'mysql_charset': 'utf8mb4',
                    'mysql_collate': 'utf8mb4_unicode_ci'}
|
||||||
|
|
||||||
|
|
||||||
|
class ChannelValues(BASE):
    """Per-channel key/value storage.

    Composite primary key (channel, namespace, key); ``value`` holds a
    JSON-serialized payload written by fHDHRdb.set_channel_value.
    """
    __tablename__ = 'channel_values'
    __table_args__ = MYSQL_TABLE_ARGS
    channel = Column(String(255), primary_key=True)
    namespace = Column(String(255), primary_key=True)
    key = Column(String(255), primary_key=True)
    value = Column(Text())
|
||||||
|
|
||||||
|
|
||||||
|
class ProgramValues(BASE):
    """Per-program key/value storage.

    Composite primary key (program, namespace, key); ``value`` holds a
    JSON-serialized payload written by fHDHRdb.set_program_value.
    """
    __tablename__ = 'program_values'
    __table_args__ = MYSQL_TABLE_ARGS
    program = Column(String(255), primary_key=True)
    namespace = Column(String(255), primary_key=True)
    key = Column(String(255), primary_key=True)
    value = Column(Text())
|
||||||
|
|
||||||
|
|
||||||
|
class CacheValues(BASE):
    """Generic cache-item key/value storage.

    Composite primary key (cacheitem, namespace, key); ``value`` holds a
    JSON-serialized payload written by fHDHRdb.set_cacheitem_value.
    """
    __tablename__ = 'cache_values'
    __table_args__ = MYSQL_TABLE_ARGS
    cacheitem = Column(String(255), primary_key=True)
    namespace = Column(String(255), primary_key=True)
    key = Column(String(255), primary_key=True)
    value = Column(Text())
|
||||||
|
|
||||||
|
|
||||||
|
class fHDHRValues(BASE):
    """General application key/value storage (cluster state, EPG caches...).

    Composite primary key (item, namespace, key); ``value`` holds a
    JSON-serialized payload written by fHDHRdb.set_fhdhr_value.
    """
    __tablename__ = 'fhdhr_values'
    __table_args__ = MYSQL_TABLE_ARGS
    item = Column(String(255), primary_key=True)
    namespace = Column(String(255), primary_key=True)
    key = Column(String(255), primary_key=True)
    value = Column(Text())
|
||||||
|
|
||||||
|
|
||||||
|
class fHDHRdb(object):
    """SQLAlchemy-backed key/value store for fHDHR.

    Persists channel, program, cache-item and general application values as
    JSON-serialized strings keyed by (identifier, namespace, key).  Supports
    sqlite out of the box plus mysql/postgres/oracle/mssql/firebird/sybase
    via a SQLAlchemy URL built from the [database] config section.

    All four set/get/delete families share the same logic; the public
    methods delegate to the private ``_set_value``/``_get_value``/
    ``_delete_value`` helpers with the appropriate model class.
    """

    def __init__(self, settings):
        """Build the engine from config, verify connectivity, create tables.

        Raises:
            Exception: unknown db type, or missing user/pass/host for a
                server-based engine.
            OperationalError: the database could not be reached.
        """
        self.config = settings
        # Example URLs:
        # MySQL - mysql://username:password@localhost/db
        # SQLite - sqlite:////cache/path/default.db
        self.type = self.config.dict["database"]["type"]

        # Handle SQLite explicitly as a default
        if self.type == 'sqlite':
            path = self.config.dict["database"]["path"]
            path = os.path.expanduser(path)
            self.filename = path
            self.url = 'sqlite:///%s' % path
        # Otherwise, handle all other database engines
        else:
            query = {}
            if self.type == 'mysql':
                drivername = self.config.dict["database"]["driver"] or 'mysql'
                query = {'charset': 'utf8mb4'}
            elif self.type == 'postgres':
                drivername = self.config.dict["database"]["driver"] or 'postgresql'
            elif self.type == 'oracle':
                drivername = self.config.dict["database"]["driver"] or 'oracle'
            elif self.type == 'mssql':
                drivername = self.config.dict["database"]["driver"] or 'mssql+pymssql'
            elif self.type == 'firebird':
                drivername = self.config.dict["database"]["driver"] or 'firebird+fdb'
            elif self.type == 'sybase':
                drivername = self.config.dict["database"]["driver"] or 'sybase+pysybase'
            else:
                raise Exception('Unknown db_type')

            db_user = self.config.dict["database"]["user"]
            db_pass = self.config.dict["database"]["pass"]
            db_host = self.config.dict["database"]["host"]
            # BUGFIX: this previously read the misspelled key "prt", so a
            # configured "port" value was never picked up.
            db_port = self.config.dict["database"]["port"]  # Optional
            db_name = self.config.dict["database"]["name"]  # Optional, depending on DB

            # Ensure we have all our variables defined
            if db_user is None or db_pass is None or db_host is None:
                raise Exception('Please make sure the following core '
                                'configuration values are defined: '
                                'db_user, db_pass, db_host')
            self.url = URL(drivername=drivername, username=db_user,
                           password=db_pass, host=db_host, port=db_port,
                           database=db_name, query=query)

        # pool_recycle keeps long-lived MySQL connections from timing out.
        self.engine = create_engine(self.url, pool_recycle=3600)

        # Catch any errors connecting to database
        try:
            self.engine.connect()
        except OperationalError:
            print("OperationalError: Unable to connect to database.")
            raise

        # Create our tables
        BASE.metadata.create_all(self.engine)

        self.ssession = scoped_session(sessionmaker(bind=self.engine))

    def connect(self):
        """Return a raw DB-API connection (intended for sqlite only)."""
        if self.type != 'sqlite':
            print(
                "Raw connection requested when 'db_type' is not 'sqlite':\n"
                "Consider using 'db.session()' to get a SQLAlchemy session "
                "instead here:\n%s",
                traceback.format_list(traceback.extract_stack()[:-1])[-1][:-1])
        return self.engine.raw_connection()

    def session(self):
        """Return a SQLAlchemy session from the scoped-session factory."""
        return self.ssession()

    def execute(self, *args, **kwargs):
        """Execute a statement directly on the engine."""
        return self.engine.execute(*args, **kwargs)

    def get_uri(self):
        """Return the SQLAlchemy URL this engine was built with."""
        return self.url

    # Generic helpers shared by all four *_value families

    def _query_item(self, session, model, id_field, ident, key, namespace):
        """Return the matching row of ``model`` or None."""
        return (session.query(model)
                .filter(getattr(model, id_field) == ident)
                .filter(model.namespace == namespace)
                .filter(model.key == key)
                .one_or_none())

    def _set_value(self, model, id_field, ident, key, value, namespace):
        """Insert or update one JSON-serialized value for ``model``."""
        ident = ident.lower()
        value = json.dumps(value, ensure_ascii=False)
        session = self.ssession()
        try:
            result = self._query_item(session, model, id_field, ident, key, namespace)
            if result:
                # Row exists, update in place
                result.value = value
            else:
                # Row does not exist, insert
                session.add(model(**{id_field: ident, "namespace": namespace,
                                     "key": key, "value": value}))
            session.commit()
        except SQLAlchemyError:
            session.rollback()
            raise
        finally:
            session.close()

    def _get_value(self, model, id_field, ident, key, namespace):
        """Fetch and deserialize one value; None when absent."""
        ident = ident.lower()
        session = self.ssession()
        try:
            result = self._query_item(session, model, id_field, ident, key, namespace)
            if result is not None:
                result = result.value
            return _deserialize(result)
        except SQLAlchemyError:
            session.rollback()
            raise
        finally:
            session.close()

    def _delete_value(self, model, id_field, ident, key, namespace):
        """Delete one value if it exists; silently no-op otherwise."""
        ident = ident.lower()
        session = self.ssession()
        try:
            result = self._query_item(session, model, id_field, ident, key, namespace)
            if result:
                session.delete(result)
                session.commit()
        except SQLAlchemyError:
            session.rollback()
            raise
        finally:
            session.close()

    # Channel Values

    def set_channel_value(self, channel, key, value, namespace='default'):
        """Store ``value`` (JSON-serialized) for a channel."""
        self._set_value(ChannelValues, "channel", channel, key, value, namespace)

    def get_channel_value(self, channel, key, namespace='default'):
        """Return the stored value for a channel, or None."""
        return self._get_value(ChannelValues, "channel", channel, key, namespace)

    def delete_channel_value(self, channel, key, namespace='default'):
        """Delete the stored value for a channel, if present."""
        self._delete_value(ChannelValues, "channel", channel, key, namespace)

    # Program Values

    def set_program_value(self, program, key, value, namespace='default'):
        """Store ``value`` (JSON-serialized) for a program."""
        self._set_value(ProgramValues, "program", program, key, value, namespace)

    def get_program_value(self, program, key, namespace='default'):
        """Return the stored value for a program, or None."""
        return self._get_value(ProgramValues, "program", program, key, namespace)

    def delete_program_value(self, program, key, namespace='default'):
        """Delete the stored value for a program, if present."""
        self._delete_value(ProgramValues, "program", program, key, namespace)

    # Cache Values

    def set_cacheitem_value(self, cacheitem, key, value, namespace='default'):
        """Store ``value`` (JSON-serialized) for a cache item."""
        self._set_value(CacheValues, "cacheitem", cacheitem, key, value, namespace)

    def get_cacheitem_value(self, cacheitem, key, namespace='default'):
        """Return the stored value for a cache item, or None."""
        return self._get_value(CacheValues, "cacheitem", cacheitem, key, namespace)

    def delete_cacheitem_value(self, cacheitem, key, namespace='default'):
        """Delete the stored value for a cache item, if present."""
        self._delete_value(CacheValues, "cacheitem", cacheitem, key, namespace)

    # fHDHR Values

    def set_fhdhr_value(self, item, key, value, namespace='default'):
        """Store ``value`` (JSON-serialized) for a general fHDHR item."""
        self._set_value(fHDHRValues, "item", item, key, value, namespace)

    def get_fhdhr_value(self, item, key, namespace='default'):
        """Return the stored value for a general fHDHR item, or None."""
        return self._get_value(fHDHRValues, "item", item, key, namespace)

    def delete_fhdhr_value(self, item, key, namespace='default'):
        """Delete the stored value for a general fHDHR item, if present."""
        self._delete_value(fHDHRValues, "item", item, key, namespace)
|
||||||
29
fHDHR/device/__init__.py
Normal file
29
fHDHR/device/__init__.py
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
from .channels import Channels
|
||||||
|
from .epg import EPG
|
||||||
|
from .tuners import Tuners
|
||||||
|
from .watch import WatchStream
|
||||||
|
from .images import imageHandler
|
||||||
|
from .station_scan import Station_Scan
|
||||||
|
from .ssdp import SSDPServer
|
||||||
|
from .cluster import fHDHR_Cluster
|
||||||
|
|
||||||
|
|
||||||
|
class fHDHR_Device():
    """Aggregate of all runtime device components.

    Construction order matters: later components receive earlier ones
    (epg needs channels, watch needs tuners, cluster needs ssdp, ...).
    """

    def __init__(self, settings, fhdhr_version, origin, logger, web, db):

        # Channel list management for the origin service.
        self.channels = Channels(settings, origin, logger, db)

        # Electronic program guide; reads channel data from self.channels.
        self.epg = EPG(settings, self.channels, origin, logger, web, db)

        # Virtual tuner pool; consults the EPG for what's-on information.
        self.tuners = Tuners(settings, self.epg, logger)

        # Stream-watching orchestration over channels and tuners.
        self.watch = WatchStream(settings, self.channels, self.tuners, logger, web)

        # Image/thumbnail serving backed by the EPG.
        self.images = imageHandler(settings, self.epg, logger, web)

        # Channel scan handling.
        self.station_scan = Station_Scan(settings, self.channels, logger, db)

        # SSDP discovery server.
        self.ssdp = SSDPServer(settings, fhdhr_version, logger, db)

        # Multi-instance cluster coordination; uses SSDP for peer detection.
        self.cluster = fHDHR_Cluster(settings, self.ssdp, logger, db, web)
|
||||||
@ -1,6 +1,4 @@
|
|||||||
import os
|
|
||||||
import datetime
|
import datetime
|
||||||
import json
|
|
||||||
from collections import OrderedDict
|
from collections import OrderedDict
|
||||||
|
|
||||||
from fHDHR.tools import hours_between_datetime
|
from fHDHR.tools import hours_between_datetime
|
||||||
@ -8,49 +6,40 @@ from fHDHR.tools import hours_between_datetime
|
|||||||
|
|
||||||
class ChannelNumbers():
|
class ChannelNumbers():
|
||||||
|
|
||||||
def __init__(self, settings):
|
def __init__(self, settings, logger, db):
|
||||||
self.config = settings
|
self.config = settings
|
||||||
|
self.logger = logger
|
||||||
|
self.db = db
|
||||||
|
|
||||||
self.channel_numbers_file = self.config.dict["main"]["channel_numbers"]
|
def get_number(self, channel_id):
|
||||||
|
cnumbers = self.db.get_fhdhr_value("channel_numbers", "list") or {}
|
||||||
self.cnumbers = {}
|
if channel_id in list(cnumbers.keys()):
|
||||||
self.load_channel_list()
|
return cnumbers[channel_id]
|
||||||
|
|
||||||
def get_number(self, channel_name):
|
|
||||||
if channel_name in list(self.cnumbers.keys()):
|
|
||||||
return self.cnumbers[channel_name]
|
|
||||||
|
|
||||||
used_numbers = []
|
used_numbers = []
|
||||||
for channel_name in list(self.cnumbers.keys()):
|
for channel_id in list(cnumbers.keys()):
|
||||||
used_numbers.append(self.cnumbers[channel_name])
|
used_numbers.append(cnumbers[channel_id])
|
||||||
|
|
||||||
for i in range(1, 1000):
|
for i in range(1, 1000):
|
||||||
if str(float(i)) not in used_numbers:
|
if str(float(i)) not in used_numbers:
|
||||||
break
|
break
|
||||||
return str(float(i))
|
return str(float(i))
|
||||||
|
|
||||||
def set_number(self, channel_name, channel_number):
|
def set_number(self, channel_id, channel_number):
|
||||||
self.cnumbers[channel_name] = str(float(channel_number))
|
cnumbers = self.db.get_fhdhr_value("channel_numbers", "list") or {}
|
||||||
|
cnumbers[channel_id] = str(float(channel_number))
|
||||||
def load_channel_list(self):
|
self.db.set_fhdhr_value("channel_numbers", "list", cnumbers)
|
||||||
if os.path.isfile(self.channel_numbers_file):
|
|
||||||
print("Loading Previously Saved Channel Numbers.")
|
|
||||||
with open(self.channel_numbers_file, 'r') as cnumbersfile:
|
|
||||||
self.cnumbers = json.load(cnumbersfile)
|
|
||||||
|
|
||||||
def save_channel_list(self):
|
|
||||||
print("Saving Channel Numbers.")
|
|
||||||
with open(self.channel_numbers_file, 'w') as cnumbersfile:
|
|
||||||
cnumbersfile.write(json.dumps(self.cnumbers, indent=4))
|
|
||||||
|
|
||||||
|
|
||||||
class Channels():
|
class Channels():
|
||||||
|
|
||||||
def __init__(self, settings, origin):
|
def __init__(self, settings, origin, logger, db):
|
||||||
self.config = settings
|
self.config = settings
|
||||||
|
self.logger = logger
|
||||||
self.origin = origin
|
self.origin = origin
|
||||||
|
self.db = db
|
||||||
|
|
||||||
self.channel_numbers = ChannelNumbers(settings)
|
self.channel_numbers = ChannelNumbers(settings, logger, db)
|
||||||
|
|
||||||
self.list = {}
|
self.list = {}
|
||||||
self.list_update_time = None
|
self.list_update_time = None
|
||||||
@ -83,9 +72,8 @@ class Channels():
|
|||||||
channel_dict_list = self.verify_channel_info(channel_dict_list)
|
channel_dict_list = self.verify_channel_info(channel_dict_list)
|
||||||
self.append_channel_info(channel_dict_list)
|
self.append_channel_info(channel_dict_list)
|
||||||
if not self.list_update_time:
|
if not self.list_update_time:
|
||||||
print("Found " + str(len(self.list)) + " channels for " + str(self.config.dict["main"]["servicename"]))
|
self.logger.info("Found " + str(len(self.list)) + " channels for " + str(self.config.dict["main"]["servicename"]))
|
||||||
self.list_update_time = datetime.datetime.now()
|
self.list_update_time = datetime.datetime.now()
|
||||||
self.channel_numbers.save_channel_list()
|
|
||||||
|
|
||||||
channel_list = []
|
channel_list = []
|
||||||
for chandict in list(self.list.keys()):
|
for chandict in list(self.list.keys()):
|
||||||
@ -127,9 +115,8 @@ class Channels():
|
|||||||
return next(item for item in chanlist if item[keyfind] == valfind)
|
return next(item for item in chanlist if item[keyfind] == valfind)
|
||||||
|
|
||||||
def get_fhdhr_stream_url(self, base_url, channel_number):
|
def get_fhdhr_stream_url(self, base_url, channel_number):
|
||||||
return ('%s%s/auto/v%s' %
|
return ('%s/auto/v%s' %
|
||||||
("http://",
|
(base_url,
|
||||||
base_url,
|
|
||||||
channel_number))
|
channel_number))
|
||||||
|
|
||||||
def verify_channel_info(self, channel_dict_list):
|
def verify_channel_info(self, channel_dict_list):
|
||||||
@ -141,10 +128,10 @@ class Channels():
|
|||||||
if "id" not in list(station_item.keys()):
|
if "id" not in list(station_item.keys()):
|
||||||
station_item["id"] = station_item["name"]
|
station_item["id"] = station_item["name"]
|
||||||
if "number" not in list(station_item.keys()):
|
if "number" not in list(station_item.keys()):
|
||||||
station_item["number"] = self.channel_numbers.get_number(station_item["name"])
|
station_item["number"] = self.channel_numbers.get_number(station_item["id"])
|
||||||
else:
|
else:
|
||||||
station_item["number"] = str(float(station_item["number"]))
|
station_item["number"] = str(float(station_item["number"]))
|
||||||
self.channel_numbers.set_number(station_item["name"], station_item["number"])
|
self.channel_numbers.set_number(station_item["id"], station_item["number"])
|
||||||
cleaned_channel_dict_list.append(station_item)
|
cleaned_channel_dict_list.append(station_item)
|
||||||
return cleaned_channel_dict_list
|
return cleaned_channel_dict_list
|
||||||
|
|
||||||
140
fHDHR/device/cluster.py
Normal file
140
fHDHR/device/cluster.py
Normal file
@ -0,0 +1,140 @@
|
|||||||
|
import urllib.parse
|
||||||
|
from collections import OrderedDict
|
||||||
|
|
||||||
|
|
||||||
|
class fHDHR_Cluster():
    """Coordinates a cluster of fHDHR instances over HTTP.

    Membership is persisted in the database under ("cluster", "dict") as a
    mapping of location URL -> info dict.  Peers are reached via their
    /api/cluster endpoints (methods: get, sync, del, leave).
    """

    def __init__(self, settings, ssdp, logger, db, web):
        self.config = settings
        self.logger = logger
        self.ssdp = ssdp
        self.db = db
        self.web = web

        self.friendlyname = self.config.dict["fhdhr"]["friendlyname"]
        # This instance's own advertised base URL; stays None when no
        # discovery_address is configured.
        self.location = None
        self.location_url = None
        if settings.dict["fhdhr"]["discovery_address"]:
            self.location = ('http://' + settings.dict["fhdhr"]["discovery_address"] + ':' +
                             str(settings.dict["fhdhr"]["port"]))
            # URL-quoted form, used as a query-string parameter in push_sync.
            self.location_url = urllib.parse.quote(self.location)

        self.startup_sync()

    def cluster(self):
        """Return the persisted cluster dict, defaulting to just ourselves."""
        return self.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()

    def get_list(self):
        """Return {location: {"Joined": bool}} for known and SSDP-detected peers."""
        cluster = self.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        return_dict = {}
        for location in list(cluster.keys()):
            if location != self.location:
                return_dict[location] = {
                    "Joined": True
                    }

        # Peers seen via SSDP but not yet members are listed as not joined.
        detected_list = self.ssdp.detect_method.get()
        for location in detected_list:
            if location not in list(cluster.keys()):
                return_dict[location] = {
                    "Joined": False
                    }
        return_dict = OrderedDict(sorted(return_dict.items()))
        return return_dict

    def default_cluster(self):
        """Return a one-member cluster dict containing only this instance."""
        defdict = {}
        defdict[self.location] = {
                                "base_url": self.location,
                                "name": self.friendlyname
                                }
        return defdict

    def startup_sync(self):
        """On startup, verify peers still list us; leave the cluster if not."""
        cluster = self.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        for location in list(cluster.keys()):
            if location != self.location:
                sync_url = location + "/api/cluster?method=get"
                try:
                    sync_open = self.web.session.get(sync_url)
                    retrieved_cluster = sync_open.json()
                    # A peer that no longer knows us means we were removed.
                    if self.location not in list(retrieved_cluster.keys()):
                        return self.leave()
                except self.web.exceptions.ConnectionError:
                    self.logger.error("Unreachable: " + location)

    def leave(self):
        """Reset persisted membership to just this instance."""
        self.db.set_fhdhr_value("cluster", "dict", self.default_cluster())

    def disconnect(self):
        """Ask every peer to drop us, then leave the cluster locally."""
        cluster = self.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        for location in list(cluster.keys()):
            if location != self.location:
                sync_url = location + "/api/cluster?method=del&location=" + self.location
                try:
                    self.web.session.get(sync_url)
                except self.web.exceptions.ConnectionError:
                    self.logger.error("Unreachable: " + location)
        self.leave()

    def sync(self, location):
        """Replace our membership dict with the one fetched from ``location``."""
        sync_url = location + "/api/cluster?method=get"
        try:
            sync_open = self.web.session.get(sync_url)
            self.db.set_fhdhr_value("cluster", "dict", sync_open.json())
        except self.web.exceptions.ConnectionError:
            self.logger.error("Unreachable: " + location)

    def push_sync(self):
        """Tell every peer to pull the membership dict from us."""
        cluster = self.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        for location in list(cluster.keys()):
            if location != self.location:
                sync_url = location + "/api/cluster?method=sync&location=" + self.location_url
                try:
                    self.web.session.get(sync_url)
                except self.web.exceptions.ConnectionError:
                    self.logger.error("Unreachable: " + location)

    def add(self, location):
        """Add a peer: fetch its identity and membership, merge, then push.

        On any connection failure the peer is rolled back out of our dict.
        """
        cluster = self.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        if location not in list(cluster.keys()):
            cluster[location] = {"base_url": location}

            location_info_url = location + "/discover.json"
            try:
                location_info_req = self.web.session.get(location_info_url)
            except self.web.exceptions.ConnectionError:
                self.logger.error("Unreachable: " + location)
                del cluster[location]
                self.db.set_fhdhr_value("cluster", "dict", cluster)
                return
            location_info = location_info_req.json()
            cluster[location]["name"] = location_info["FriendlyName"]

            cluster_info_url = location + "/api/cluster?method=get"
            try:
                cluster_info_req = self.web.session.get(cluster_info_url)
            except self.web.exceptions.ConnectionError:
                self.logger.error("Unreachable: " + location)
                del cluster[location]
                self.db.set_fhdhr_value("cluster", "dict", cluster)
                return
            cluster_info = cluster_info_req.json()
            # Merge members the new peer knows about that we don't.
            for cluster_key in list(cluster_info.keys()):
                if cluster_key not in list(cluster.keys()):
                    cluster[cluster_key] = cluster_info[cluster_key]

            self.db.set_fhdhr_value("cluster", "dict", cluster)
            self.push_sync()

    def remove(self, location):
        """Remove a peer, notify it to leave, and re-sync the rest."""
        cluster = self.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        if location in list(cluster.keys()):
            del cluster[location]
            sync_url = location + "/api/cluster?method=leave"
            try:
                self.web.session.get(sync_url)
            except self.web.exceptions.ConnectionError:
                self.logger.error("Unreachable: " + location)
            self.push_sync()
            self.db.set_fhdhr_value("cluster", "dict", cluster)
|
||||||
153
fHDHR/device/epg.py
Normal file
153
fHDHR/device/epg.py
Normal file
@ -0,0 +1,153 @@
|
|||||||
|
import os
|
||||||
|
import time
|
||||||
|
import datetime
|
||||||
|
from collections import OrderedDict
|
||||||
|
|
||||||
|
# Discover available EPG backends by scanning the sibling 'epgtypes'
# package: every non-underscore .py file becomes an importable backend.
epgtype_list = []
device_dir = os.path.dirname(__file__)
for entry in os.scandir(device_dir + '/epgtypes'):
    if entry.is_file():
        if entry.name[0] != '_':
            # entry.name[:-3] strips the trailing ".py".
            epgtype_list.append(str(entry.name[:-3]))
            # The [:-3] slice applies to the whole f-string, removing the
            # ".py" suffix from the built import statement.
            # HACK: dynamic import via exec of a constructed statement;
            # input comes only from the local package directory listing.
            impstring = f'from .epgtypes import {entry.name}'[:-3]
            exec(impstring)
|
||||||
|
|
||||||
|
|
||||||
|
class EPG():
    """Electronic program guide: fetches, caches and serves listings.

    Guide data is produced by pluggable backends (the dynamically imported
    epgtypes modules plus the 'origin' service) and cached both in memory
    (self.epgdict) and in the database under ("epg_dict", method).
    """

    def __init__(self, settings, channels, origin, logger, web, db):
        self.config = settings
        self.logger = logger
        self.origin = origin
        self.channels = channels
        self.web = web
        self.db = db

        # In-memory cache of guide data, keyed by EPG method name.
        self.epgdict = {}

        # Instantiate one backend object per discovered epgtype module.
        self.epg_method_selfadd()

        self.epg_methods = self.config.dict["epg"]["method"]
        self.def_method = self.config.dict["epg"]["def_method"]
        # Per-method refresh interval: a method-specific update_frequency
        # wins over the global [epg] update_frequency.
        self.sleeptime = {}
        for epg_method in self.epg_methods:
            if epg_method in list(self.config.dict.keys()):
                if "update_frequency" in list(self.config.dict[epg_method].keys()):
                    self.sleeptime[epg_method] = self.config.dict[epg_method]["update_frequency"]
            if epg_method not in list(self.sleeptime.keys()):
                self.sleeptime[epg_method] = self.config.dict["epg"]["update_frequency"]

    def whats_on_now(self, channel):
        """Return the channel's epg entry trimmed to the current listing.

        Listing timestamps are parsed as '%Y%m%d%H%M%S +0000' (UTC) and
        compared against utcnow; returns None when nothing is airing.
        """
        epgdict = self.get_epg()
        listings = epgdict[channel]["listing"]
        for listing in listings:
            nowtime = datetime.datetime.utcnow()
            start_time = datetime.datetime.strptime(listing["time_start"], '%Y%m%d%H%M%S +0000')
            end_time = datetime.datetime.strptime(listing["time_end"], '%Y%m%d%H%M%S +0000')
            if start_time <= nowtime <= end_time:
                epgitem = epgdict[channel].copy()
                epgitem["listing"] = [listing]
                return epgitem
        return None

    def whats_on_allchans(self):
        """Return the current listing for every channel that has one."""
        channel_guide_list = []
        for channel in self.channels.get_channels():
            whatson = self.whats_on_now(channel["number"])
            if whatson:
                channel_guide_list.append(whatson)
        return channel_guide_list

    def get_epg(self, method=None):
        """Return guide data for ``method``, loading/building it on demand.

        Invalid or placeholder method names fall back to 'origin'.  Data is
        memoized in self.epgdict and backed by the database cache.
        """
        if not method:
            method = self.def_method
        if (method == self.config.dict["main"]["dictpopname"] or
           method not in self.config.dict["main"]["valid_epg_methods"]):
            method = "origin"

        if method not in list(self.epgdict.keys()):

            epgdict = self.db.get_fhdhr_value("epg_dict", method) or None
            if not epgdict:
                # No cached guide yet: build it, then read it back.
                self.update(method)
                self.epgdict[method] = self.db.get_fhdhr_value("epg_dict", method) or {}
            else:
                self.epgdict[method] = epgdict
            return self.epgdict[method]
        else:
            return self.epgdict[method]

    def get_thumbnail(self, itemtype, itemid):
        """Return the thumbnail URL for a channel or content item, else None."""
        if itemtype == "channel":
            chandict = self.find_channel_dict(itemid)
            return chandict["thumbnail"]
        elif itemtype == "content":
            progdict = self.find_program_dict(itemid)
            return progdict["thumbnail"]
        return None

    def find_channel_dict(self, channel_id):
        """Return the guide entry whose "id" matches ``channel_id``.

        Raises StopIteration when no channel matches.
        """
        epgdict = self.get_epg()
        channel_list = []
        for channel in list(epgdict.keys()):
            channel_list.append(epgdict[channel])
        return next(item for item in channel_list if item["id"] == channel_id)

    def find_program_dict(self, event_id):
        """Return the listing entry whose "id" matches ``event_id``.

        Raises StopIteration when no listing matches.
        """
        epgdict = self.get_epg()
        event_list = []
        for channel in list(epgdict.keys()):
            event_list.extend(epgdict[channel]["listing"])
        return next(item for item in event_list if item["id"] == event_id)

    def epg_method_selfadd(self):
        """Attach one backend instance per discovered epgtype as self.<method>.

        HACK: built via exec; method names come from the local package
        directory scan at module import time.
        """
        for method in epgtype_list:
            exec("%s = %s" % ("self." + str(method), str(method) + "." + str(method) + "EPG(self.config, self.channels, self.logger, self.web, self.db)"))

    def update(self, method=None):
        """Rebuild the guide for ``method`` and write it to the DB cache.

        Normalizes channel keys to str(float(number)), sorts channels and
        their listings by start time, and records the update timestamp.
        """
        if not method:
            method = self.def_method
        if (method == self.config.dict["main"]["dictpopname"] or
           method not in self.config.dict["main"]["valid_epg_methods"]):
            method = "origin"

        epgtypename = method
        if method in [self.config.dict["main"]["dictpopname"], "origin"]:
            epgtypename = self.config.dict["main"]["dictpopname"]

        self.logger.info("Updating " + epgtypename + " EPG cache.")
        method_to_call = getattr(self, method)
        func_to_call = getattr(method_to_call, 'update_epg')
        # The origin backend needs the channel list; the others do not.
        if method == 'origin':
            programguide = func_to_call(self.channels)
        else:
            programguide = func_to_call()

        # Re-key channels by their float-formatted channel number.
        for chan in list(programguide.keys()):
            floatnum = str(float(chan))
            programguide[floatnum] = programguide.pop(chan)
            programguide[floatnum]["number"] = floatnum

        programguide = OrderedDict(sorted(programguide.items()))

        for cnum in programguide:
            programguide[cnum]["listing"] = sorted(programguide[cnum]["listing"], key=lambda i: i['time_start'])

        # NOTE(review): this assigns the flat guide dict, whereas get_epg
        # treats self.epgdict as keyed by method name — confirm intended.
        self.epgdict = programguide
        self.db.set_fhdhr_value("epg_dict", method, programguide)
        self.db.set_fhdhr_value("update_time", method, time.time())
        self.logger.info("Wrote " + epgtypename + " EPG cache.")

    def run(self):
        """Blocking loop: refresh each method when its interval elapses.

        Polls hourly; exits quietly on KeyboardInterrupt.
        """
        for epg_method in self.epg_methods:
            self.update(epg_method)
        try:
            while True:
                for epg_method in self.epg_methods:
                    if time.time() >= (self.db.get_fhdhr_value("update_time", epg_method) + self.sleeptime[epg_method]):
                        self.update(epg_method)
                time.sleep(3600)
        except KeyboardInterrupt:
            pass
|
||||||
@ -3,9 +3,11 @@ import datetime
|
|||||||
|
|
||||||
class blocksEPG():
|
class blocksEPG():
|
||||||
|
|
||||||
def __init__(self, settings, channels):
|
def __init__(self, settings, channels, logger, web, db):
|
||||||
self.config = settings
|
self.config = settings
|
||||||
|
self.logger = logger
|
||||||
self.channels = channels
|
self.channels = channels
|
||||||
|
self.db = db
|
||||||
|
|
||||||
def update_epg(self):
|
def update_epg(self):
|
||||||
programguide = {}
|
programguide = {}
|
||||||
@ -35,7 +37,7 @@ class blocksEPG():
|
|||||||
"name": c["name"],
|
"name": c["name"],
|
||||||
"number": c["number"],
|
"number": c["number"],
|
||||||
"id": c["id"],
|
"id": c["id"],
|
||||||
"thumbnail": ("/images?source=generate&message=%s" % (str(c['number']))),
|
"thumbnail": ("/api/images?method=generate&type=channel&message=%s" % (str(c['number']))),
|
||||||
"listing": [],
|
"listing": [],
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -44,7 +46,7 @@ class blocksEPG():
|
|||||||
"time_start": timestamp['time_start'],
|
"time_start": timestamp['time_start'],
|
||||||
"time_end": timestamp['time_end'],
|
"time_end": timestamp['time_end'],
|
||||||
"duration_minutes": 60,
|
"duration_minutes": 60,
|
||||||
"thumbnail": ("/images?source=generate&message=%s" % (str(c["id"]) + "_" + str(timestamp['time_start']).split(" ")[0])),
|
"thumbnail": ("/api/images?method=generate&type=content&message=%s" % (str(c["id"]) + "_" + str(timestamp['time_start']).split(" ")[0])),
|
||||||
"title": "Unavailable",
|
"title": "Unavailable",
|
||||||
"sub-title": "Unavailable",
|
"sub-title": "Unavailable",
|
||||||
"description": "Unavailable",
|
"description": "Unavailable",
|
||||||
@ -3,23 +3,25 @@ import time
|
|||||||
import datetime
|
import datetime
|
||||||
import urllib.parse
|
import urllib.parse
|
||||||
|
|
||||||
from fHDHR.tools import xmldictmaker, WebReq
|
from fHDHR.tools import xmldictmaker
|
||||||
from fHDHR.exceptions import EPGSetupError
|
from fHDHR.exceptions import EPGSetupError
|
||||||
|
|
||||||
|
|
||||||
class zap2itEPG():
|
class zap2itEPG():
|
||||||
|
|
||||||
def __init__(self, settings, channels):
|
def __init__(self, settings, channels, logger, web, db):
|
||||||
self.config = settings
|
self.config = settings
|
||||||
|
self.logger = logger
|
||||||
self.channels = channels
|
self.channels = channels
|
||||||
self.web = WebReq()
|
self.web = web
|
||||||
|
self.db = db
|
||||||
|
|
||||||
self.postalcode = self.config.dict["zap2it"]["postalcode"]
|
self.postalcode = self.config.dict["zap2it"]["postalcode"]
|
||||||
|
|
||||||
self.web_cache_dir = self.config.dict["filedir"]["epg_cache"]["zap2it"]["web_cache"]
|
self.web_cache_dir = self.config.dict["filedir"]["epg_cache"]["zap2it"]["web_cache"]
|
||||||
|
|
||||||
def get_location(self):
|
def get_location(self):
|
||||||
print("Zap2it postalcode not set, attempting to retrieve.")
|
self.logger.warning("Zap2it postalcode not set, attempting to retrieve.")
|
||||||
if not self.postalcode:
|
if not self.postalcode:
|
||||||
try:
|
try:
|
||||||
postalcode_url = 'http://ipinfo.io/json'
|
postalcode_url = 'http://ipinfo.io/json'
|
||||||
@ -130,11 +132,11 @@ class zap2itEPG():
|
|||||||
def get_cached(self, cache_key, delay, url):
|
def get_cached(self, cache_key, delay, url):
|
||||||
cache_path = self.web_cache_dir.joinpath(cache_key)
|
cache_path = self.web_cache_dir.joinpath(cache_key)
|
||||||
if cache_path.is_file():
|
if cache_path.is_file():
|
||||||
print('FROM CACHE:', str(cache_path))
|
self.logger.info('FROM CACHE: ' + str(cache_path))
|
||||||
with open(cache_path, 'rb') as f:
|
with open(cache_path, 'rb') as f:
|
||||||
return f.read()
|
return f.read()
|
||||||
else:
|
else:
|
||||||
print('Fetching: ', url)
|
self.logger.info('Fetching: ' + url)
|
||||||
resp = self.web.session.get(url)
|
resp = self.web.session.get(url)
|
||||||
result = resp.content
|
result = resp.content
|
||||||
with open(cache_path, 'wb') as f:
|
with open(cache_path, 'wb') as f:
|
||||||
@ -149,7 +151,7 @@ class zap2itEPG():
|
|||||||
if t >= zap_time:
|
if t >= zap_time:
|
||||||
continue
|
continue
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(e)
|
self.logger.error(e)
|
||||||
pass
|
pass
|
||||||
print('Removing stale cache file:', p.name)
|
self.logger.info('Removing stale cache file: ' + p.name)
|
||||||
p.unlink()
|
p.unlink()
|
||||||
@ -1,5 +1,4 @@
|
|||||||
from io import BytesIO
|
from io import BytesIO
|
||||||
import requests
|
|
||||||
import PIL.Image
|
import PIL.Image
|
||||||
import PIL.ImageDraw
|
import PIL.ImageDraw
|
||||||
import PIL.ImageFont
|
import PIL.ImageFont
|
||||||
@ -7,32 +6,18 @@ import PIL.ImageFont
|
|||||||
|
|
||||||
class imageHandler():
|
class imageHandler():
|
||||||
|
|
||||||
def __init__(self, settings, epg):
|
def __init__(self, settings, epg, logger, web):
|
||||||
self.config = settings
|
self.config = settings
|
||||||
|
self.logger = logger
|
||||||
self.epg = epg
|
self.epg = epg
|
||||||
|
self.web = web
|
||||||
|
|
||||||
def get_image(self, request_args):
|
def get_epg_image(self, image_type, content_id):
|
||||||
|
imageUri = self.epg.get_thumbnail(image_type, str(content_id))
|
||||||
if 'source' not in list(request_args.keys()):
|
|
||||||
image = self.generate_image("content", "Unknown Request")
|
|
||||||
|
|
||||||
elif request_args["source"] == "epg":
|
|
||||||
image = self.get_epg_image(request_args)
|
|
||||||
elif request_args["source"] == "generate":
|
|
||||||
image = self.generate_image(request_args["type"], request_args["message"])
|
|
||||||
else:
|
|
||||||
image = self.generate_image("content", "Unknown Request")
|
|
||||||
|
|
||||||
imagetype = self.get_image_type(image)
|
|
||||||
|
|
||||||
return image, imagetype
|
|
||||||
|
|
||||||
def get_epg_image(self, request_args):
|
|
||||||
imageUri = self.epg.get_thumbnail(request_args["type"], str(request_args["id"]))
|
|
||||||
if not imageUri:
|
if not imageUri:
|
||||||
return self.generate_image(request_args["type"], str(request_args["id"]))
|
return self.generate_image(image_type, str(content_id))
|
||||||
|
|
||||||
req = requests.get(imageUri)
|
req = self.web.session.get(imageUri)
|
||||||
return req.content
|
return req.content
|
||||||
|
|
||||||
def getSize(self, txt, font):
|
def getSize(self, txt, font):
|
||||||
@ -1,40 +1,33 @@
|
|||||||
# Adapted from https://github.com/MoshiBin/ssdpy and https://github.com/ZeWaren/python-upnp-ssdp-example
|
# Adapted from https://github.com/MoshiBin/ssdpy and https://github.com/ZeWaren/python-upnp-ssdp-example
|
||||||
import os
|
|
||||||
import socket
|
import socket
|
||||||
import struct
|
import struct
|
||||||
import json
|
|
||||||
from multiprocessing import Process
|
|
||||||
|
|
||||||
from fHDHR import fHDHR_VERSION
|
|
||||||
|
|
||||||
|
|
||||||
class fHDHR_Detect():
|
class fHDHR_Detect():
|
||||||
|
|
||||||
def __init__(self, settings):
|
def __init__(self, settings, logger, db):
|
||||||
self.config = settings
|
self.config = settings
|
||||||
self.ssdp_detect_file = self.config.dict["main"]["ssdp_detect"]
|
self.db = db
|
||||||
self.detect_list = []
|
self.db.delete_fhdhr_value("ssdp_detect", "list")
|
||||||
|
|
||||||
def set(self, location):
|
def set(self, location):
|
||||||
if location not in self.detect_list:
|
detect_list = self.db.get_fhdhr_value("ssdp_detect", "list") or []
|
||||||
self.detect_list.append(location)
|
if location not in detect_list:
|
||||||
with open(self.ssdp_detect_file, 'w') as ssdpdetectfile:
|
detect_list.append(location)
|
||||||
ssdpdetectfile.write(json.dumps(self.detect_list, indent=4))
|
self.db.set_fhdhr_value("ssdp_detect", "list", detect_list)
|
||||||
|
|
||||||
def get(self):
|
def get(self):
|
||||||
if os.path.isfile(self.ssdp_detect_file):
|
return self.db.get_fhdhr_value("ssdp_detect", "list") or []
|
||||||
with open(self.ssdp_detect_file, 'r') as ssdpdetectfile:
|
|
||||||
return json.load(ssdpdetectfile)
|
|
||||||
else:
|
|
||||||
return []
|
|
||||||
|
|
||||||
|
|
||||||
class SSDPServer():
|
class SSDPServer():
|
||||||
|
|
||||||
def __init__(self, settings):
|
def __init__(self, settings, fhdhr_version, logger, db):
|
||||||
self.config = settings
|
self.config = settings
|
||||||
|
self.logger = logger
|
||||||
|
self.db = db
|
||||||
|
|
||||||
self.detect_method = fHDHR_Detect(settings)
|
self.detect_method = fHDHR_Detect(settings, logger, db)
|
||||||
|
|
||||||
if settings.dict["fhdhr"]["discovery_address"]:
|
if settings.dict["fhdhr"]["discovery_address"]:
|
||||||
|
|
||||||
@ -43,7 +36,7 @@ class SSDPServer():
|
|||||||
self.port = 1900
|
self.port = 1900
|
||||||
self.iface = None
|
self.iface = None
|
||||||
self.address = None
|
self.address = None
|
||||||
self.server = 'fHDHR/%s UPnP/1.0' % fHDHR_VERSION
|
self.server = 'fHDHR/%s UPnP/1.0' % fhdhr_version
|
||||||
|
|
||||||
allowed_protos = ("ipv4", "ipv6")
|
allowed_protos = ("ipv4", "ipv6")
|
||||||
if self.proto not in allowed_protos:
|
if self.proto not in allowed_protos:
|
||||||
@ -108,15 +101,10 @@ class SSDPServer():
|
|||||||
self.notify_payload = self.create_notify_payload()
|
self.notify_payload = self.create_notify_payload()
|
||||||
self.msearch_payload = self.create_msearch_payload()
|
self.msearch_payload = self.create_msearch_payload()
|
||||||
|
|
||||||
print("SSDP server Starting")
|
|
||||||
|
|
||||||
self.ssdpserve = Process(target=self.run)
|
|
||||||
self.ssdpserve.start()
|
|
||||||
|
|
||||||
self.m_search()
|
self.m_search()
|
||||||
|
|
||||||
def on_recv(self, data, address):
|
def on_recv(self, data, address):
|
||||||
# print("Received packet from {}: {}".format(address, data))
|
self.logger.debug("Received packet from {}: {}".format(address, data))
|
||||||
|
|
||||||
(host, port) = address
|
(host, port) = address
|
||||||
|
|
||||||
@ -132,25 +120,24 @@ class SSDPServer():
|
|||||||
|
|
||||||
if cmd[0] == 'M-SEARCH' and cmd[1] == '*':
|
if cmd[0] == 'M-SEARCH' and cmd[1] == '*':
|
||||||
# SSDP discovery
|
# SSDP discovery
|
||||||
# print("Received qualifying M-SEARCH from {}".format(address))
|
self.logger.debug("Received qualifying M-SEARCH from {}".format(address))
|
||||||
# print("M-SEARCH data: {}".format(headers))
|
self.logger.debug("M-SEARCH data: {}".format(headers))
|
||||||
notify = self.notify_payload
|
notify = self.notify_payload
|
||||||
# print("Created NOTIFY: {}".format(notify))
|
self.logger.debug("Created NOTIFY: {}".format(notify))
|
||||||
try:
|
try:
|
||||||
self.sock.sendto(notify, address)
|
self.sock.sendto(notify, address)
|
||||||
except OSError: # as e:
|
except OSError as e:
|
||||||
# Most commonly: We received a multicast from an IP not in our subnet
|
# Most commonly: We received a multicast from an IP not in our subnet
|
||||||
# print("Unable to send NOTIFY to {}: {}".format(address, e))
|
self.logger.debug("Unable to send NOTIFY to {}: {}".format(address, e))
|
||||||
pass
|
pass
|
||||||
elif cmd[0] == 'NOTIFY' and cmd[1] == '*':
|
elif cmd[0] == 'NOTIFY' and cmd[1] == '*':
|
||||||
# SSDP presence
|
# SSDP presence
|
||||||
# print('NOTIFY *')
|
self.logger.debug("NOTIFY data: {}".format(headers))
|
||||||
# print("NOTIFY data: {}".format(headers))
|
|
||||||
if headers["server"].startswith("fHDHR"):
|
if headers["server"].startswith("fHDHR"):
|
||||||
if headers["location"] != self.location:
|
if headers["location"] != self.location:
|
||||||
self.detect_method.set(headers["location"].split("/device.xml")[0])
|
self.detect_method.set(headers["location"].split("/device.xml")[0])
|
||||||
# else:
|
else:
|
||||||
# print('Unknown SSDP command %s %s' % (cmd[0], cmd[1]))
|
self.logger.debug('Unknown SSDP command %s %s' % (cmd[0], cmd[1]))
|
||||||
|
|
||||||
def m_search(self):
|
def m_search(self):
|
||||||
data = self.msearch_payload
|
data = self.msearch_payload
|
||||||
@ -203,5 +190,3 @@ class SSDPServer():
|
|||||||
self.on_recv(data, address)
|
self.on_recv(data, address)
|
||||||
except KeyboardInterrupt:
|
except KeyboardInterrupt:
|
||||||
self.sock.close()
|
self.sock.close()
|
||||||
except Exception:
|
|
||||||
self.sock.close()
|
|
||||||
34
fHDHR/device/station_scan.py
Normal file
34
fHDHR/device/station_scan.py
Normal file
@ -0,0 +1,34 @@
|
|||||||
|
from multiprocessing import Process
|
||||||
|
|
||||||
|
|
||||||
|
class Station_Scan():
|
||||||
|
|
||||||
|
def __init__(self, settings, channels, logger, db):
|
||||||
|
self.config = settings
|
||||||
|
self.logger = logger
|
||||||
|
self.channels = channels
|
||||||
|
self.db = db
|
||||||
|
self.db.delete_fhdhr_value("station_scan", "scanning")
|
||||||
|
|
||||||
|
def scan(self):
|
||||||
|
self.logger.info("Channel Scan Requested by Client.")
|
||||||
|
|
||||||
|
scan_status = self.db.get_fhdhr_value("station_scan", "scanning")
|
||||||
|
if not scan_status:
|
||||||
|
self.db.set_fhdhr_value("station_scan", "scanning", 1)
|
||||||
|
chanscan = Process(target=self.runscan)
|
||||||
|
chanscan.start()
|
||||||
|
else:
|
||||||
|
self.logger.info("Channel Scan Already In Progress!")
|
||||||
|
|
||||||
|
def runscan(self):
|
||||||
|
self.channels.get_channels(forceupdate=True)
|
||||||
|
self.logger.info("Requested Channel Scan Complete.")
|
||||||
|
self.db.delete_fhdhr_value("station_scan", "scanning")
|
||||||
|
|
||||||
|
def scanning(self):
|
||||||
|
scan_status = self.db.get_fhdhr_value("station_scan", "scanning")
|
||||||
|
if not scan_status:
|
||||||
|
return False
|
||||||
|
else:
|
||||||
|
return True
|
||||||
@ -6,7 +6,8 @@ from fHDHR.tools import humanized_time
|
|||||||
|
|
||||||
|
|
||||||
class Tuner():
|
class Tuner():
|
||||||
def __init__(self, inum, epg):
|
def __init__(self, inum, epg, logger):
|
||||||
|
self.logger = logger
|
||||||
self.number = inum
|
self.number = inum
|
||||||
self.epg = epg
|
self.epg = epg
|
||||||
self.tuner_lock = threading.Lock()
|
self.tuner_lock = threading.Lock()
|
||||||
@ -16,7 +17,7 @@ class Tuner():
|
|||||||
if self.tuner_lock.locked():
|
if self.tuner_lock.locked():
|
||||||
raise TunerError("Tuner #" + str(self.number) + " is not available.")
|
raise TunerError("Tuner #" + str(self.number) + " is not available.")
|
||||||
|
|
||||||
print("Tuner #" + str(self.number) + " to be used for stream.")
|
self.logger.info("Tuner #" + str(self.number) + " to be used for stream.")
|
||||||
self.tuner_lock.acquire()
|
self.tuner_lock.acquire()
|
||||||
self.status = {
|
self.status = {
|
||||||
"status": "Active",
|
"status": "Active",
|
||||||
@ -27,7 +28,7 @@ class Tuner():
|
|||||||
}
|
}
|
||||||
|
|
||||||
def close(self):
|
def close(self):
|
||||||
print("Tuner #" + str(self.number) + " Shutting Down.")
|
self.logger.info("Tuner #" + str(self.number) + " Shutting Down.")
|
||||||
self.set_off_status()
|
self.set_off_status()
|
||||||
self.tuner_lock.release()
|
self.tuner_lock.release()
|
||||||
|
|
||||||
@ -47,13 +48,14 @@ class Tuner():
|
|||||||
|
|
||||||
class Tuners():
|
class Tuners():
|
||||||
|
|
||||||
def __init__(self, settings, epg):
|
def __init__(self, settings, epg, logger):
|
||||||
self.config = settings
|
self.config = settings
|
||||||
|
self.logger = logger
|
||||||
self.epg = epg
|
self.epg = epg
|
||||||
self.max_tuners = int(self.config.dict["fhdhr"]["tuner_count"])
|
self.max_tuners = int(self.config.dict["fhdhr"]["tuner_count"])
|
||||||
|
|
||||||
for i in range(1, self.max_tuners + 1):
|
for i in range(1, self.max_tuners + 1):
|
||||||
exec("%s = %s" % ("self.tuner_" + str(i), "Tuner(i, epg)"))
|
exec("%s = %s" % ("self.tuner_" + str(i), "Tuner(i, epg, logger)"))
|
||||||
|
|
||||||
def tuner_grab(self, stream_args, tunernum=None):
|
def tuner_grab(self, stream_args, tunernum=None):
|
||||||
tunerselected = None
|
tunerselected = None
|
||||||
@ -2,16 +2,16 @@ import subprocess
|
|||||||
import time
|
import time
|
||||||
|
|
||||||
from fHDHR.exceptions import TunerError
|
from fHDHR.exceptions import TunerError
|
||||||
import fHDHR.tools
|
|
||||||
|
|
||||||
|
|
||||||
class WatchStream():
|
class WatchStream():
|
||||||
|
|
||||||
def __init__(self, settings, origserv, tuners):
|
def __init__(self, settings, origserv, tuners, logger, web):
|
||||||
self.config = settings
|
self.config = settings
|
||||||
|
self.logger = logger
|
||||||
self.origserv = origserv
|
self.origserv = origserv
|
||||||
self.tuners = tuners
|
self.tuners = tuners
|
||||||
self.web = fHDHR.tools.WebReq()
|
self.web = web
|
||||||
|
|
||||||
def direct_stream(self, stream_args, tunernum):
|
def direct_stream(self, stream_args, tunernum):
|
||||||
|
|
||||||
@ -28,14 +28,14 @@ class WatchStream():
|
|||||||
|
|
||||||
if not stream_args["duration"] == 0 and not time.time() < stream_args["duration"]:
|
if not stream_args["duration"] == 0 and not time.time() < stream_args["duration"]:
|
||||||
req.close()
|
req.close()
|
||||||
print("Requested Duration Expired.")
|
self.logger.info("Requested Duration Expired.")
|
||||||
break
|
break
|
||||||
|
|
||||||
yield chunk
|
yield chunk
|
||||||
|
|
||||||
except GeneratorExit:
|
except GeneratorExit:
|
||||||
req.close()
|
req.close()
|
||||||
print("Connection Closed.")
|
self.logger.info("Connection Closed.")
|
||||||
self.tuners.tuner_close(tunernum)
|
self.tuners.tuner_close(tunernum)
|
||||||
|
|
||||||
return generate()
|
return generate()
|
||||||
@ -65,7 +65,7 @@ class WatchStream():
|
|||||||
if not stream_args["duration"] == 0 and not time.time() < stream_args["duration"]:
|
if not stream_args["duration"] == 0 and not time.time() < stream_args["duration"]:
|
||||||
ffmpeg_proc.terminate()
|
ffmpeg_proc.terminate()
|
||||||
ffmpeg_proc.communicate()
|
ffmpeg_proc.communicate()
|
||||||
print("Requested Duration Expired.")
|
self.logger.info("Requested Duration Expired.")
|
||||||
break
|
break
|
||||||
|
|
||||||
videoData = ffmpeg_proc.stdout.read(bytes_per_read)
|
videoData = ffmpeg_proc.stdout.read(bytes_per_read)
|
||||||
@ -78,12 +78,12 @@ class WatchStream():
|
|||||||
except Exception as e:
|
except Exception as e:
|
||||||
ffmpeg_proc.terminate()
|
ffmpeg_proc.terminate()
|
||||||
ffmpeg_proc.communicate()
|
ffmpeg_proc.communicate()
|
||||||
print("Connection Closed: " + str(e))
|
self.logger.info("Connection Closed: " + str(e))
|
||||||
|
|
||||||
except GeneratorExit:
|
except GeneratorExit:
|
||||||
ffmpeg_proc.terminate()
|
ffmpeg_proc.terminate()
|
||||||
ffmpeg_proc.communicate()
|
ffmpeg_proc.communicate()
|
||||||
print("Connection Closed.")
|
self.logger.info("Connection Closed.")
|
||||||
self.tuners.tuner_close(tunernum)
|
self.tuners.tuner_close(tunernum)
|
||||||
|
|
||||||
return generate()
|
return generate()
|
||||||
@ -93,11 +93,11 @@ class WatchStream():
|
|||||||
try:
|
try:
|
||||||
tunernum = self.tuners.tuner_grab(stream_args)
|
tunernum = self.tuners.tuner_grab(stream_args)
|
||||||
except TunerError as e:
|
except TunerError as e:
|
||||||
print("A " + stream_args["method"] + " stream request for channel " +
|
self.logger.info("A " + stream_args["method"] + " stream request for channel " +
|
||||||
str(stream_args["channel"]) + " was rejected do to " + str(e))
|
str(stream_args["channel"]) + " was rejected do to " + str(e))
|
||||||
return
|
return
|
||||||
|
|
||||||
print("Attempting a " + stream_args["method"] + " stream request for channel " + str(stream_args["channel"]))
|
self.logger.info("Attempting a " + stream_args["method"] + " stream request for channel " + str(stream_args["channel"]))
|
||||||
|
|
||||||
if stream_args["method"] == "ffmpeg":
|
if stream_args["method"] == "ffmpeg":
|
||||||
return self.ffmpeg_stream(stream_args, tunernum)
|
return self.ffmpeg_stream(stream_args, tunernum)
|
||||||
@ -108,7 +108,7 @@ class WatchStream():
|
|||||||
|
|
||||||
stream_args["channelUri"] = self.origserv.get_channel_stream(str(stream_args["channel"]))
|
stream_args["channelUri"] = self.origserv.get_channel_stream(str(stream_args["channel"]))
|
||||||
if not stream_args["channelUri"]:
|
if not stream_args["channelUri"]:
|
||||||
print("Could not Obtain Channel Stream.")
|
self.logger.error("Could not Obtain Channel Stream.")
|
||||||
stream_args["content_type"] = "video/mpeg"
|
stream_args["content_type"] = "video/mpeg"
|
||||||
else:
|
else:
|
||||||
channelUri_headers = self.web.session.head(stream_args["channelUri"]).headers
|
channelUri_headers = self.web.session.head(stream_args["channelUri"]).headers
|
||||||
64
fHDHR/http/__init__.py
Normal file
64
fHDHR/http/__init__.py
Normal file
@ -0,0 +1,64 @@
|
|||||||
|
from gevent.pywsgi import WSGIServer
|
||||||
|
from flask import Flask
|
||||||
|
|
||||||
|
from .pages import fHDHR_Pages
|
||||||
|
from .files import fHDHR_Files
|
||||||
|
from .api import fHDHR_API
|
||||||
|
|
||||||
|
|
||||||
|
class fHDHR_HTTP_Server():
|
||||||
|
app = None
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
self.app = Flask("fHDHR")
|
||||||
|
|
||||||
|
self.pages = fHDHR_Pages(fhdhr)
|
||||||
|
self.add_endpoints(self.pages, "pages")
|
||||||
|
|
||||||
|
self.files = fHDHR_Files(fhdhr)
|
||||||
|
self.add_endpoints(self.files, "files")
|
||||||
|
|
||||||
|
self.api = fHDHR_API(fhdhr)
|
||||||
|
self.add_endpoints(self.api, "api")
|
||||||
|
|
||||||
|
def add_endpoints(self, index_list, index_name):
|
||||||
|
item_list = [x for x in dir(index_list) if self.isapath(x)]
|
||||||
|
for item in item_list:
|
||||||
|
endpoints = eval("self." + str(index_name) + "." + str(item) + ".endpoints")
|
||||||
|
if isinstance(endpoints, str):
|
||||||
|
endpoints = [endpoints]
|
||||||
|
handler = eval("self." + str(index_name) + "." + str(item))
|
||||||
|
endpoint_name = eval("self." + str(index_name) + "." + str(item) + ".endpoint_name")
|
||||||
|
try:
|
||||||
|
endpoint_methods = eval("self." + str(index_name) + "." + str(item) + ".endpoint_methods")
|
||||||
|
except AttributeError:
|
||||||
|
endpoint_methods = ['GET']
|
||||||
|
for endpoint in endpoints:
|
||||||
|
self.add_endpoint(endpoint=endpoint,
|
||||||
|
endpoint_name=endpoint_name,
|
||||||
|
handler=handler,
|
||||||
|
methods=endpoint_methods)
|
||||||
|
|
||||||
|
def isapath(self, item):
|
||||||
|
not_a_page_list = ["fhdhr", "htmlerror", "page_elements"]
|
||||||
|
if item in not_a_page_list:
|
||||||
|
return False
|
||||||
|
elif item.startswith("__") and item.endswith("__"):
|
||||||
|
return False
|
||||||
|
else:
|
||||||
|
return True
|
||||||
|
|
||||||
|
def add_endpoint(self, endpoint=None, endpoint_name=None, handler=None, methods=['GET']):
|
||||||
|
self.app.add_url_rule(endpoint, endpoint_name, handler, methods=methods)
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
self.http = WSGIServer((
|
||||||
|
self.fhdhr.config.dict["fhdhr"]["address"],
|
||||||
|
int(self.fhdhr.config.dict["fhdhr"]["port"])
|
||||||
|
), self.app.wsgi_app)
|
||||||
|
try:
|
||||||
|
self.http.serve_forever()
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
self.http.stop()
|
||||||
24
fHDHR/http/api/__init__.py
Normal file
24
fHDHR/http/api/__init__.py
Normal file
@ -0,0 +1,24 @@
|
|||||||
|
|
||||||
|
from .cluster import Cluster
|
||||||
|
from .channels import Channels
|
||||||
|
from .lineup_post import Lineup_Post
|
||||||
|
from .xmltv import xmlTV
|
||||||
|
from .m3u import M3U
|
||||||
|
from .debug import Debug_JSON
|
||||||
|
|
||||||
|
from .images import Images
|
||||||
|
|
||||||
|
|
||||||
|
class fHDHR_API():
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
self.cluster = Cluster(fhdhr)
|
||||||
|
self.channels = Channels(fhdhr)
|
||||||
|
self.xmltv = xmlTV(fhdhr)
|
||||||
|
self.m3u = M3U(fhdhr)
|
||||||
|
self.debug = Debug_JSON(fhdhr)
|
||||||
|
self.lineup_post = Lineup_Post(fhdhr)
|
||||||
|
|
||||||
|
self.images = Images(fhdhr)
|
||||||
32
fHDHR/http/api/channels.py
Normal file
32
fHDHR/http/api/channels.py
Normal file
@ -0,0 +1,32 @@
|
|||||||
|
from flask import request, redirect
|
||||||
|
import urllib.parse
|
||||||
|
|
||||||
|
|
||||||
|
class Channels():
|
||||||
|
endpoints = ["/api/channels"]
|
||||||
|
endpoint_name = "api_channels"
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
def __call__(self, *args):
|
||||||
|
return self.get(*args)
|
||||||
|
|
||||||
|
def get(self, *args):
|
||||||
|
|
||||||
|
method = request.args.get('method', default=None, type=str)
|
||||||
|
redirect_url = request.args.get('redirect', default=None, type=str)
|
||||||
|
|
||||||
|
if method == "scan":
|
||||||
|
self.fhdhr.device.station_scan.scan()
|
||||||
|
|
||||||
|
else:
|
||||||
|
return "Invalid Method"
|
||||||
|
|
||||||
|
if redirect_url:
|
||||||
|
return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Success" % method))
|
||||||
|
else:
|
||||||
|
if method == "scan":
|
||||||
|
return redirect('/lineup_status.json')
|
||||||
|
else:
|
||||||
|
return "%s Success" % method
|
||||||
52
fHDHR/http/api/cluster.py
Normal file
52
fHDHR/http/api/cluster.py
Normal file
@ -0,0 +1,52 @@
|
|||||||
|
from flask import request, redirect, Response
|
||||||
|
import urllib.parse
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
|
class Cluster():
|
||||||
|
endpoints = ["/api/cluster"]
|
||||||
|
endpoint_name = "api_cluster"
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
def __call__(self, *args):
|
||||||
|
return self.get(*args)
|
||||||
|
|
||||||
|
def get(self, *args):
|
||||||
|
|
||||||
|
method = request.args.get('method', default="get", type=str)
|
||||||
|
location = request.args.get("location", default=None, type=str)
|
||||||
|
redirect_url = request.args.get('redirect', default=None, type=str)
|
||||||
|
|
||||||
|
if method == "get":
|
||||||
|
jsoncluster = self.fhdhr.device.cluster.cluster()
|
||||||
|
cluster_json = json.dumps(jsoncluster, indent=4)
|
||||||
|
|
||||||
|
return Response(status=200,
|
||||||
|
response=cluster_json,
|
||||||
|
mimetype='application/json')
|
||||||
|
|
||||||
|
elif method == "scan":
|
||||||
|
self.fhdhr.device.ssdp.m_search()
|
||||||
|
|
||||||
|
elif method == 'add':
|
||||||
|
self.fhdhr.device.cluster.add(location)
|
||||||
|
elif method == 'del':
|
||||||
|
self.fhdhr.device.cluster.remove(location)
|
||||||
|
|
||||||
|
elif method == 'sync':
|
||||||
|
self.fhdhr.device.cluster.sync(location)
|
||||||
|
|
||||||
|
elif method == 'leave':
|
||||||
|
self.fhdhr.device.cluster.leave()
|
||||||
|
elif method == 'disconnect':
|
||||||
|
self.fhdhr.device.cluster.disconnect()
|
||||||
|
|
||||||
|
else:
|
||||||
|
return "Invalid Method"
|
||||||
|
|
||||||
|
if redirect_url:
|
||||||
|
return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Success" % method))
|
||||||
|
else:
|
||||||
|
return "%s Success" % method
|
||||||
28
fHDHR/http/api/debug.py
Normal file
28
fHDHR/http/api/debug.py
Normal file
@ -0,0 +1,28 @@
|
|||||||
|
from flask import request, Response
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
|
class Debug_JSON():
|
||||||
|
endpoints = ["/api/debug"]
|
||||||
|
endpoint_name = "api_debug"
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
def __call__(self, *args):
|
||||||
|
return self.get(*args)
|
||||||
|
|
||||||
|
def get(self, *args):
|
||||||
|
|
||||||
|
base_url = request.url_root[:-1]
|
||||||
|
|
||||||
|
debugjson = {
|
||||||
|
"base_url": base_url,
|
||||||
|
"total channels": self.fhdhr.device.channels.get_station_total(),
|
||||||
|
"tuner status": self.fhdhr.device.tuners.status(),
|
||||||
|
}
|
||||||
|
cluster_json = json.dumps(debugjson, indent=4)
|
||||||
|
|
||||||
|
return Response(status=200,
|
||||||
|
response=cluster_json,
|
||||||
|
mimetype='application/json')
|
||||||
43
fHDHR/http/api/images.py
Normal file
43
fHDHR/http/api/images.py
Normal file
@ -0,0 +1,43 @@
|
|||||||
|
from flask import request, Response, abort
|
||||||
|
|
||||||
|
|
||||||
|
class Images():
|
||||||
|
endpoints = ["/api/images"]
|
||||||
|
endpoint_name = "api_images"
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
def __call__(self, *args):
|
||||||
|
return self.get(*args)
|
||||||
|
|
||||||
|
def get(self, *args):
|
||||||
|
|
||||||
|
image = None
|
||||||
|
|
||||||
|
method = request.args.get('method', default="get", type=str)
|
||||||
|
|
||||||
|
if method == "generate":
|
||||||
|
image_type = request.args.get('type', default="content", type=str)
|
||||||
|
if image_type in ["content", "channel"]:
|
||||||
|
message = request.args.get('message', default="Unknown Request", type=str)
|
||||||
|
image = self.fhdhr.device.images.generate_image(image_type, message)
|
||||||
|
|
||||||
|
elif method == "get":
|
||||||
|
source = request.args.get('source', default=self.fhdhr.config.dict["epg"]["method"], type=str)
|
||||||
|
if source in self.fhdhr.config.dict["main"]["valid_epg_methods"]:
|
||||||
|
image_type = request.args.get('type', default="content", type=str)
|
||||||
|
if image_type in ["content", "channel"]:
|
||||||
|
image_id = request.args.get('id', default=None, type=str)
|
||||||
|
if image_id:
|
||||||
|
image = self.fhdhr.device.images.get_epg_image(image_type, image_id)
|
||||||
|
|
||||||
|
else:
|
||||||
|
image = self.fhdhr.device.images.generate_image("content", "Unknown Request")
|
||||||
|
|
||||||
|
if image:
|
||||||
|
imagemimetype = self.fhdhr.device.images.get_image_type(image)
|
||||||
|
return Response(image, content_type=imagemimetype, direct_passthrough=True)
|
||||||
|
|
||||||
|
else:
|
||||||
|
return abort(501, "Not a valid image request")
|
||||||
31
fHDHR/http/api/lineup_post.py
Normal file
31
fHDHR/http/api/lineup_post.py
Normal file
@ -0,0 +1,31 @@
|
|||||||
|
from flask import request, abort, Response
|
||||||
|
|
||||||
|
|
||||||
|
class Lineup_Post():
    """HDHomeRun-compatible POST endpoint that starts/aborts a channel scan."""
    endpoints = ["/lineup.post"]
    endpoint_name = "lineup_post"
    endpoint_methods = ["POST"]

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Handle ``?scan=start|abort``; any other value is rejected.

        Returns an empty 200 response for valid scan commands, 400 for an
        unknown scan value, and 501 when no ``scan`` argument was given.
        """
        if 'scan' in request.args:

            if request.args['scan'] == 'start':
                self.fhdhr.device.station_scan.scan()
                return Response(status=200, mimetype='text/html')

            elif request.args['scan'] == 'abort':
                # Nothing to abort; acknowledge so clients don't retry.
                return Response(status=200, mimetype='text/html')

            else:
                self.fhdhr.logger.warning("Unknown scan command " + request.args['scan'])
                # BUGFIX: abort(200, ...) is invalid -- werkzeug's abort()
                # only maps error statuses, so 200 raised LookupError and
                # surfaced as a 500.  Use 400 for a bad client argument.
                return abort(400, "Not a valid scan command")

        else:
            return abort(501, "Not a valid command")
|
||||||
83
fHDHR/http/api/m3u.py
Normal file
83
fHDHR/http/api/m3u.py
Normal file
@ -0,0 +1,83 @@
|
|||||||
|
from flask import Response, request, redirect
|
||||||
|
import urllib.parse
|
||||||
|
from io import StringIO
|
||||||
|
|
||||||
|
|
||||||
|
class M3U():
    """Builds an M3U playlist of all (or one) of the device's channels."""
    endpoints = ["/api/m3u", "/api/channels.m3u"]
    endpoint_name = "api_m3u"
    xmltv_xml = None

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Return the playlist for ``?method=get`` (optionally filtered by
        ``?channel=<number>``); other methods fall through to a redirect or
        a plain success message.
        """
        base_url = request.url_root[:-1]

        method = request.args.get('method', default="get", type=str)
        channel = request.args.get('channel', default="all", type=str)
        redirect_url = request.args.get('redirect', default=None, type=str)

        if method == "get":

            FORMAT_DESCRIPTOR = "#EXTM3U"
            RECORD_MARKER = "#EXTINF"

            fakefile = StringIO()

            xmltvurl = ('%s/api/xmltv' % base_url)

            fakefile.write(
                "%s\n" % (
                    FORMAT_DESCRIPTOR + " " +
                    "url-tvg=\"" + xmltvurl + "\"" + " " +
                    "x-tvg-url=\"" + xmltvurl + "\"")
                )

            channel_list = self.fhdhr.device.channels.get_channels()
            channel_number_list = [x["number"] for x in channel_list]

            if channel == "all":
                channel_items = channel_list
            elif channel in channel_number_list:
                channel_items = [self.fhdhr.device.channels.get_channel_dict("number", channel)]
            else:
                return "Invalid Channel"

            for channel_item in channel_items:

                logourl = ('%s/api/images?method=get&type=channel&id=%s' %
                           (base_url, str(channel_item['id'])))

                # BUGFIX: the group-title attribute value was never closed
                # before the ",DisplayName" separator, producing a malformed
                # EXTINF line that strict M3U parsers reject.
                fakefile.write(
                    "%s\n" % (
                        RECORD_MARKER + ":0" + " " +
                        "channelID=\"" + str(channel_item['id']) + "\" " +
                        "tvg-chno=\"" + str(channel_item['number']) + "\" " +
                        "tvg-name=\"" + str(channel_item['name']) + "\" " +
                        "tvg-id=\"" + str(channel_item['number']) + "\" " +
                        "tvg-logo=\"" + logourl + "\" " +
                        "group-title=\"" + self.fhdhr.config.dict["fhdhr"]["friendlyname"] + "\"," + str(channel_item['name']))
                    )

                fakefile.write(
                    "%s\n" % (
                        ('%s/auto/v%s' %
                         (base_url, str(channel_item['number'])))
                        )
                    )

            channels_m3u = fakefile.getvalue()

            return Response(status=200,
                            response=channels_m3u,
                            mimetype='text/plain')

        if redirect_url:
            return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Success" % method))
        else:
            return "%s Success" % method
|
||||||
@ -1,29 +1,67 @@
|
|||||||
|
from flask import Response, request, redirect
|
||||||
import xml.etree.ElementTree
|
import xml.etree.ElementTree
|
||||||
from io import BytesIO
|
from io import BytesIO
|
||||||
|
import urllib.parse
|
||||||
|
|
||||||
from fHDHR.tools import sub_el
|
from fHDHR.tools import sub_el
|
||||||
|
|
||||||
|
|
||||||
class xmlTV_XML():
|
class xmlTV():
|
||||||
"""Methods to create xmltv.xml"""
|
"""Methods to create xmltv.xml"""
|
||||||
|
endpoints = ["/api/xmltv", "/xmltv.xml"]
|
||||||
|
endpoint_name = "api_xmltv"
|
||||||
xmltv_xml = None
|
xmltv_xml = None
|
||||||
|
|
||||||
def __init__(self, settings, device):
|
def __init__(self, fhdhr):
|
||||||
self.config = settings
|
self.fhdhr = fhdhr
|
||||||
self.device = device
|
|
||||||
|
|
||||||
def get_xmltv_xml(self, base_url, force_update=False):
|
def __call__(self, *args):
|
||||||
|
return self.get(*args)
|
||||||
|
|
||||||
epgdict = self.device.epg.get_epg()
|
def get(self, *args):
|
||||||
return self.create_xmltv(base_url, epgdict)
|
|
||||||
|
if self.fhdhr.config.dict["fhdhr"]["require_auth"]:
|
||||||
|
DeviceAuth = request.args.get('DeviceAuth', default=None, type=str)
|
||||||
|
if DeviceAuth != self.fhdhr.config.dict["fhdhr"]["device_auth"]:
|
||||||
|
return "not subscribed"
|
||||||
|
|
||||||
|
base_url = request.url_root[:-1]
|
||||||
|
|
||||||
|
method = request.args.get('method', default="get", type=str)
|
||||||
|
|
||||||
|
source = request.args.get('source', default=self.fhdhr.config.dict["epg"]["def_method"], type=str)
|
||||||
|
if source not in self.fhdhr.config.dict["main"]["valid_epg_methods"]:
|
||||||
|
return "%s Invalid xmltv method" % source
|
||||||
|
|
||||||
|
redirect_url = request.args.get('redirect', default=None, type=str)
|
||||||
|
|
||||||
|
if method == "get":
|
||||||
|
|
||||||
|
epgdict = self.fhdhr.device.epg.get_epg(source)
|
||||||
|
xmltv_xml = self.create_xmltv(base_url, epgdict)
|
||||||
|
|
||||||
|
return Response(status=200,
|
||||||
|
response=xmltv_xml,
|
||||||
|
mimetype='application/xml')
|
||||||
|
|
||||||
|
elif method == "update":
|
||||||
|
self.fhdhr.device.epg.update(source)
|
||||||
|
|
||||||
|
else:
|
||||||
|
return "%s Invalid Method" % method
|
||||||
|
|
||||||
|
if redirect_url:
|
||||||
|
return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Success" % method))
|
||||||
|
else:
|
||||||
|
return "%s Success" % method
|
||||||
|
|
||||||
def xmltv_headers(self):
|
def xmltv_headers(self):
|
||||||
"""This method creates the XML headers for our xmltv"""
|
"""This method creates the XML headers for our xmltv"""
|
||||||
xmltvgen = xml.etree.ElementTree.Element('tv')
|
xmltvgen = xml.etree.ElementTree.Element('tv')
|
||||||
xmltvgen.set('source-info-url', self.config.dict["fhdhr"]["friendlyname"])
|
xmltvgen.set('source-info-url', self.fhdhr.config.dict["fhdhr"]["friendlyname"])
|
||||||
xmltvgen.set('source-info-name', self.config.dict["main"]["servicename"])
|
xmltvgen.set('source-info-name', self.fhdhr.config.dict["main"]["servicename"])
|
||||||
xmltvgen.set('generator-info-name', 'fHDHR')
|
xmltvgen.set('generator-info-name', 'fHDHR')
|
||||||
xmltvgen.set('generator-info-url', 'fHDHR/' + self.config.dict["main"]["reponame"])
|
xmltvgen.set('generator-info-url', 'fHDHR/' + self.fhdhr.config.dict["main"]["reponame"])
|
||||||
return xmltvgen
|
return xmltvgen
|
||||||
|
|
||||||
def xmltv_file(self, xmltvgen):
|
def xmltv_file(self, xmltvgen):
|
||||||
@ -51,16 +89,16 @@ class xmlTV_XML():
|
|||||||
sub_el(c_out, 'display-name',
|
sub_el(c_out, 'display-name',
|
||||||
text='%s %s %s' % (epgdict[c]['number'], epgdict[c]['callsign'], str(epgdict[c]['id'])))
|
text='%s %s %s' % (epgdict[c]['number'], epgdict[c]['callsign'], str(epgdict[c]['id'])))
|
||||||
sub_el(c_out, 'display-name', text=epgdict[c]['number'])
|
sub_el(c_out, 'display-name', text=epgdict[c]['number'])
|
||||||
sub_el(c_out, 'display-name',
|
|
||||||
text='%s %s fcc' % (epgdict[c]['number'], epgdict[c]['callsign']))
|
|
||||||
sub_el(c_out, 'display-name', text=epgdict[c]['callsign'])
|
|
||||||
sub_el(c_out, 'display-name', text=epgdict[c]['callsign'])
|
sub_el(c_out, 'display-name', text=epgdict[c]['callsign'])
|
||||||
sub_el(c_out, 'display-name', text=epgdict[c]['name'])
|
sub_el(c_out, 'display-name', text=epgdict[c]['name'])
|
||||||
|
|
||||||
if epgdict[c]["thumbnail"] is not None:
|
if epgdict[c]["thumbnail"] is not None:
|
||||||
sub_el(c_out, 'icon', src=("http://" + str(base_url) + "/images?source=epg&type=channel&id=" + str(epgdict[c]['id'])))
|
if self.fhdhr.config.dict["epg"]["images"] == "proxy":
|
||||||
|
sub_el(c_out, 'icon', src=(str(base_url) + "/api/images?method=get&type=channel&id=" + str(epgdict[c]['id'])))
|
||||||
|
else:
|
||||||
|
sub_el(c_out, 'icon', src=(epgdict[c]["thumbnail"]))
|
||||||
else:
|
else:
|
||||||
sub_el(c_out, 'icon', src=("http://" + str(base_url) + "/images?source=generate&message=" + str(epgdict[c]['number'])))
|
sub_el(c_out, 'icon', src=(str(base_url) + "/api/images?method=generate&type=channel&message=" + urllib.parse.quote(epgdict[c]['name'])))
|
||||||
|
|
||||||
for channelnum in list(epgdict.keys()):
|
for channelnum in list(epgdict.keys()):
|
||||||
|
|
||||||
@ -98,9 +136,12 @@ class xmlTV_XML():
|
|||||||
text='S%02dE%02d' % (s_, e_))
|
text='S%02dE%02d' % (s_, e_))
|
||||||
|
|
||||||
if program["thumbnail"]:
|
if program["thumbnail"]:
|
||||||
sub_el(prog_out, 'icon', src=("http://" + str(base_url) + "/images?source=epg&type=content&id=" + str(program['id'])))
|
if self.fhdhr.config.dict["epg"]["images"] == "proxy":
|
||||||
|
sub_el(prog_out, 'icon', src=(str(base_url) + "/api/images?method=get&type=content&id=" + str(program['id'])))
|
||||||
|
else:
|
||||||
|
sub_el(prog_out, 'icon', src=(program["thumbnail"]))
|
||||||
else:
|
else:
|
||||||
sub_el(prog_out, 'icon', src=("http://" + str(base_url) + "/images?source=generate&message=" + program['title'].replace(" ", "")))
|
sub_el(prog_out, 'icon', src=(str(base_url) + "/api/images?method=generate&type=content&message=" + urllib.parse.quote(program['title'])))
|
||||||
|
|
||||||
if program['rating']:
|
if program['rating']:
|
||||||
rating_out = sub_el(prog_out, 'rating', system="MPAA")
|
rating_out = sub_el(prog_out, 'rating', system="MPAA")
|
||||||
31
fHDHR/http/files/__init__.py
Normal file
31
fHDHR/http/files/__init__.py
Normal file
@ -0,0 +1,31 @@
|
|||||||
|
|
||||||
|
|
||||||
|
from .favicon_ico import Favicon_ICO
|
||||||
|
from .style_css import Style_CSS
|
||||||
|
|
||||||
|
from .device_xml import Device_XML
|
||||||
|
from .lineup_xml import Lineup_XML
|
||||||
|
|
||||||
|
from .discover_json import Discover_JSON
|
||||||
|
from .lineup_json import Lineup_JSON
|
||||||
|
from .lineup_status_json import Lineup_Status_JSON
|
||||||
|
|
||||||
|
from .watch import Watch
|
||||||
|
|
||||||
|
|
||||||
|
class fHDHR_Files():
    """Container instantiating every static/file-style HTTP endpoint."""

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

        # Static assets
        self.favicon = Favicon_ICO(fhdhr)
        self.style = Style_CSS(fhdhr)

        # HDHomeRun XML descriptors
        self.device_xml = Device_XML(fhdhr)
        self.lineup_xml = Lineup_XML(fhdhr)

        # HDHomeRun JSON descriptors
        self.discover_json = Discover_JSON(fhdhr)
        self.lineup_json = Lineup_JSON(fhdhr)
        self.lineup_status_json = Lineup_Status_JSON(fhdhr)

        # Streaming entry point
        self.watch = Watch(fhdhr)
|
||||||
47
fHDHR/http/files/device_xml.py
Normal file
47
fHDHR/http/files/device_xml.py
Normal file
@ -0,0 +1,47 @@
|
|||||||
|
from flask import Response, request
|
||||||
|
from io import BytesIO
|
||||||
|
import xml.etree.ElementTree
|
||||||
|
|
||||||
|
from fHDHR.tools import sub_el
|
||||||
|
|
||||||
|
|
||||||
|
class Device_XML():
    """Serves the UPnP ``device.xml`` descriptor for HDHomeRun discovery."""
    endpoints = ["/device.xml"]
    endpoint_name = "device_xml"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Build and return the device descriptor XML document."""
        base_url = request.url_root[:-1]
        config = self.fhdhr.config.dict

        root = xml.etree.ElementTree.Element('root')
        root.set('xmlns', "urn:schemas-upnp-org:device-1-0")

        sub_el(root, 'URLBase', base_url)

        spec = sub_el(root, 'specVersion')
        sub_el(spec, 'major', "1")
        sub_el(spec, 'minor', "0")

        device = sub_el(root, 'device')
        sub_el(device, 'deviceType', "urn:schemas-upnp-org:device:MediaServer:1")
        sub_el(device, 'friendlyName', config["fhdhr"]["friendlyname"])
        sub_el(device, 'manufacturer', config["fhdhr"]["reporting_manufacturer"])
        sub_el(device, 'modelName', config["fhdhr"]["reporting_model"])
        sub_el(device, 'modelNumber', config["fhdhr"]["reporting_model"])
        sub_el(device, 'serialNumber')
        sub_el(device, 'UDN', "uuid:" + config["main"]["uuid"])

        buffer = BytesIO()
        buffer.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
        buffer.write(xml.etree.ElementTree.tostring(root, encoding='UTF-8'))

        return Response(status=200,
                        response=buffer.getvalue(),
                        mimetype='application/xml')
|
||||||
35
fHDHR/http/files/discover_json.py
Normal file
35
fHDHR/http/files/discover_json.py
Normal file
@ -0,0 +1,35 @@
|
|||||||
|
from flask import Response, request
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
|
class Discover_JSON():
    """Serves ``discover.json``, the HDHomeRun device discovery document."""
    endpoints = ["/discover.json"]
    endpoint_name = "discover_json"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Assemble the discovery dict from config and return it as JSON."""
        base_url = request.url_root[:-1]
        config = self.fhdhr.config.dict

        jsondiscover = {
            "FriendlyName": config["fhdhr"]["friendlyname"],
            "Manufacturer": config["fhdhr"]["reporting_manufacturer"],
            "ModelNumber": config["fhdhr"]["reporting_model"],
            "FirmwareName": config["fhdhr"]["reporting_firmware_name"],
            "TunerCount": config["fhdhr"]["tuner_count"],
            "FirmwareVersion": config["fhdhr"]["reporting_firmware_ver"],
            "DeviceID": config["main"]["uuid"],
            "DeviceAuth": config["fhdhr"]["device_auth"],
            "BaseURL": base_url,
            "LineupURL": base_url + "/lineup.json"
            }

        return Response(status=200,
                        response=json.dumps(jsondiscover, indent=4),
                        mimetype='application/json')
|
||||||
18
fHDHR/http/files/favicon_ico.py
Normal file
18
fHDHR/http/files/favicon_ico.py
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
from flask import send_from_directory
|
||||||
|
|
||||||
|
|
||||||
|
class Favicon_ICO():
    """Serves the site favicon from the configured www directory."""
    endpoints = ["/favicon.ico"]
    endpoint_name = "favicon"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        www_dir = self.fhdhr.config.dict["filedir"]["www_dir"]
        return send_from_directory(www_dir,
                                   'favicon.ico',
                                   mimetype='image/vnd.microsoft.icon')
|
||||||
24
fHDHR/http/files/lineup_json.py
Normal file
24
fHDHR/http/files/lineup_json.py
Normal file
@ -0,0 +1,24 @@
|
|||||||
|
from flask import Response, request
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
|
class Lineup_JSON():
    """Serves ``lineup.json``, the channel list in HDHomeRun format."""
    endpoints = ["/lineup.json"]
    endpoint_name = "lineup_json"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        base_url = request.url_root[:-1]
        station_list = self.fhdhr.device.channels.get_station_list(base_url)
        return Response(status=200,
                        response=json.dumps(station_list, indent=4),
                        mimetype='application/json')
|
||||||
46
fHDHR/http/files/lineup_status_json.py
Normal file
46
fHDHR/http/files/lineup_status_json.py
Normal file
@ -0,0 +1,46 @@
|
|||||||
|
from flask import Response
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
|
class Lineup_Status_JSON():
    """Serves ``lineup_status.json`` reflecting the station scan state."""
    endpoints = ["/lineup_status.json"]
    endpoint_name = "lineup_status_json"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Report scan-in-progress while scanning or before any channels
        have been found; otherwise report idle-and-scannable."""
        scanning = self.fhdhr.device.station_scan.scanning()
        if scanning or not self.fhdhr.device.channels.get_station_total():
            jsonlineup = self.scan_in_progress()
        else:
            jsonlineup = self.not_scanning()

        return Response(status=200,
                        response=json.dumps(jsonlineup, indent=4),
                        mimetype='application/json')

    def scan_in_progress(self):
        """Status payload while a scan is running (or no channels yet)."""
        channel_count = self.fhdhr.device.channels.get_station_total()
        return {
            "ScanInProgress": "true",
            "Progress": 99,
            "Found": channel_count
            }

    def not_scanning(self):
        """Status payload when idle and a scan may be started."""
        tuner_type = self.fhdhr.config.dict["fhdhr"]["reporting_tuner_type"]
        return {
            "ScanInProgress": "false",
            "ScanPossible": "true",
            "Source": tuner_type,
            "SourceList": [tuner_type],
            }
|
||||||
37
fHDHR/http/files/lineup_xml.py
Normal file
37
fHDHR/http/files/lineup_xml.py
Normal file
@ -0,0 +1,37 @@
|
|||||||
|
from flask import Response, request
|
||||||
|
from io import BytesIO
|
||||||
|
import xml.etree.ElementTree
|
||||||
|
|
||||||
|
from fHDHR.tools import sub_el
|
||||||
|
|
||||||
|
|
||||||
|
class Lineup_XML():
    """Serves ``lineup.xml``, the channel list as an XML document."""
    endpoints = ["/lineup.xml"]
    endpoint_name = "lineup_xml"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Render the station list into a <Lineup> XML document."""
        base_url = request.url_root[:-1]

        lineup = xml.etree.ElementTree.Element('Lineup')
        for station in self.fhdhr.device.channels.get_station_list(base_url):
            program = sub_el(lineup, 'Program')
            sub_el(program, 'GuideNumber', station['GuideNumber'])
            sub_el(program, 'GuideName', station['GuideName'])
            sub_el(program, 'URL', station['URL'])

        buffer = BytesIO()
        buffer.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
        buffer.write(xml.etree.ElementTree.tostring(lineup, encoding='UTF-8'))

        return Response(status=200,
                        response=buffer.getvalue(),
                        mimetype='application/xml')
|
||||||
17
fHDHR/http/files/style_css.py
Normal file
17
fHDHR/http/files/style_css.py
Normal file
@ -0,0 +1,17 @@
|
|||||||
|
from flask import send_from_directory
|
||||||
|
|
||||||
|
|
||||||
|
class Style_CSS():
    """Serves the stylesheet from the configured www directory."""
    endpoints = ["/style.css"]
    endpoint_name = "style"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        www_dir = self.fhdhr.config.dict["filedir"]["www_dir"]
        return send_from_directory(www_dir, 'style.css')
|
||||||
29
fHDHR/http/files/watch.py
Normal file
29
fHDHR/http/files/watch.py
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
from flask import Response, request, stream_with_context, abort
|
||||||
|
|
||||||
|
|
||||||
|
class Watch():
    """Streaming endpoint: ``/auto/v<number>`` plays a channel."""
    endpoints = ['/auto/<channel>']
    endpoint_name = "auto"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, channel, *args):
        return self.get(channel, *args)

    def get(self, channel, *args):
        """Resolve the channel's stream and proxy it to the client.

        ``method`` selects the stream backend (default from config);
        responds 503 when no upstream URI could be resolved.
        """
        base_url = request.url_root[:-1]

        # BUGFIX: channel.replace('v', '') stripped EVERY 'v' from the
        # identifier, not just the route's leading 'v' marker, and was
        # computed twice.  Strip only a leading 'v', once.
        channel_number = channel[1:] if channel.startswith('v') else channel

        stream_args = {
            "channel": channel_number,
            "method": request.args.get('method', default=self.fhdhr.config.dict["fhdhr"]["stream_type"], type=str),
            "duration": request.args.get('duration', default=0, type=int),
            "accessed": self.fhdhr.device.channels.get_fhdhr_stream_url(base_url, channel_number),
            }
        stream_args = self.fhdhr.device.watch.get_stream_info(stream_args)

        if stream_args["channelUri"]:
            if stream_args["method"] == "direct":
                return Response(self.fhdhr.device.watch.get_stream(stream_args), content_type=stream_args["content_type"], direct_passthrough=True)
            elif stream_args["method"] == "ffmpeg":
                return Response(stream_with_context(self.fhdhr.device.watch.get_stream(stream_args)), mimetype="video/mpeg")

        abort(503)
|
||||||
33
fHDHR/http/pages/__init__.py
Normal file
33
fHDHR/http/pages/__init__.py
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
|
||||||
|
|
||||||
|
from .htmlerror import HTMLerror
|
||||||
|
from .page_elements import fHDHR_Page_Elements
|
||||||
|
from .index_html import Index_HTML
|
||||||
|
from .origin_html import Origin_HTML
|
||||||
|
from .cluster_html import Cluster_HTML
|
||||||
|
from .diagnostics_html import Diagnostics_HTML
|
||||||
|
from .streams_html import Streams_HTML
|
||||||
|
from .version_html import Version_HTML
|
||||||
|
from .guide_html import Guide_HTML
|
||||||
|
from .xmltv_html import xmlTV_HTML
|
||||||
|
|
||||||
|
|
||||||
|
class fHDHR_Pages():
    """Container instantiating every HTML page endpoint."""

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

        # Shared header/footer fragments used by every page.
        self.page_elements = fHDHR_Page_Elements(fhdhr)

        self.htmlerror = HTMLerror(fhdhr)

        # BUGFIX: Index_HTML was constructed and assigned to self.index
        # twice; the first instance was immediately discarded.
        self.index = Index_HTML(fhdhr, self.page_elements)
        self.origin = Origin_HTML(fhdhr, self.page_elements)
        self.cluster = Cluster_HTML(fhdhr, self.page_elements)
        self.diagnostics = Diagnostics_HTML(fhdhr, self.page_elements)
        self.version = Version_HTML(fhdhr, self.page_elements)
        self.guide = Guide_HTML(fhdhr, self.page_elements)
        self.streams = Streams_HTML(fhdhr, self.page_elements)
        self.xmltv = xmlTV_HTML(fhdhr, self.page_elements)
|
||||||
88
fHDHR/http/pages/cluster_html.py
Normal file
88
fHDHR/http/pages/cluster_html.py
Normal file
@ -0,0 +1,88 @@
|
|||||||
|
from flask import request
|
||||||
|
from io import StringIO
|
||||||
|
import urllib.parse
|
||||||
|
|
||||||
|
|
||||||
|
class Cluster_HTML():
    """Renders the /cluster page listing known fHDHR instances."""
    endpoints = ["/cluster", "/cluster.html"]
    endpoint_name = "cluster"

    def __init__(self, fhdhr, page_elements):
        self.fhdhr = fhdhr
        self.page_elements = page_elements

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Build the cluster management page as an HTML string."""
        fakefile = StringIO()

        page_elements = self.page_elements.get(request)

        for line in page_elements["top"]:
            fakefile.write(line + "\n")

        fakefile.write("<h4 style=\"text-align: center;\">Cluster</h4>")
        fakefile.write("\n")

        # SSDP/cluster features need a discovery address configured.
        if self.fhdhr.config.dict["fhdhr"]["discovery_address"]:

            fakefile.write("<div style=\"text-align: center;\">\n")
            fakefile.write(" <button onclick=\"OpenLink('%s')\">%s</a></button>\n" % ("/api/cluster?method=scan&redirect=%2Fcluster", "Force Scan"))
            fakefile.write(" <button onclick=\"OpenLink('%s')\">%s</a></button>\n" % ("/api/cluster?method=disconnect&redirect=%2Fcluster", "Disconnect"))
            fakefile.write("</div><br>\n")

            fakefile.write("<table class=\"center\" style=\"width:50%\">\n")
            fakefile.write(" <tr>\n")
            fakefile.write(" <th>Name</th>\n")
            fakefile.write(" <th>Location</th>\n")
            fakefile.write(" <th>Joined</th>\n")
            fakefile.write(" <th>Options</th>\n")
            fakefile.write(" </tr>\n")

            fhdhr_list = self.fhdhr.device.cluster.get_list()
            for location in list(fhdhr_list.keys()):
                fakefile.write(" <tr>\n")

                if location in list(self.fhdhr.device.cluster.cluster().keys()):
                    location_name = self.fhdhr.device.cluster.cluster()[location]["name"]
                else:
                    # BUGFIX: on ConnectionError, location_name was never
                    # assigned, so the write below raised NameError (or
                    # reused the previous loop iteration's value).  Fall
                    # back to the raw location string.
                    location_name = location
                    try:
                        location_info_url = location + "/discover.json"
                        location_info_req = self.fhdhr.web.session.get(location_info_url)
                        location_info = location_info_req.json()
                        location_name = location_info["FriendlyName"]
                    except self.fhdhr.web.exceptions.ConnectionError:
                        self.fhdhr.logger.error("Unreachable: " + location)
                fakefile.write(" <td>%s</td>\n" % (str(location_name)))

                fakefile.write(" <td>%s</td>\n" % (str(location)))

                fakefile.write(" <td>%s</td>\n" % (str(fhdhr_list[location]["Joined"])))

                fakefile.write(" <td>\n")
                fakefile.write(" <div>\n")
                location_url_query = urllib.parse.quote(location)
                fakefile.write(
                    " <button onclick=\"OpenLink('%s')\">%s</a></button>\n" %
                    (location, "Visit"))
                if not fhdhr_list[location]["Joined"]:
                    fakefile.write(
                        " <button onclick=\"OpenLink('%s')\">%s</a></button>\n" %
                        ("/api/cluster?method=add&location=" + location_url_query + "&redirect=%2Fcluster", "Add"))
                else:
                    fakefile.write(
                        " <button onclick=\"OpenLink('%s')\">%s</a></button>\n" %
                        ("/api/cluster?method=del&location=" + location_url_query + "&redirect=%2Fcluster", "Remove"))
                fakefile.write(" </div>\n")
                fakefile.write(" </td>\n")

                fakefile.write(" </tr>\n")
        else:
            fakefile.write("<p style=\"text-align: center;\">Discovery Address must be set for SSDP/Cluster</p>\n")

        for line in page_elements["end"]:
            fakefile.write(line + "\n")

        return fakefile.getvalue()
|
||||||
46
fHDHR/http/pages/diagnostics_html.py
Normal file
46
fHDHR/http/pages/diagnostics_html.py
Normal file
@ -0,0 +1,46 @@
|
|||||||
|
from flask import request
|
||||||
|
from io import StringIO
|
||||||
|
|
||||||
|
|
||||||
|
class Diagnostics_HTML():
    """Renders the /diagnostics page: a column of debug-endpoint buttons."""
    endpoints = ["/diagnostics", "/diagnostics.html"]
    endpoint_name = "diagnostics"

    def __init__(self, fhdhr, page_elements):
        self.fhdhr = fhdhr
        self.diagnostics_html = None
        self.page_elements = page_elements

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Build the diagnostics page as an HTML string."""
        fakefile = StringIO()
        page_elements = self.page_elements.get(request)

        for line in page_elements["top"]:
            fakefile.write(line + "\n")

        # (label, path) pairs rendered as navigation buttons.
        button_list = [
            ["debug.json", "/api/debug"],
            ["device.xml", "device.xml"],
            ["discover.json", "discover.json"],
            ["lineup.json", "lineup.json"],
            ["lineup_status.json", "lineup_status.json"],
            ["cluster.json", "/api/cluster?method=get"]
            ]

        for button_label, button_path in button_list:
            fakefile.write("<div style=\"text-align: center;\">\n")
            fakefile.write(" <p><button onclick=\"OpenLink('%s')\">%s</a></button></p>\n" % (button_path, button_label))
            fakefile.write("</div>\n")
            fakefile.write("\n")

        for line in page_elements["end"]:
            fakefile.write(line + "\n")

        return fakefile.getvalue()
|
||||||
@ -1,24 +1,29 @@
|
|||||||
|
from flask import request
|
||||||
from io import StringIO
|
from io import StringIO
|
||||||
import datetime
|
import datetime
|
||||||
|
|
||||||
from fHDHR.tools import humanized_time
|
from fHDHR.tools import humanized_time
|
||||||
|
|
||||||
|
|
||||||
class Channel_Guide_HTML():
|
class Guide_HTML():
|
||||||
|
endpoints = ["/guide", "/guide.html"]
|
||||||
|
endpoint_name = "guide"
|
||||||
|
|
||||||
def __init__(self, settings, device, page_elements):
|
def __init__(self, fhdhr, page_elements):
|
||||||
self.config = settings
|
self.fhdhr = fhdhr
|
||||||
self.device = device
|
|
||||||
self.page_elements = page_elements
|
self.page_elements = page_elements
|
||||||
|
|
||||||
def get_channel_guide_html(self, force_update=False):
|
def __call__(self, *args):
|
||||||
|
return self.get(*args)
|
||||||
|
|
||||||
friendlyname = self.config.dict["fhdhr"]["friendlyname"]
|
def get(self, *args):
|
||||||
|
|
||||||
|
friendlyname = self.fhdhr.config.dict["fhdhr"]["friendlyname"]
|
||||||
|
|
||||||
nowtime = datetime.datetime.utcnow()
|
nowtime = datetime.datetime.utcnow()
|
||||||
|
|
||||||
fakefile = StringIO()
|
fakefile = StringIO()
|
||||||
page_elements = self.page_elements.get()
|
page_elements = self.page_elements.get(request)
|
||||||
|
|
||||||
for line in page_elements["top"]:
|
for line in page_elements["top"]:
|
||||||
fakefile.write(line + "\n")
|
fakefile.write(line + "\n")
|
||||||
@ -26,6 +31,19 @@ class Channel_Guide_HTML():
|
|||||||
fakefile.write("<h4 id=\"mcetoc_1cdobsl3g0\" style=\"text-align: center;\"><span style=\"text-decoration: underline;\"><strong><em>What's On %s</em></strong></span></h4>\n" % friendlyname)
|
fakefile.write("<h4 id=\"mcetoc_1cdobsl3g0\" style=\"text-align: center;\"><span style=\"text-decoration: underline;\"><strong><em>What's On %s</em></strong></span></h4>\n" % friendlyname)
|
||||||
fakefile.write("\n")
|
fakefile.write("\n")
|
||||||
|
|
||||||
|
# a list of 2 part lists containing button information
|
||||||
|
button_list = [
|
||||||
|
["Force xmlTV Update", "/api/xmltv?method=update&redirect=%2Fguide"],
|
||||||
|
]
|
||||||
|
|
||||||
|
fakefile.write("<div style=\"text-align: center;\">\n")
|
||||||
|
for button_item in button_list:
|
||||||
|
button_label = button_item[0]
|
||||||
|
button_path = button_item[1]
|
||||||
|
fakefile.write(" <p><button onclick=\"OpenLink('%s')\">%s</a></button></p>\n" % (button_path, button_label))
|
||||||
|
fakefile.write("</div>\n")
|
||||||
|
fakefile.write("\n")
|
||||||
|
|
||||||
fakefile.write("<table style=\"width:100%\">\n")
|
fakefile.write("<table style=\"width:100%\">\n")
|
||||||
fakefile.write(" <tr>\n")
|
fakefile.write(" <tr>\n")
|
||||||
fakefile.write(" <th>Play</th>\n")
|
fakefile.write(" <th>Play</th>\n")
|
||||||
@ -38,10 +56,10 @@ class Channel_Guide_HTML():
|
|||||||
fakefile.write(" <th>Content Remaining Time</th>\n")
|
fakefile.write(" <th>Content Remaining Time</th>\n")
|
||||||
fakefile.write(" </tr>\n")
|
fakefile.write(" </tr>\n")
|
||||||
|
|
||||||
for channel in self.device.epg.whats_on_allchans():
|
for channel in self.fhdhr.device.epg.whats_on_allchans():
|
||||||
end_time = datetime.datetime.strptime(channel["listing"][0]["time_end"], '%Y%m%d%H%M%S +0000')
|
end_time = datetime.datetime.strptime(channel["listing"][0]["time_end"], '%Y%m%d%H%M%S +0000')
|
||||||
remaining_time = humanized_time(int((end_time - nowtime).total_seconds()))
|
remaining_time = humanized_time(int((end_time - nowtime).total_seconds()))
|
||||||
play_url = ("/%s.m3u\n" % (channel["number"]))
|
play_url = ("/api/m3u?method=get&channel=%s\n" % (channel["number"]))
|
||||||
|
|
||||||
fakefile.write(" <tr>\n")
|
fakefile.write(" <tr>\n")
|
||||||
fakefile.write(" <td><a href=\"%s\">%s</a>\n" % (play_url, "Play"))
|
fakefile.write(" <td><a href=\"%s\">%s</a>\n" % (play_url, "Play"))
|
||||||
@ -1,7 +1,7 @@
|
|||||||
|
|
||||||
class HTMLerror():
|
class HTMLerror():
|
||||||
def __init__(self, settings):
|
def __init__(self, fhdhr):
|
||||||
self.config = settings
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
def get_html_error(self, message):
|
def get_html_error(self, message):
|
||||||
htmlerror = """<html>
|
htmlerror = """<html>
|
||||||
@ -1,17 +1,22 @@
|
|||||||
|
from flask import request
|
||||||
from io import StringIO
|
from io import StringIO
|
||||||
|
|
||||||
|
|
||||||
class Index_HTML():
|
class Index_HTML():
|
||||||
|
endpoints = ["/", "/index.html"]
|
||||||
|
endpoint_name = "root"
|
||||||
|
|
||||||
def __init__(self, settings, device, page_elements):
|
def __init__(self, fhdhr, page_elements):
|
||||||
self.config = settings
|
self.fhdhr = fhdhr
|
||||||
self.device = device
|
|
||||||
self.page_elements = page_elements
|
self.page_elements = page_elements
|
||||||
|
|
||||||
def get_index_html(self, base_url, force_update=False):
|
def __call__(self, *args):
|
||||||
|
return self.get(*args)
|
||||||
|
|
||||||
|
def get(self, *args):
|
||||||
|
|
||||||
fakefile = StringIO()
|
fakefile = StringIO()
|
||||||
page_elements = self.page_elements.get()
|
page_elements = self.page_elements.get(request)
|
||||||
|
|
||||||
for line in page_elements["top"]:
|
for line in page_elements["top"]:
|
||||||
fakefile.write(line + "\n")
|
fakefile.write(line + "\n")
|
||||||
@ -25,15 +30,15 @@ class Index_HTML():
|
|||||||
fakefile.write(" <th></th>\n")
|
fakefile.write(" <th></th>\n")
|
||||||
fakefile.write(" </tr>\n")
|
fakefile.write(" </tr>\n")
|
||||||
|
|
||||||
total_channels = self.device.channels.get_station_total()
|
total_channels = self.fhdhr.device.channels.get_station_total()
|
||||||
|
|
||||||
tuners_in_use = self.device.tuners.inuse_tuner_count()
|
tuners_in_use = self.fhdhr.device.tuners.inuse_tuner_count()
|
||||||
max_tuners = self.device.tuners.max_tuners
|
max_tuners = self.fhdhr.device.tuners.max_tuners
|
||||||
|
|
||||||
tableguts = [
|
tableguts = [
|
||||||
["Script Directory", str(self.config.dict["filedir"]["script_dir"])],
|
["Script Directory", str(self.fhdhr.config.dict["filedir"]["script_dir"])],
|
||||||
["Config File", str(self.config.config_file)],
|
["Config File", str(self.fhdhr.config.config_file)],
|
||||||
["Cache Path", str(self.config.dict["filedir"]["cache_dir"])],
|
["Cache Path", str(self.fhdhr.config.dict["filedir"]["cache_dir"])],
|
||||||
["Total Channels", str(total_channels)],
|
["Total Channels", str(total_channels)],
|
||||||
["Tuner Usage", "%s/%s" % (str(tuners_in_use), str(max_tuners))]
|
["Tuner Usage", "%s/%s" % (str(tuners_in_use), str(max_tuners))]
|
||||||
]
|
]
|
||||||
64
fHDHR/http/pages/origin_html.py
Normal file
64
fHDHR/http/pages/origin_html.py
Normal file
@ -0,0 +1,64 @@
|
|||||||
|
from flask import request
|
||||||
|
from io import StringIO
|
||||||
|
|
||||||
|
|
||||||
|
class Origin_HTML():
|
||||||
|
endpoints = ["/origin", "/origin.html"]
|
||||||
|
endpoint_name = "origin"
|
||||||
|
|
||||||
|
def __init__(self, fhdhr, page_elements):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
self.page_elements = page_elements
|
||||||
|
|
||||||
|
def __call__(self, *args):
|
||||||
|
return self.get(*args)
|
||||||
|
|
||||||
|
def get(self, *args):
|
||||||
|
|
||||||
|
servicename = str(self.fhdhr.config.dict["main"]["servicename"])
|
||||||
|
|
||||||
|
fakefile = StringIO()
|
||||||
|
page_elements = self.page_elements.get(request)
|
||||||
|
|
||||||
|
for line in page_elements["top"]:
|
||||||
|
fakefile.write(line + "\n")
|
||||||
|
|
||||||
|
fakefile.write("<h4 style=\"text-align: center;\">%s Status</h4>" % (servicename))
|
||||||
|
fakefile.write("\n")
|
||||||
|
|
||||||
|
# a list of 2 part lists containing button information
|
||||||
|
button_list = [
|
||||||
|
["Force Channel Update", "/api/channels?method=scan&redirect=%2Forigin"],
|
||||||
|
]
|
||||||
|
|
||||||
|
fakefile.write("<div style=\"text-align: center;\">\n")
|
||||||
|
for button_item in button_list:
|
||||||
|
button_label = button_item[0]
|
||||||
|
button_path = button_item[1]
|
||||||
|
fakefile.write(" <p><button onclick=\"OpenLink('%s')\">%s</a></button></p>\n" % (button_path, button_label))
|
||||||
|
fakefile.write("</div>\n")
|
||||||
|
fakefile.write("\n")
|
||||||
|
|
||||||
|
fakefile.write("<table class=\"center\" style=\"width:50%\">\n")
|
||||||
|
fakefile.write(" <tr>\n")
|
||||||
|
fakefile.write(" <th></th>\n")
|
||||||
|
fakefile.write(" <th></th>\n")
|
||||||
|
fakefile.write(" </tr>\n")
|
||||||
|
|
||||||
|
origin_status_dict = self.fhdhr.device.channels.get_origin_status()
|
||||||
|
for key in list(origin_status_dict.keys()):
|
||||||
|
fakefile.write(" <tr>\n")
|
||||||
|
fakefile.write(" <td>%s</td>\n" % (str(key)))
|
||||||
|
fakefile.write(" <td>%s</td>\n" % (str(origin_status_dict[key])))
|
||||||
|
fakefile.write(" </tr>\n")
|
||||||
|
|
||||||
|
total_channels = self.fhdhr.device.channels.get_station_total()
|
||||||
|
fakefile.write(" <tr>\n")
|
||||||
|
fakefile.write(" <td>%s</td>\n" % ("Total Channels"))
|
||||||
|
fakefile.write(" <td>%s</td>\n" % (str(total_channels)))
|
||||||
|
fakefile.write(" </tr>\n")
|
||||||
|
|
||||||
|
for line in page_elements["end"]:
|
||||||
|
fakefile.write(line + "\n")
|
||||||
|
|
||||||
|
return fakefile.getvalue()
|
||||||
@ -1,28 +1,17 @@
|
|||||||
# pylama:ignore=W0611
|
|
||||||
from io import StringIO
|
|
||||||
|
|
||||||
from .htmlerror import HTMLerror
|
|
||||||
from .index_html import Index_HTML
|
|
||||||
from .origin_html import Origin_HTML
|
|
||||||
from .cluster_html import Cluster_HTML
|
|
||||||
from .diagnostics_html import Diagnostics_HTML
|
|
||||||
from .streams_html import Streams_HTML
|
|
||||||
from .version_html import Version_HTML
|
|
||||||
from .channel_guide_html import Channel_Guide_HTML
|
|
||||||
|
|
||||||
|
|
||||||
class fHDHR_Page_Elements():
|
class fHDHR_Page_Elements():
|
||||||
|
|
||||||
def __init__(self, settings, device):
|
def __init__(self, fhdhr):
|
||||||
self.config = settings
|
self.fhdhr = fhdhr
|
||||||
self.device = device
|
self.location = self.fhdhr.device.cluster.location
|
||||||
|
|
||||||
def get(self):
|
def get(self, request):
|
||||||
return {"top": self.pagetop(), "end": self.pageend()}
|
return {"top": self.pagetop(request), "end": self.pageend(request)}
|
||||||
|
|
||||||
def pagetop(self):
|
def pagetop(self, request):
|
||||||
friendlyname = self.config.dict["fhdhr"]["friendlyname"]
|
friendlyname = self.fhdhr.config.dict["fhdhr"]["friendlyname"]
|
||||||
servicename = str(self.config.dict["main"]["servicename"])
|
servicename = str(self.fhdhr.config.dict["main"]["servicename"])
|
||||||
|
|
||||||
upper_part = [
|
upper_part = [
|
||||||
"<!DOCTYPE html>",
|
"<!DOCTYPE html>",
|
||||||
@ -38,33 +27,38 @@ class fHDHR_Page_Elements():
|
|||||||
"</style>",
|
"</style>",
|
||||||
"<link href=\"style.css\" rel=\"stylesheet\">",
|
"<link href=\"style.css\" rel=\"stylesheet\">",
|
||||||
"</head>",
|
"</head>",
|
||||||
"<h1 id=\"mcetoc_1cdobsl3g0\" style=\"text-align: center;\"><span style=\"text-decoration: underline;\"><strong><em>%s</em></strong></span></h1><br>" % friendlyname,
|
"<h1 style=\"text-align: center;\">",
|
||||||
"",
|
"<span style=\"text-decoration: underline;\"><strong><em>%s</em></strong>" % friendlyname,
|
||||||
|
"</span>",
|
||||||
|
"<img class=\"pull-left\" src=\"%s\" alt=\"%s\" width=\"100\" height=\"100\">\n" % ("/favicon.ico", "fHDHR Logo"),
|
||||||
|
"</h1>"
|
||||||
|
"<br><br>",
|
||||||
"<h2>"
|
"<h2>"
|
||||||
"<div>",
|
"<div>",
|
||||||
|
|
||||||
"<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % ("/", "fHDHR"),
|
"<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % ("/", "fHDHR"),
|
||||||
"<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % ("/origin", servicename),
|
"<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % ("/origin", servicename),
|
||||||
"<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % ("/guide", "Guide"),
|
"<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % ("/guide", "Guide"),
|
||||||
"<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % ("/version", "Version"),
|
|
||||||
"<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % ("/diagnostics", "Diagnostics"),
|
|
||||||
"<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % ("/cluster", "Cluster"),
|
"<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % ("/cluster", "Cluster"),
|
||||||
"<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % ("/streams", "Streams"),
|
"<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % ("/streams", "Streams"),
|
||||||
|
"<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % ("/xmltv", "xmltv"),
|
||||||
|
"<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % ("/version", "Version"),
|
||||||
|
"<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % ("/diagnostics", "Diagnostics"),
|
||||||
|
|
||||||
"<a class=\"pull-right\" style=\"padding: 5px;\" href=\"%s\">%s</a>" % ("xmltv.xml", "xmltv"),
|
"<a class=\"pull-right\" style=\"padding: 5px;\" href=\"%s\">%s</a>" % ("/api/xmltv?method=get&source=origin", "xmltv"),
|
||||||
"<a class=\"pull-right\" style=\"padding: 5px;\" href=\"%s\">%s</a>" % ("channels.m3u", "m3u"),
|
"<a class=\"pull-right\" style=\"padding: 5px;\" href=\"%s\">%s</a>" % ("/api/m3u?method=get&channel=all", "m3u"),
|
||||||
|
|
||||||
"</div>",
|
"</div>",
|
||||||
"<hr align=\"center\" width=\"100%\">"
|
"<hr align=\"center\" width=\"100%\">"
|
||||||
]
|
]
|
||||||
fhdhr_list = self.device.cluster.cluster
|
fhdhr_list = self.fhdhr.device.cluster.cluster()
|
||||||
locations = []
|
locations = []
|
||||||
for location in list(fhdhr_list.keys()):
|
for location in list(fhdhr_list.keys()):
|
||||||
item_dict = {
|
item_dict = {
|
||||||
"base_url": fhdhr_list[location]["base_url"],
|
"base_url": fhdhr_list[location]["base_url"],
|
||||||
"name": fhdhr_list[location]["name"]
|
"name": fhdhr_list[location]["name"]
|
||||||
}
|
}
|
||||||
if item_dict["name"] != friendlyname:
|
if item_dict["base_url"] != self.location:
|
||||||
locations.append(item_dict)
|
locations.append(item_dict)
|
||||||
if len(locations):
|
if len(locations):
|
||||||
upper_part.append("<div>")
|
upper_part.append("<div>")
|
||||||
@ -74,9 +68,13 @@ class fHDHR_Page_Elements():
|
|||||||
upper_part.append("</div>")
|
upper_part.append("</div>")
|
||||||
upper_part.append("<hr align=\"center\" width=\"100%\">")
|
upper_part.append("<hr align=\"center\" width=\"100%\">")
|
||||||
|
|
||||||
|
retmessage = request.args.get('retmessage', default=None, type=str)
|
||||||
|
if retmessage:
|
||||||
|
upper_part.append("<p>%s</p>" % retmessage)
|
||||||
|
|
||||||
return upper_part
|
return upper_part
|
||||||
|
|
||||||
def pageend(self):
|
def pageend(self, request):
|
||||||
return [
|
return [
|
||||||
"</html>",
|
"</html>",
|
||||||
"",
|
"",
|
||||||
@ -87,22 +85,3 @@ class fHDHR_Page_Elements():
|
|||||||
"}",
|
"}",
|
||||||
"</script>"
|
"</script>"
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
class fHDHR_Pages():
|
|
||||||
|
|
||||||
def __init__(self, settings, device):
|
|
||||||
self.config = settings
|
|
||||||
self.device = device
|
|
||||||
|
|
||||||
self.page_elements = fHDHR_Page_Elements(settings, device)
|
|
||||||
|
|
||||||
self.htmlerror = HTMLerror(settings)
|
|
||||||
|
|
||||||
self.index = Index_HTML(settings, self.device, self.page_elements)
|
|
||||||
self.origin = Origin_HTML(settings, self.device, self.page_elements)
|
|
||||||
self.cluster = Cluster_HTML(settings, self.device, self.page_elements)
|
|
||||||
self.diagnostics = Diagnostics_HTML(settings, self.device, self.page_elements)
|
|
||||||
self.version = Version_HTML(settings, self.device, self.page_elements)
|
|
||||||
self.channel_guide = Channel_Guide_HTML(settings, self.device, self.page_elements)
|
|
||||||
self.streams = Streams_HTML(settings, self.device, self.page_elements)
|
|
||||||
@ -1,21 +1,29 @@
|
|||||||
|
from flask import request
|
||||||
from io import StringIO
|
from io import StringIO
|
||||||
|
|
||||||
|
|
||||||
class Streams_HTML():
|
class Streams_HTML():
|
||||||
|
endpoints = ["/streams", "/streams.html"]
|
||||||
|
endpoint_name = "streams"
|
||||||
|
|
||||||
def __init__(self, settings, device, page_elements):
|
def __init__(self, fhdhr, page_elements):
|
||||||
self.config = settings
|
self.fhdhr = fhdhr
|
||||||
self.device = device
|
|
||||||
self.page_elements = page_elements
|
self.page_elements = page_elements
|
||||||
|
|
||||||
def get_streams_html(self, base_url, force_update=False):
|
def __call__(self, *args):
|
||||||
|
return self.get(*args)
|
||||||
|
|
||||||
|
def get(self, *args):
|
||||||
|
|
||||||
fakefile = StringIO()
|
fakefile = StringIO()
|
||||||
page_elements = self.page_elements.get()
|
page_elements = self.page_elements.get(request)
|
||||||
|
|
||||||
for line in page_elements["top"]:
|
for line in page_elements["top"]:
|
||||||
fakefile.write(line + "\n")
|
fakefile.write(line + "\n")
|
||||||
|
|
||||||
|
fakefile.write("<h4 style=\"text-align: center;\">fHDHR Streams</h4>")
|
||||||
|
fakefile.write("\n")
|
||||||
|
|
||||||
fakefile.write("<table style=\"width:100%\">\n")
|
fakefile.write("<table style=\"width:100%\">\n")
|
||||||
fakefile.write(" <tr>\n")
|
fakefile.write(" <tr>\n")
|
||||||
fakefile.write(" <th>Tuner</th>\n")
|
fakefile.write(" <th>Tuner</th>\n")
|
||||||
@ -25,7 +33,7 @@ class Streams_HTML():
|
|||||||
fakefile.write(" <th>Time Active</th>\n")
|
fakefile.write(" <th>Time Active</th>\n")
|
||||||
fakefile.write(" </tr>\n")
|
fakefile.write(" </tr>\n")
|
||||||
|
|
||||||
tuner_status = self.device.tuners.status()
|
tuner_status = self.fhdhr.device.tuners.status()
|
||||||
for tuner in list(tuner_status.keys()):
|
for tuner in list(tuner_status.keys()):
|
||||||
fakefile.write(" <tr>\n")
|
fakefile.write(" <tr>\n")
|
||||||
fakefile.write(" <td>%s</td>\n" % (str(tuner)))
|
fakefile.write(" <td>%s</td>\n" % (str(tuner)))
|
||||||
53
fHDHR/http/pages/version_html.py
Normal file
53
fHDHR/http/pages/version_html.py
Normal file
@ -0,0 +1,53 @@
|
|||||||
|
import sys
|
||||||
|
from flask import request
|
||||||
|
from io import StringIO
|
||||||
|
|
||||||
|
|
||||||
|
class Version_HTML():
|
||||||
|
endpoints = ["/version", "/version.html"]
|
||||||
|
endpoint_name = "version"
|
||||||
|
|
||||||
|
def __init__(self, fhdhr, page_elements):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
self.page_elements = page_elements
|
||||||
|
|
||||||
|
def __call__(self, *args):
|
||||||
|
return self.get(*args)
|
||||||
|
|
||||||
|
def get(self, *args):
|
||||||
|
|
||||||
|
fakefile = StringIO()
|
||||||
|
page_elements = self.page_elements.get(request)
|
||||||
|
|
||||||
|
for line in page_elements["top"]:
|
||||||
|
fakefile.write(line + "\n")
|
||||||
|
|
||||||
|
fakefile.write("<h4 style=\"text-align: center;\">fHDHR Version Information</h4>")
|
||||||
|
fakefile.write("\n")
|
||||||
|
|
||||||
|
fakefile.write("<table class=\"center\" style=\"width:50%\">\n")
|
||||||
|
fakefile.write(" <tr>\n")
|
||||||
|
fakefile.write(" <th></th>\n")
|
||||||
|
fakefile.write(" <th></th>\n")
|
||||||
|
fakefile.write(" </tr>\n")
|
||||||
|
|
||||||
|
fakefile.write(" <tr>\n")
|
||||||
|
fakefile.write(" <td>%s</td>\n" % ("fHDHR"))
|
||||||
|
fakefile.write(" <td>%s</td>\n" % (str(self.fhdhr.version)))
|
||||||
|
fakefile.write(" </tr>\n")
|
||||||
|
|
||||||
|
fakefile.write(" <tr>\n")
|
||||||
|
fakefile.write(" <td>%s</td>\n" % ("Python"))
|
||||||
|
fakefile.write(" <td>%s</td>\n" % (str(sys.version)))
|
||||||
|
fakefile.write(" </tr>\n")
|
||||||
|
|
||||||
|
if self.fhdhr.config.dict["fhdhr"]["stream_type"] == "ffmpeg":
|
||||||
|
fakefile.write(" <tr>\n")
|
||||||
|
fakefile.write(" <td>%s</td>\n" % ("ffmpeg"))
|
||||||
|
fakefile.write(" <td>%s</td>\n" % (str(self.fhdhr.config.dict["ffmpeg"]["version"])))
|
||||||
|
fakefile.write(" </tr>\n")
|
||||||
|
|
||||||
|
for line in page_elements["end"]:
|
||||||
|
fakefile.write(line + "\n")
|
||||||
|
|
||||||
|
return fakefile.getvalue()
|
||||||
56
fHDHR/http/pages/xmltv_html.py
Normal file
56
fHDHR/http/pages/xmltv_html.py
Normal file
@ -0,0 +1,56 @@
|
|||||||
|
from flask import request
|
||||||
|
from io import StringIO
|
||||||
|
|
||||||
|
|
||||||
|
class xmlTV_HTML():
|
||||||
|
endpoints = ["/xmltv"]
|
||||||
|
endpoint_name = "xmltv"
|
||||||
|
|
||||||
|
def __init__(self, fhdhr, page_elements):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
self.page_elements = page_elements
|
||||||
|
|
||||||
|
def __call__(self, *args):
|
||||||
|
return self.get(*args)
|
||||||
|
|
||||||
|
def get(self, *args):
|
||||||
|
|
||||||
|
fakefile = StringIO()
|
||||||
|
page_elements = self.page_elements.get(request)
|
||||||
|
|
||||||
|
for line in page_elements["top"]:
|
||||||
|
fakefile.write(line + "\n")
|
||||||
|
|
||||||
|
fakefile.write("<h4 style=\"text-align: center;\">fHDHR xmltv Options</h4>")
|
||||||
|
fakefile.write("\n")
|
||||||
|
|
||||||
|
fakefile.write("<table class=\"center\" style=\"width:50%\">\n")
|
||||||
|
fakefile.write(" <tr>\n")
|
||||||
|
fakefile.write(" <th>Version</th>\n")
|
||||||
|
fakefile.write(" <th>Link</th>\n")
|
||||||
|
fakefile.write(" <th>Options</th>\n")
|
||||||
|
fakefile.write(" </tr>\n")
|
||||||
|
|
||||||
|
for epg_method in self.fhdhr.config.dict["main"]["valid_epg_methods"]:
|
||||||
|
if epg_method not in [None, "None"]:
|
||||||
|
epg_method_name = epg_method
|
||||||
|
if epg_method == "origin":
|
||||||
|
epg_method_name = self.fhdhr.config.dict["main"]["dictpopname"]
|
||||||
|
fakefile.write(" <tr>\n")
|
||||||
|
fakefile.write(" <td>%s</td>\n" % (epg_method_name))
|
||||||
|
fakefile.write(" <td><a href=\"%s\">%s</a>\n" % ("/api/xmltv?method=get&source=" + epg_method, epg_method_name))
|
||||||
|
|
||||||
|
fakefile.write(" <td>\n")
|
||||||
|
fakefile.write(" <div>\n")
|
||||||
|
fakefile.write(
|
||||||
|
" <button onclick=\"OpenLink('%s')\">%s</a></button>\n" %
|
||||||
|
("/api/xmltv?method=update&source=" + epg_method + "&redirect=%2Fxmltv", "Update"))
|
||||||
|
fakefile.write(" </div>\n")
|
||||||
|
fakefile.write(" </td>\n")
|
||||||
|
|
||||||
|
fakefile.write(" </tr>\n")
|
||||||
|
|
||||||
|
for line in page_elements["end"]:
|
||||||
|
fakefile.write(line + "\n")
|
||||||
|
|
||||||
|
return fakefile.getvalue()
|
||||||
@ -1,3 +1,89 @@
|
|||||||
# pylama:ignore=W0611
|
from .origin_service import OriginService
|
||||||
from . import origin_channels
|
from .origin_channels import OriginChannels
|
||||||
from . import origin_epg
|
from .origin_epg import OriginEPG
|
||||||
|
|
||||||
|
import fHDHR.exceptions
|
||||||
|
|
||||||
|
|
||||||
|
class OriginEPG_StandIN():
|
||||||
|
def __init__(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def update_epg(self, channels):
|
||||||
|
return {}
|
||||||
|
|
||||||
|
|
||||||
|
class OriginChannels_StandIN():
|
||||||
|
def __init__(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def get_channels(self):
|
||||||
|
return []
|
||||||
|
|
||||||
|
def get_channel_stream(self, chandict, allchandict):
|
||||||
|
return [{"number": chandict["number"], "stream_url": None}], False
|
||||||
|
|
||||||
|
|
||||||
|
class OriginServiceWrapper():
|
||||||
|
|
||||||
|
def __init__(self, settings, logger, web, db):
|
||||||
|
self.config = settings
|
||||||
|
self.logger = logger
|
||||||
|
self.web = web
|
||||||
|
|
||||||
|
self.servicename = settings.dict["main"]["servicename"]
|
||||||
|
|
||||||
|
self.setup_success = None
|
||||||
|
self.setup()
|
||||||
|
|
||||||
|
def setup(self):
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.origin = OriginService(self.config, self.logger, self.web)
|
||||||
|
self.setup_success = True
|
||||||
|
self.logger.info("%s Setup Success" % self.servicename)
|
||||||
|
except fHDHR.exceptions.OriginSetupError as e:
|
||||||
|
self.logger.error(e)
|
||||||
|
self.setup_success = False
|
||||||
|
|
||||||
|
if self.setup_success:
|
||||||
|
self.channels = OriginChannels(self.config, self.origin, self.logger, self.web)
|
||||||
|
self.epg = OriginEPG(self.config, self.logger, self.web)
|
||||||
|
else:
|
||||||
|
self.channels = OriginChannels_StandIN()
|
||||||
|
self.epg = OriginEPG_StandIN()
|
||||||
|
|
||||||
|
def get_channels(self):
|
||||||
|
return self.channels.get_channels()
|
||||||
|
|
||||||
|
def get_channel_stream(self, chandict, allchandict):
|
||||||
|
return self.channels.get_channel_stream(chandict, allchandict)
|
||||||
|
|
||||||
|
def update_epg(self, channels):
|
||||||
|
return self.epg.update_epg(channels)
|
||||||
|
|
||||||
|
def get_status_dict(self):
|
||||||
|
|
||||||
|
if self.setup_success:
|
||||||
|
status_dict = {
|
||||||
|
"Setup": "Success",
|
||||||
|
}
|
||||||
|
|
||||||
|
try:
|
||||||
|
full_status_dict = self.origin.get_status_dict()
|
||||||
|
for status_key in list(full_status_dict.keys()):
|
||||||
|
status_dict[status_key] = full_status_dict[status_key]
|
||||||
|
return status_dict
|
||||||
|
except AttributeError:
|
||||||
|
return status_dict
|
||||||
|
else:
|
||||||
|
return {
|
||||||
|
"Setup": "Failed",
|
||||||
|
}
|
||||||
|
|
||||||
|
def __getattr__(self, name):
|
||||||
|
''' will only get called for undefined attributes '''
|
||||||
|
if hasattr(self.origin, name):
|
||||||
|
return eval("self.origin." + name)
|
||||||
|
elif hasattr(self.channels, name):
|
||||||
|
return eval("self.channels." + name)
|
||||||
|
|||||||
@ -1,75 +1,14 @@
|
|||||||
import xmltodict
|
import xmltodict
|
||||||
import json
|
import json
|
||||||
import hashlib
|
|
||||||
|
|
||||||
import fHDHR.tools
|
|
||||||
import fHDHR.exceptions
|
|
||||||
|
|
||||||
|
|
||||||
class OriginService():
|
class OriginChannels():
|
||||||
|
|
||||||
def __init__(self, settings):
|
def __init__(self, settings, origin, logger, web):
|
||||||
self.config = settings
|
self.config = settings
|
||||||
|
self.origin = origin
|
||||||
self.web = fHDHR.tools.WebReq()
|
self.logger = logger
|
||||||
self.login()
|
self.web = web
|
||||||
|
|
||||||
def login(self):
|
|
||||||
print("Logging into NextPVR")
|
|
||||||
self.sid = self.get_sid()
|
|
||||||
if not self.sid:
|
|
||||||
raise fHDHR.exceptions.OriginSetupError("NextPVR Login Failed")
|
|
||||||
else:
|
|
||||||
print("NextPVR Login Success")
|
|
||||||
self.config.write(self.config.dict["main"]["dictpopname"], 'sid', self.sid)
|
|
||||||
|
|
||||||
def get_sid(self):
|
|
||||||
if self.config.dict["origin"]["sid"]:
|
|
||||||
return self.config.dict["origin"]["sid"]
|
|
||||||
|
|
||||||
initiate_url = ('%s%s:%s/service?method=session.initiate&ver=1.0&device=fhdhr' %
|
|
||||||
("https://" if self.config.dict["origin"]["ssl"] else "http://",
|
|
||||||
self.config.dict["origin"]["address"],
|
|
||||||
str(self.config.dict["origin"]["port"]),
|
|
||||||
))
|
|
||||||
|
|
||||||
initiate_req = self.web.session.get(initiate_url)
|
|
||||||
initiate_dict = xmltodict.parse(initiate_req.content)
|
|
||||||
|
|
||||||
sid = initiate_dict['rsp']['sid']
|
|
||||||
salt = initiate_dict['rsp']['salt']
|
|
||||||
md5PIN = hashlib.md5(str(self.config.dict["origin"]['pin']).encode('utf-8')).hexdigest()
|
|
||||||
string = ':%s:%s' % (md5PIN, salt)
|
|
||||||
clientKey = hashlib.md5(string.encode('utf-8')).hexdigest()
|
|
||||||
|
|
||||||
login_url = ('%s%s:%s/service?method=session.login&sid=%s&md5=%s' %
|
|
||||||
("https://" if self.config.dict["origin"]["ssl"] else "http://",
|
|
||||||
self.config.dict["origin"]["address"],
|
|
||||||
str(self.config.dict["origin"]["port"]),
|
|
||||||
sid,
|
|
||||||
clientKey
|
|
||||||
))
|
|
||||||
login_req = self.web.session.get(login_url)
|
|
||||||
login_dict = xmltodict.parse(login_req.content)
|
|
||||||
|
|
||||||
loginsuccess = None
|
|
||||||
if login_dict['rsp']['@stat'] == "ok":
|
|
||||||
if login_dict['rsp']['allow_watch'] == "true":
|
|
||||||
loginsuccess = sid
|
|
||||||
|
|
||||||
return loginsuccess
|
|
||||||
|
|
||||||
def get_status_dict(self):
|
|
||||||
nextpvr_address = ('%s%s:%s' %
|
|
||||||
("https://" if self.config.dict["origin"]["ssl"] else "http://",
|
|
||||||
self.config.dict["origin"]["address"],
|
|
||||||
str(self.config.dict["origin"]["port"]),
|
|
||||||
))
|
|
||||||
ret_status_dict = {
|
|
||||||
"Login": "Success",
|
|
||||||
"Address": nextpvr_address,
|
|
||||||
}
|
|
||||||
return ret_status_dict
|
|
||||||
|
|
||||||
def get_channels(self):
|
def get_channels(self):
|
||||||
|
|
||||||
@ -77,14 +16,14 @@ class OriginService():
|
|||||||
("https://" if self.config.dict["origin"]["ssl"] else "http://",
|
("https://" if self.config.dict["origin"]["ssl"] else "http://",
|
||||||
self.config.dict["origin"]["address"],
|
self.config.dict["origin"]["address"],
|
||||||
str(self.config.dict["origin"]["port"]),
|
str(self.config.dict["origin"]["port"]),
|
||||||
self.sid
|
self.origin.sid
|
||||||
))
|
))
|
||||||
|
|
||||||
data_req = self.web.session.get(data_url)
|
data_req = self.origin.web.session.get(data_url)
|
||||||
data_dict = xmltodict.parse(data_req.content)
|
data_dict = xmltodict.parse(data_req.content)
|
||||||
|
|
||||||
if 'channels' not in list(data_dict['rsp'].keys()):
|
if 'channels' not in list(data_dict['rsp'].keys()):
|
||||||
print("Could not retrieve channel list")
|
self.logger.error("Could not retrieve channel list")
|
||||||
return []
|
return []
|
||||||
|
|
||||||
channel_o_list = data_dict['rsp']['channels']['channel']
|
channel_o_list = data_dict['rsp']['channels']['channel']
|
||||||
|
|||||||
@ -4,13 +4,12 @@ import xmltodict
|
|||||||
import fHDHR.tools
|
import fHDHR.tools
|
||||||
|
|
||||||
|
|
||||||
class originEPG():
|
class OriginEPG():
|
||||||
|
|
||||||
def __init__(self, settings, channels):
|
def __init__(self, settings, logger, web):
|
||||||
self.config = settings
|
self.config = settings
|
||||||
self.channels = channels
|
self.logger = logger
|
||||||
|
self.web = web
|
||||||
self.web = fHDHR.tools.WebReq()
|
|
||||||
|
|
||||||
def get_channel_thumbnail(self, channel_id):
|
def get_channel_thumbnail(self, channel_id):
|
||||||
channel_thumb_url = ("%s%s:%s/service?method=channel.icon&channel_id=%s" %
|
channel_thumb_url = ("%s%s:%s/service?method=channel.icon&channel_id=%s" %
|
||||||
@ -39,10 +38,10 @@ class originEPG():
|
|||||||
def duration_nextpvr_minutes(self, starttime, endtime):
|
def duration_nextpvr_minutes(self, starttime, endtime):
|
||||||
return ((int(endtime) - int(starttime))/1000/60)
|
return ((int(endtime) - int(starttime))/1000/60)
|
||||||
|
|
||||||
def update_epg(self):
|
def update_epg(self, fhdhr_channels):
|
||||||
programguide = {}
|
programguide = {}
|
||||||
|
|
||||||
for c in self.channels.get_channels():
|
for c in fhdhr_channels.get_channels():
|
||||||
|
|
||||||
cdict = fHDHR.tools.xmldictmaker(c, ["callsign", "name", "number", "id"])
|
cdict = fHDHR.tools.xmldictmaker(c, ["callsign", "name", "number", "id"])
|
||||||
|
|
||||||
|
|||||||
71
fHDHR/origin/origin_service.py
Normal file
71
fHDHR/origin/origin_service.py
Normal file
@ -0,0 +1,71 @@
|
|||||||
|
import xmltodict
|
||||||
|
import hashlib
|
||||||
|
|
||||||
|
import fHDHR.tools
|
||||||
|
import fHDHR.exceptions
|
||||||
|
|
||||||
|
|
||||||
|
class OriginService():
|
||||||
|
|
||||||
|
def __init__(self, settings, logger, web):
|
||||||
|
self.config = settings
|
||||||
|
self.logger = logger
|
||||||
|
self.web = web
|
||||||
|
self.login()
|
||||||
|
|
||||||
|
def login(self):
|
||||||
|
self.logger.info("Logging into NextPVR")
|
||||||
|
self.sid = self.get_sid()
|
||||||
|
if not self.sid:
|
||||||
|
raise fHDHR.exceptions.OriginSetupError("NextPVR Login Failed")
|
||||||
|
else:
|
||||||
|
self.logger.info("NextPVR Login Success")
|
||||||
|
self.config.write(self.config.dict["main"]["dictpopname"], 'sid', self.sid)
|
||||||
|
|
||||||
|
def get_sid(self):
|
||||||
|
if self.config.dict["origin"]["sid"]:
|
||||||
|
return self.config.dict["origin"]["sid"]
|
||||||
|
|
||||||
|
initiate_url = ('%s%s:%s/service?method=session.initiate&ver=1.0&device=fhdhr' %
|
||||||
|
("https://" if self.config.dict["origin"]["ssl"] else "http://",
|
||||||
|
self.config.dict["origin"]["address"],
|
||||||
|
str(self.config.dict["origin"]["port"]),
|
||||||
|
))
|
||||||
|
|
||||||
|
initiate_req = self.web.session.get(initiate_url)
|
||||||
|
initiate_dict = xmltodict.parse(initiate_req.content)
|
||||||
|
|
||||||
|
sid = initiate_dict['rsp']['sid']
|
||||||
|
salt = initiate_dict['rsp']['salt']
|
||||||
|
md5PIN = hashlib.md5(str(self.config.dict["origin"]['pin']).encode('utf-8')).hexdigest()
|
||||||
|
string = ':%s:%s' % (md5PIN, salt)
|
||||||
|
clientKey = hashlib.md5(string.encode('utf-8')).hexdigest()
|
||||||
|
|
||||||
|
login_url = ('%s%s:%s/service?method=session.login&sid=%s&md5=%s' %
|
||||||
|
("https://" if self.config.dict["origin"]["ssl"] else "http://",
|
||||||
|
self.config.dict["origin"]["address"],
|
||||||
|
str(self.config.dict["origin"]["port"]),
|
||||||
|
sid,
|
||||||
|
clientKey
|
||||||
|
))
|
||||||
|
login_req = self.web.session.get(login_url)
|
||||||
|
login_dict = xmltodict.parse(login_req.content)
|
||||||
|
|
||||||
|
loginsuccess = None
|
||||||
|
if login_dict['rsp']['@stat'] == "ok":
|
||||||
|
if login_dict['rsp']['allow_watch'] == "true":
|
||||||
|
loginsuccess = sid
|
||||||
|
|
||||||
|
return loginsuccess
|
||||||
|
|
||||||
|
def get_status_dict(self):
|
||||||
|
nextpvr_address = ('%s%s:%s' %
|
||||||
|
("https://" if self.config.dict["origin"]["ssl"] else "http://",
|
||||||
|
self.config.dict["origin"]["address"],
|
||||||
|
str(self.config.dict["origin"]["port"]),
|
||||||
|
))
|
||||||
|
ret_status_dict = {
|
||||||
|
"Login": "Success",
|
||||||
|
"Address": nextpvr_address,
|
||||||
|
}
|
||||||
|
return ret_status_dict
|
||||||
@ -112,3 +112,4 @@ def humanized_time(countdownseconds):
|
|||||||
class WebReq():
|
class WebReq():
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.session = requests.Session()
|
self.session = requests.Session()
|
||||||
|
self.exceptions = requests.exceptions
|
||||||
|
|||||||
@ -3,3 +3,4 @@ gevent
|
|||||||
flask
|
flask
|
||||||
image
|
image
|
||||||
xmltodict
|
xmltodict
|
||||||
|
sqlalchemy
|
||||||
|
|||||||
Loading…
Reference in New Issue
Block a user