mirror of
https://github.com/fHDHR/fHDHR_NextPVR.git
synced 2025-12-06 07:46:58 -05:00
Better Loading process and Code Cleanup
This commit is contained in:
parent
eeb13ea977
commit
a831a07a59
@ -9,4 +9,4 @@ COPY ./ /app/
|
||||
WORKDIR /app
|
||||
RUN pip3 install -r requirements.txt
|
||||
|
||||
ENTRYPOINT ["python3", "/app/main.py", "--config_file", "/app/config/config.ini"]
|
||||
ENTRYPOINT ["python3", "/app/main.py", "--c", "/app/config/config.ini"]
|
||||
|
||||
@ -1,11 +1,12 @@
|
||||
# FakeHDHR_NextPVR
|
||||
# fHDHR_NextPVR
|
||||
|
||||
|
||||
Welcome to the world of streaming to Plex! We use some fancy python here to achieve a system of:
|
||||
|
||||
**f**un
|
||||
**H**ome
|
||||
**D**istribution
|
||||
**H**iatus
|
||||
**H**iatus &
|
||||
**R**ecreation
|
||||
|
||||
|
||||
|
||||
@ -13,14 +13,14 @@
|
||||
# weight = 300
|
||||
# epg_update_frequency = 43200
|
||||
|
||||
[fakehdhr]
|
||||
[fhdhr]
|
||||
# address = 0.0.0.0
|
||||
# port = 5004
|
||||
# discovery_address = 0.0.0.0
|
||||
# tuner_count = 3
|
||||
# friendlyname = fHDHR-Locast
|
||||
# stream_type = ffmpeg
|
||||
# epg_method = proxy
|
||||
# epg_method = origin
|
||||
|
||||
[zap2it]
|
||||
# delay = 5
|
||||
|
||||
@ -3,7 +3,7 @@ address = localhost
|
||||
port = 8866
|
||||
pin =
|
||||
|
||||
[fakehdhr]
|
||||
[fhdhr]
|
||||
# address = 0.0.0.0
|
||||
# port = 5004
|
||||
# discovery_address = 0.0.0.0
|
||||
|
||||
@ -1,11 +1,11 @@
|
||||
[main]
|
||||
uuid = fHDHR_None
|
||||
cache_dir = fHDHR_None
|
||||
uuid =
|
||||
cache_dir =
|
||||
|
||||
[empty]
|
||||
[blocks]
|
||||
epg_update_frequency = 43200
|
||||
|
||||
[fakehdhr]
|
||||
[fhdhr]
|
||||
address = 0.0.0.0
|
||||
port = 5004
|
||||
discovery_address = 0.0.0.0
|
||||
@ -18,7 +18,7 @@ bytes_per_read = 1152000
|
||||
chunksize = 1048576
|
||||
|
||||
[dev]
|
||||
reporting_model = HDHR4-2DT
|
||||
reporting_firmware_name = hdhomerun4_dvbt
|
||||
reporting_firmware_ver = 20150826
|
||||
reporting_manufacturer = BoronDust
|
||||
reporting_model = fHDHR
|
||||
reporting_firmware_ver = 20201001
|
||||
reporting_tuner_type = Antenna
|
||||
@ -1,27 +1,31 @@
|
||||
[dev]
|
||||
reporting_firmware_name = fHDHR_NextPVR
|
||||
|
||||
[main]
|
||||
servicename = NextPVR
|
||||
dictpopname = nextpvr
|
||||
credentials = pin
|
||||
reponame = FakeHDHR_NextPVR
|
||||
reponame = fHDHR_NextPVR
|
||||
required = nextpvr/pin
|
||||
valid_epg_methods = None,blocks,origin,zap2it
|
||||
|
||||
[fakehdhr]
|
||||
[fhdhr]
|
||||
friendlyname = fHDHR-NextPVR
|
||||
stream_type = direct
|
||||
epg_method = proxy
|
||||
epg_method = origin
|
||||
tuner_count = 4
|
||||
|
||||
[nextpvr]
|
||||
address = localhost
|
||||
port = 8866
|
||||
ssl = fHDHR_False
|
||||
pin = fHDHR_None
|
||||
ssl = False
|
||||
pin =
|
||||
weight = 300
|
||||
sidfile = fHDHR_None
|
||||
epg_update_frequency = 43200
|
||||
sid =
|
||||
|
||||
[zap2it]
|
||||
delay = 5
|
||||
postalcode = fHDHR_None
|
||||
postalcode =
|
||||
affiliate_id = gapzap
|
||||
country = USA
|
||||
device = -
|
||||
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 1.4 KiB |
Binary file not shown.
|
Before Width: | Height: | Size: 1.4 KiB |
@ -1,139 +0,0 @@
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
from io import BytesIO
|
||||
import xml.etree.ElementTree as ET
|
||||
|
||||
from . import epgtypes
|
||||
|
||||
|
||||
def sub_el(parent, name, text=None, **kwargs):
|
||||
el = ET.SubElement(parent, name, **kwargs)
|
||||
if text:
|
||||
el.text = text
|
||||
return el
|
||||
|
||||
|
||||
def clean_exit():
|
||||
sys.stderr.flush()
|
||||
sys.stdout.flush()
|
||||
os._exit(0)
|
||||
|
||||
|
||||
class EPGhandler():
|
||||
|
||||
def __init__(self, config, serviceproxy):
|
||||
self.config = config.copy()
|
||||
self.epgtypes = epgtypes.EPGTypes(config, serviceproxy)
|
||||
|
||||
def get_xmltv(self, base_url):
|
||||
epgdict = self.epgtypes.get_epg()
|
||||
if not epgdict:
|
||||
return self.dummyxml()
|
||||
|
||||
epg_method = self.config["fakehdhr"]["epg_method"]
|
||||
|
||||
out = ET.Element('tv')
|
||||
out.set('source-info-url', self.config["fakehdhr"]["friendlyname"])
|
||||
out.set('source-info-name', self.config["main"]["servicename"])
|
||||
out.set('generator-info-name', 'FAKEHDHR')
|
||||
out.set('generator-info-url', 'FAKEHDHR/' + self.config["main"]["reponame"])
|
||||
|
||||
for c in list(epgdict.keys()):
|
||||
|
||||
c_out = sub_el(out, 'channel', id=str(epgdict[c]['number']))
|
||||
sub_el(c_out, 'display-name',
|
||||
text='%s %s' % (epgdict[c]['number'], epgdict[c]['callsign']))
|
||||
sub_el(c_out, 'display-name',
|
||||
text='%s %s %s' % (epgdict[c]['number'], epgdict[c]['callsign'], str(epgdict[c]['id'])))
|
||||
sub_el(c_out, 'display-name', text=epgdict[c]['number'])
|
||||
sub_el(c_out, 'display-name',
|
||||
text='%s %s fcc' % (epgdict[c]['number'], epgdict[c]['callsign']))
|
||||
sub_el(c_out, 'display-name', text=epgdict[c]['callsign'])
|
||||
sub_el(c_out, 'display-name', text=epgdict[c]['callsign'])
|
||||
sub_el(c_out, 'display-name', text=epgdict[c]['name'])
|
||||
|
||||
if epgdict[c]["thumbnail"] is not None:
|
||||
channel_thumbnail = self.epgtypes.thumb_url(epg_method, "channel", base_url, str(epgdict[c]['thumbnail']))
|
||||
sub_el(c_out, 'icon', src=(str(channel_thumbnail)))
|
||||
else:
|
||||
sub_el(c_out, 'icon', src=("http://" + str(base_url) + "/images?source=empty&type=channel&id=" + c['number']))
|
||||
|
||||
for progitem in list(epgdict.keys()):
|
||||
|
||||
channel_listing = epgdict[progitem]['listing']
|
||||
|
||||
for program in channel_listing:
|
||||
|
||||
prog_out = sub_el(out, 'programme',
|
||||
start=program['time_start'],
|
||||
stop=program['time_end'],
|
||||
channel=str(progitem))
|
||||
|
||||
sub_el(prog_out, 'title', lang='en', text=program['title'])
|
||||
|
||||
sub_el(prog_out, 'desc', lang='en', text=program['description'])
|
||||
|
||||
sub_el(prog_out, 'sub-title', lang='en', text='Movie: ' + program['sub-title'])
|
||||
|
||||
sub_el(prog_out, 'length', units='minutes', text=str(int(program['duration_minutes'])))
|
||||
|
||||
for f in program['genres']:
|
||||
sub_el(prog_out, 'category', lang='en', text=f)
|
||||
sub_el(prog_out, 'genre', lang='en', text=f)
|
||||
|
||||
if program['seasonnumber'] and program['episodenumber']:
|
||||
s_ = int(str(program['seasonnumber']), 10)
|
||||
e_ = int(str(program['episodenumber']), 10)
|
||||
sub_el(prog_out, 'episode-num', system='dd_progid',
|
||||
text=str(program['id']))
|
||||
sub_el(prog_out, 'episode-num', system='common',
|
||||
text='S%02dE%02d' % (s_, e_))
|
||||
sub_el(prog_out, 'episode-num', system='xmltv_ns',
|
||||
text='%d.%d.' % (int(s_)-1, int(e_)-1))
|
||||
sub_el(prog_out, 'episode-num', system='SxxExx">S',
|
||||
text='S%02dE%02d' % (s_, e_))
|
||||
|
||||
if program["thumbnail"] is not None:
|
||||
content_thumbnail = self.epgtypes.thumb_url(epg_method, "content", base_url, str(epgdict[c]['thumbnail']))
|
||||
sub_el(prog_out, 'icon', src=(str(content_thumbnail)))
|
||||
else:
|
||||
sub_el(prog_out, 'icon', src=("http://" + str(base_url) + "/images?source=empty&type=content&id=" + program['title']))
|
||||
|
||||
if program['rating']:
|
||||
rating_out = sub_el(prog_out, 'rating', system="MPAA")
|
||||
sub_el(rating_out, 'value', text=program['rating'])
|
||||
|
||||
if program['isnew']:
|
||||
sub_el(prog_out, 'new')
|
||||
|
||||
fakefile = BytesIO()
|
||||
fakefile.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
|
||||
fakefile.write(ET.tostring(out, encoding='UTF-8'))
|
||||
return fakefile.getvalue()
|
||||
|
||||
def dummyxml(self):
|
||||
out = ET.Element('tv')
|
||||
out.set('source-info-url', self.config["fakehdhr"]["friendlyname"])
|
||||
out.set('source-info-name', self.config["main"]["servicename"])
|
||||
out.set('generator-info-name', 'FAKEHDHR')
|
||||
out.set('generator-info-url', 'FAKEHDHR/' + self.config["main"]["reponame"])
|
||||
|
||||
fakefile = BytesIO()
|
||||
fakefile.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
|
||||
fakefile.write(ET.tostring(out, encoding='UTF-8'))
|
||||
return fakefile.getvalue()
|
||||
|
||||
|
||||
def epgServerProcess(config, epghandling):
|
||||
|
||||
sleeptime = int(config[config["fakehdhr"]["epg_method"]]["epg_update_frequency"])
|
||||
|
||||
try:
|
||||
|
||||
while True:
|
||||
epghandling.epgtypes.update()
|
||||
time.sleep(sleeptime)
|
||||
|
||||
except KeyboardInterrupt:
|
||||
clean_exit()
|
||||
@ -1,28 +0,0 @@
|
||||
from . import zap2it
|
||||
from . import empty
|
||||
|
||||
|
||||
class EPGTypes():
|
||||
|
||||
def __init__(self, config, serviceproxy):
|
||||
self.config = config.copy()
|
||||
self.proxy = serviceproxy
|
||||
self.zap2it = zap2it.ZapEPG(config, serviceproxy)
|
||||
self.empty = empty.EmptyEPG(config, serviceproxy)
|
||||
|
||||
def get_epg(self):
|
||||
method_to_call = getattr(self, self.config["fakehdhr"]["epg_method"])
|
||||
func_to_call = getattr(method_to_call, 'epg_cache_open')
|
||||
epgdict = func_to_call()
|
||||
return epgdict
|
||||
|
||||
def thumb_url(self, epg_method, thumb_type, base_url, thumbnail):
|
||||
method_to_call = getattr(self, self.config["fakehdhr"]["epg_method"])
|
||||
func_to_call = getattr(method_to_call, 'thumb_url')
|
||||
thumbnail = func_to_call(thumb_type, base_url, thumbnail)
|
||||
return thumbnail
|
||||
|
||||
def update(self):
|
||||
method_to_call = getattr(self, self.config["fakehdhr"]["epg_method"])
|
||||
func_to_call = getattr(method_to_call, 'update_epg')
|
||||
func_to_call()
|
||||
2
fHDHR/__init__.py
Normal file
2
fHDHR/__init__.py
Normal file
@ -0,0 +1,2 @@
|
||||
# coding=utf-8
|
||||
fHDHR_VERSION = "v0.2.0-beta"
|
||||
0
fHDHR/cli/__init__.py
Normal file
0
fHDHR/cli/__init__.py
Normal file
96
fHDHR/cli/run.py
Normal file
96
fHDHR/cli/run.py
Normal file
@ -0,0 +1,96 @@
|
||||
import os
|
||||
import sys
|
||||
import time
|
||||
import argparse
|
||||
from multiprocessing import Process
|
||||
|
||||
from fHDHR import fHDHR_VERSION, config, originservice, ssdpserver, epghandler, fHDHRerrors, fHDHRweb
|
||||
|
||||
ERR_CODE = 1
|
||||
ERR_CODE_NO_RESTART = 2
|
||||
|
||||
|
||||
if sys.version_info.major == 2 or sys.version_info < (3, 3):
|
||||
print('Error: fHDHR requires python 3.3+.')
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def build_args_parser():
|
||||
"""Build argument parser for fHDHR"""
|
||||
print("Validating CLI Argument")
|
||||
parser = argparse.ArgumentParser(description='fHDHR')
|
||||
parser.add_argument('-c', '--config', dest='cfg', type=str, required=True, help='configuration file to load.')
|
||||
return parser.parse_args()
|
||||
|
||||
|
||||
def get_configuration(args, script_dir):
|
||||
if not os.path.isfile(args.cfg):
|
||||
raise config.ConfigurationNotFound(filename=args.cfg)
|
||||
return config.Config(args.cfg, script_dir)
|
||||
|
||||
|
||||
def get_originservice(settings):
|
||||
return originservice.OriginService(settings)
|
||||
|
||||
|
||||
def run(settings, origserv, epghandling):
|
||||
|
||||
if settings.dict["fhdhr"]["discovery_address"]:
|
||||
ssdpServer = Process(target=ssdpserver.ssdpServerProcess, args=(settings,))
|
||||
ssdpServer.start()
|
||||
|
||||
if settings.dict["fhdhr"]["epg_method"]:
|
||||
epgServer = Process(target=epghandler.epgServerProcess, args=(settings, epghandling))
|
||||
epgServer.start()
|
||||
|
||||
fhdhrweb = Process(target=fHDHRweb.interface_start, args=(settings, origserv, epghandling))
|
||||
fhdhrweb.start()
|
||||
|
||||
print(settings.dict["fhdhr"]["friendlyname"] + " is now running!")
|
||||
|
||||
# wait forever
|
||||
while True:
|
||||
time.sleep(3600)
|
||||
|
||||
return ERR_CODE
|
||||
|
||||
|
||||
def start(args, script_dir):
|
||||
"""Get Configuration for fHDHR and start"""
|
||||
|
||||
try:
|
||||
settings = get_configuration(args, script_dir)
|
||||
except fHDHRerrors.ConfigurationError as e:
|
||||
print(e)
|
||||
return ERR_CODE_NO_RESTART
|
||||
|
||||
try:
|
||||
origserv = get_originservice(settings)
|
||||
except fHDHRerrors.LoginError as e:
|
||||
print(e)
|
||||
return ERR_CODE_NO_RESTART
|
||||
|
||||
try:
|
||||
epghandling = epghandler.EPGhandler(settings, origserv)
|
||||
except fHDHRerrors.EPGSetupError as e:
|
||||
print(e)
|
||||
return ERR_CODE_NO_RESTART
|
||||
|
||||
return run(settings, origserv, epghandling)
|
||||
|
||||
|
||||
def main(script_dir):
|
||||
"""fHDHR run script entry point"""
|
||||
|
||||
print("Loading fHDHR " + fHDHR_VERSION)
|
||||
|
||||
try:
|
||||
args = build_args_parser()
|
||||
return start(args, script_dir)
|
||||
except KeyboardInterrupt:
|
||||
print("\n\nInterrupted")
|
||||
return ERR_CODE
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
158
fHDHR/config/__init__.py
Normal file
158
fHDHR/config/__init__.py
Normal file
@ -0,0 +1,158 @@
|
||||
import os
|
||||
import random
|
||||
import configparser
|
||||
import pathlib
|
||||
|
||||
from fHDHR import fHDHRerrors
|
||||
from fHDHR.tools import isint, isfloat, is_arithmetic
|
||||
|
||||
|
||||
class Config():
|
||||
|
||||
def __init__(self, filename, script_dir):
|
||||
self.dict = {}
|
||||
self.config_file = filename
|
||||
self.parser = configparser.RawConfigParser(allow_no_value=True)
|
||||
|
||||
self.load_defaults(script_dir)
|
||||
|
||||
print("Loading Configuration File: " + str(self.config_file))
|
||||
self.read_config(self.config_file)
|
||||
|
||||
print("Verifying Configuration settings.")
|
||||
self.config_verification()
|
||||
|
||||
print("Server is set to run on " +
|
||||
str(self.dict["fhdhr"]["address"]) + ":" +
|
||||
str(self.dict["fhdhr"]["port"]))
|
||||
|
||||
def load_defaults(self, script_dir):
|
||||
|
||||
data_dir = pathlib.Path(script_dir).joinpath('data')
|
||||
www_dir = pathlib.Path(data_dir).joinpath('www')
|
||||
|
||||
self.dict["filedir"] = {
|
||||
"script_dir": script_dir,
|
||||
"data_dir": data_dir,
|
||||
|
||||
"cache_dir": pathlib.Path(data_dir).joinpath('cache'),
|
||||
"internal_config": pathlib.Path(data_dir).joinpath('internal_config'),
|
||||
"www_dir": www_dir,
|
||||
"font": pathlib.Path(data_dir).joinpath('garamond.ttf'),
|
||||
"favicon": pathlib.Path(data_dir).joinpath('favicon.ico'),
|
||||
"epg_cache": {},
|
||||
}
|
||||
|
||||
for conffile in os.listdir(self.dict["filedir"]["internal_config"]):
|
||||
conffilepath = os.path.join(self.dict["filedir"]["internal_config"], conffile)
|
||||
self.read_config(conffilepath)
|
||||
|
||||
def read_config(self, conffilepath):
|
||||
config_handler = configparser.ConfigParser()
|
||||
config_handler.read(conffilepath)
|
||||
for each_section in config_handler.sections():
|
||||
if each_section not in list(self.dict.keys()):
|
||||
self.dict[each_section] = {}
|
||||
for (each_key, each_val) in config_handler.items(each_section):
|
||||
if not each_val:
|
||||
each_val = None
|
||||
elif each_val.lower() in ["none", "false"]:
|
||||
each_val = False
|
||||
elif each_val.lower() in ["true"]:
|
||||
each_val = True
|
||||
elif isint(each_val):
|
||||
each_val = int(each_val)
|
||||
elif isfloat(each_val):
|
||||
each_val = float(each_val)
|
||||
elif is_arithmetic(each_val):
|
||||
each_val = eval(each_val)
|
||||
elif "," in each_val:
|
||||
each_val = each_val.split(",")
|
||||
self.dict[each_section.lower()][each_key.lower()] = each_val
|
||||
|
||||
def write(self, section, key, value):
|
||||
if section == self.dict["main"]["dictpopname"]:
|
||||
self.dict["origin"][key] = value
|
||||
else:
|
||||
self.dict[section][key] = value
|
||||
|
||||
config_handler = configparser.ConfigParser()
|
||||
config_handler.read(self.config_file)
|
||||
|
||||
if not config_handler.has_section(section):
|
||||
config_handler.add_section(section)
|
||||
|
||||
config_handler.set(section, key, value)
|
||||
|
||||
with open(self.config_file, 'w') as config_file:
|
||||
config_handler.write(config_file)
|
||||
|
||||
def config_verification(self):
|
||||
|
||||
if self.dict["main"]["required"]:
|
||||
required_missing = []
|
||||
if isinstance(self.dict["main"]["required"], str):
|
||||
self.dict["main"]["required"] = [self.dict["main"]["required"]]
|
||||
if len(self.dict["main"]["required"]):
|
||||
for req_item in self.dict["main"]["required"]:
|
||||
req_section = req_item.split("/")[0]
|
||||
req_key = req_item.split("/")[1]
|
||||
if not self.dict[req_section][req_key]:
|
||||
required_missing.append(req_item)
|
||||
if len(required_missing):
|
||||
raise fHDHRerrors.ConfigurationError("Required configuration options missing: " + ", ".join(required_missing))
|
||||
|
||||
self.dict["origin"] = self.dict.pop(self.dict["main"]["dictpopname"])
|
||||
|
||||
if isinstance(self.dict["main"]["valid_epg_methods"], str):
|
||||
self.dict["main"]["valid_epg_methods"] = [self.dict["main"]["valid_epg_methods"]]
|
||||
|
||||
if self.dict["fhdhr"]["epg_method"] and self.dict["fhdhr"]["epg_method"] not in ["None"]:
|
||||
if self.dict["fhdhr"]["epg_method"] == self.dict["main"]["dictpopname"]:
|
||||
self.dict["fhdhr"]["epg_method"] = "origin"
|
||||
elif self.dict["fhdhr"]["epg_method"] not in self.dict["main"]["valid_epg_methods"]:
|
||||
raise fHDHRerrors.ConfigurationError("Invalid EPG Method. Exiting...")
|
||||
else:
|
||||
print("EPG Method not set, will not create EPG/xmltv")
|
||||
|
||||
# generate UUID here for when we are not using docker
|
||||
if not self.dict["main"]["uuid"]:
|
||||
print("No UUID found. Generating one now...")
|
||||
# from https://pynative.com/python-generate-random-string/
|
||||
# create a string that wouldn't be a real device uuid for
|
||||
self.dict["main"]["uuid"] = ''.join(random.choice("hijklmnopqrstuvwxyz") for i in range(8))
|
||||
self.write('main', 'uuid', self.dict["main"]["uuid"])
|
||||
print("UUID set to: " + str(self.dict["main"]["uuid"]) + "...")
|
||||
else:
|
||||
print("UUID read as: " + str(self.dict["main"]["uuid"]) + "...")
|
||||
|
||||
if self.dict["main"]["cache_dir"]:
|
||||
print("Verifying cache directory...")
|
||||
if not pathlib.Path(self.dict["main"]["cache_dir"]).is_dir():
|
||||
raise fHDHRerrors.ConfigurationError("Invalid Cache Directory. Exiting...")
|
||||
self.dict["filedir"]["cache_dir"] = pathlib.Path(self.dict["main"]["cache_dir"])
|
||||
print("Cache set to " + str(self.dict["filedir"]["cache_dir"]))
|
||||
cache_dir = self.dict["filedir"]["cache_dir"]
|
||||
|
||||
for epg_method in self.dict["main"]["valid_epg_methods"]:
|
||||
if epg_method and epg_method != "None":
|
||||
epg_cache_dir = pathlib.Path(cache_dir).joinpath(epg_method)
|
||||
if not epg_cache_dir.is_dir():
|
||||
epg_cache_dir.mkdir()
|
||||
if epg_method not in list(self.dict["filedir"]["epg_cache"].keys()):
|
||||
self.dict["filedir"]["epg_cache"][epg_method] = {}
|
||||
self.dict["filedir"]["epg_cache"][epg_method]["top"] = epg_cache_dir
|
||||
epg_web_cache_dir = pathlib.Path(epg_cache_dir).joinpath("web_cache")
|
||||
if not epg_web_cache_dir.is_dir():
|
||||
epg_web_cache_dir.mkdir()
|
||||
self.dict["filedir"]["epg_cache"][epg_method]["web_cache"] = epg_web_cache_dir
|
||||
self.dict["filedir"]["epg_cache"][epg_method]["epg_json"] = pathlib.Path(epg_cache_dir).joinpath('epg.json')
|
||||
|
||||
if self.dict["fhdhr"]["stream_type"] not in ["direct", "ffmpeg"]:
|
||||
raise fHDHRerrors.ConfigurationError("Invalid stream type. Exiting...")
|
||||
|
||||
if not self.dict["fhdhr"]["discovery_address"] and self.dict["fhdhr"]["address"] != "0.0.0.0":
|
||||
self.dict["fhdhr"]["discovery_address"] = self.dict["fhdhr"]["address"]
|
||||
if not self.dict["fhdhr"]["discovery_address"] or self.dict["fhdhr"]["discovery_address"] == "0.0.0.0":
|
||||
self.dict["fhdhr"]["discovery_address"] = None
|
||||
print("SSDP Server disabled.")
|
||||
35
fHDHR/epghandler/__init__.py
Normal file
35
fHDHR/epghandler/__init__.py
Normal file
@ -0,0 +1,35 @@
|
||||
import time
|
||||
|
||||
from fHDHR.epghandler import epgtypes, xmltv
|
||||
|
||||
|
||||
class EPGhandler():
|
||||
|
||||
def __init__(self, settings, origserv):
|
||||
self.config = settings
|
||||
|
||||
self.epg_method = self.config.dict["fhdhr"]["epg_method"]
|
||||
if self.epg_method:
|
||||
self.sleeptime = self.config.dict[self.epg_method]["epg_update_frequency"]
|
||||
|
||||
self.epgtypes = epgtypes.EPGTypes(settings, origserv)
|
||||
self.xmltv = xmltv.xmlTV(settings)
|
||||
|
||||
def get_xmltv(self, base_url):
|
||||
epgdict = self.epgtypes.get_epg()
|
||||
return self.xmltv.create_xmltv(base_url, epgdict)
|
||||
|
||||
def get_thumbnail(self, itemtype, itemid):
|
||||
return self.epgtypes.get_thumbnail(itemtype, itemid)
|
||||
|
||||
|
||||
def epgServerProcess(settings, epghandling):
|
||||
print("Starting EPG thread...")
|
||||
try:
|
||||
|
||||
while True:
|
||||
epghandling.epgtypes.update()
|
||||
time.sleep(epghandling.sleeptime)
|
||||
|
||||
except KeyboardInterrupt:
|
||||
pass
|
||||
58
fHDHR/epghandler/epgtypes/__init__.py
Normal file
58
fHDHR/epghandler/epgtypes/__init__.py
Normal file
@ -0,0 +1,58 @@
|
||||
import os
|
||||
import json
|
||||
from collections import OrderedDict
|
||||
|
||||
from fHDHR.epghandler.epgtypes import blocks, zap2it
|
||||
|
||||
|
||||
class EPGTypes():
|
||||
|
||||
def __init__(self, settings, origserv):
|
||||
self.config = settings
|
||||
self.origin = origserv
|
||||
|
||||
self.blocks = blocks.BlocksEPG(settings, origserv)
|
||||
self.zap2it = zap2it.ZapEPG(settings, origserv)
|
||||
|
||||
self.epg_method = self.config.dict["fhdhr"]["epg_method"]
|
||||
if self.epg_method:
|
||||
self.epg_cache_file = self.config.dict["filedir"]["epg_cache"][self.epg_method]["epg_json"]
|
||||
|
||||
self.epgtypename = self.epg_method
|
||||
if self.epg_method == self.config.dict["main"]["dictpopname"] or self.epg_method == "origin":
|
||||
self.epgtypename = self.config.dict["main"]["dictpopname"]
|
||||
|
||||
def get_epg(self):
|
||||
epgdict = None
|
||||
if os.path.isfile(self.epg_cache_file):
|
||||
with open(self.epg_cache_file, 'r') as epgfile:
|
||||
epgdict = json.load(epgfile)
|
||||
return epgdict
|
||||
|
||||
def get_thumbnail(self, itemtype, itemid):
|
||||
epgdict = self.get_epg()
|
||||
if itemtype == "channel":
|
||||
for channel in list(epgdict.keys()):
|
||||
if epgdict[channel]["id"] == itemid:
|
||||
return epgdict[channel]["thumbnail"]
|
||||
elif itemtype == "content":
|
||||
for channel in list(epgdict.keys()):
|
||||
for progitem in epgdict[channel]["listing"]:
|
||||
if progitem["id"] == itemid:
|
||||
return progitem["thumbnail"]
|
||||
return None
|
||||
|
||||
def update(self):
|
||||
|
||||
print("Updating " + self.epgtypename + " EPG cache file.")
|
||||
method_to_call = getattr(self, self.epg_method)
|
||||
func_to_call = getattr(method_to_call, 'update_epg')
|
||||
programguide = func_to_call()
|
||||
|
||||
programguide = OrderedDict(sorted(programguide.items()))
|
||||
for cnum in programguide:
|
||||
programguide[cnum]["listing"] = sorted(programguide[cnum]["listing"], key=lambda i: i['time_start'])
|
||||
|
||||
with open(self.epg_cache_file, 'w') as epgfile:
|
||||
epgfile.write(json.dumps(programguide, indent=4))
|
||||
print("Wrote " + self.epgtypename + " EPG cache file.")
|
||||
@ -1,38 +1,13 @@
|
||||
import os
|
||||
import json
|
||||
import datetime
|
||||
|
||||
|
||||
class EmptyEPG():
|
||||
class BlocksEPG():
|
||||
|
||||
def __init__(self, config, serviceproxy):
|
||||
|
||||
self.config = config.copy()
|
||||
self.serviceproxy = serviceproxy
|
||||
|
||||
self.postalcode = None
|
||||
|
||||
self.epg_cache = None
|
||||
self.cache_dir = self.config["empty"]["empty_cache"]
|
||||
self.epg_cache_file = self.config["empty"]["empty_cache_file"]
|
||||
self.epg_cache = self.epg_cache_open()
|
||||
|
||||
def epg_cache_open(self):
|
||||
epg_cache = None
|
||||
if os.path.isfile(self.epg_cache_file):
|
||||
with open(self.epg_cache_file, 'r') as epgfile:
|
||||
epg_cache = json.load(epgfile)
|
||||
return epg_cache
|
||||
|
||||
def thumb_url(self, thumb_type, base_url, thumbnail):
|
||||
if thumb_type == "channel":
|
||||
return "http://" + str(base_url) + str(thumbnail)
|
||||
elif thumb_type == "content":
|
||||
return "http://" + str(base_url) + str(thumbnail)
|
||||
def __init__(self, settings, origserv):
|
||||
self.config = settings
|
||||
self.origserv = origserv
|
||||
|
||||
def update_epg(self):
|
||||
print('Updating Empty EPG cache file.')
|
||||
|
||||
programguide = {}
|
||||
|
||||
timestamps = []
|
||||
@ -53,7 +28,7 @@ class EmptyEPG():
|
||||
}
|
||||
timestamps.append(timestampdict)
|
||||
|
||||
for c in self.serviceproxy.get_channels():
|
||||
for c in self.origserv.get_channels():
|
||||
if str(c["number"]) not in list(programguide.keys()):
|
||||
programguide[str(c["number"])] = {
|
||||
"callsign": c["callsign"],
|
||||
@ -85,11 +60,4 @@ class EmptyEPG():
|
||||
|
||||
programguide[str(c["number"])]["listing"].append(clean_prog_dict)
|
||||
|
||||
for cnum in programguide:
|
||||
programguide[cnum]["listing"] = sorted(programguide[cnum]["listing"], key=lambda i: i['time_start'])
|
||||
|
||||
self.epg_cache = programguide
|
||||
with open(self.epg_cache_file, 'w') as epgfile:
|
||||
epgfile.write(json.dumps(programguide, indent=4))
|
||||
print('Wrote updated Empty EPG cache file.')
|
||||
return programguide
|
||||
@ -1,141 +1,62 @@
|
||||
import os
|
||||
import json
|
||||
import time
|
||||
import datetime
|
||||
import urllib.error
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
|
||||
|
||||
def xmltimestamp_zap(inputtime):
|
||||
xmltime = inputtime.replace('Z', '+00:00')
|
||||
xmltime = datetime.datetime.fromisoformat(xmltime)
|
||||
xmltime = xmltime.strftime('%Y%m%d%H%M%S %z')
|
||||
return xmltime
|
||||
|
||||
|
||||
def xmldictmaker(inputdict, req_items, list_items=[], str_items=[]):
|
||||
xml_dict = {}
|
||||
|
||||
for origitem in list(inputdict.keys()):
|
||||
xml_dict[origitem] = inputdict[origitem]
|
||||
|
||||
for req_item in req_items:
|
||||
if req_item not in list(inputdict.keys()):
|
||||
xml_dict[req_item] = None
|
||||
if not xml_dict[req_item]:
|
||||
if req_item in list_items:
|
||||
xml_dict[req_item] = []
|
||||
elif req_item in str_items:
|
||||
xml_dict[req_item] = ""
|
||||
|
||||
return xml_dict
|
||||
from fHDHR.tools import xmldictmaker, WebReq
|
||||
from fHDHR.fHDHRerrors import EPGSetupError
|
||||
|
||||
|
||||
class ZapEPG():
|
||||
|
||||
def __init__(self, config, serviceproxy):
|
||||
def __init__(self, settings, origserv):
|
||||
self.config = settings
|
||||
self.origserv = origserv
|
||||
self.web = WebReq()
|
||||
|
||||
self.config = config.copy()
|
||||
self.serviceproxy = serviceproxy
|
||||
self.postalcode = self.config.dict["zap2it"]["postalcode"]
|
||||
|
||||
self.postalcode = None
|
||||
|
||||
self.epg_cache = None
|
||||
self.cache_dir = self.config["main"]["zap_web_cache"]
|
||||
self.epg_cache_file = self.config["zap2it"]["epg_cache"]
|
||||
self.epg_cache = self.epg_cache_open()
|
||||
self.web_cache_dir = self.config.dict["filedir"]["epg_cache"]["zap2it"]["web_cache"]
|
||||
|
||||
def get_location(self):
|
||||
self.postalcode = self.config["zap2it"]["postalcode"]
|
||||
if self.postalcode:
|
||||
url = 'http://ipinfo.io/json'
|
||||
response = urllib.request.urlopen(url)
|
||||
data = json.load(response)
|
||||
return data["postal"]
|
||||
|
||||
def epg_cache_open(self):
|
||||
epg_cache = None
|
||||
if os.path.isfile(self.epg_cache_file):
|
||||
with open(self.epg_cache_file, 'r') as epgfile:
|
||||
epg_cache = json.load(epgfile)
|
||||
return epg_cache
|
||||
|
||||
def thumb_url(self, thumb_type, base_url, thumbnail):
|
||||
if thumb_type == "channel":
|
||||
return thumbnail
|
||||
elif thumb_type == "content":
|
||||
return thumbnail
|
||||
|
||||
def get_cached(self, cache_key, delay, url):
|
||||
cache_path = self.cache_dir.joinpath(cache_key)
|
||||
if cache_path.is_file():
|
||||
print('FROM CACHE:', str(cache_path))
|
||||
with open(cache_path, 'rb') as f:
|
||||
return f.read()
|
||||
else:
|
||||
print('Fetching: ', url)
|
||||
print("Zap2it postalcode not set, attempting to retrieve.")
|
||||
if not self.postalcode:
|
||||
try:
|
||||
resp = urllib.request.urlopen(url)
|
||||
result = resp.read()
|
||||
except urllib.error.HTTPError as e:
|
||||
if e.code == 400:
|
||||
print('Got a 400 error! Ignoring it.')
|
||||
result = (
|
||||
b'{'
|
||||
b'"note": "Got a 400 error at this time, skipping.",'
|
||||
b'"channels": []'
|
||||
b'}')
|
||||
else:
|
||||
raise
|
||||
with open(cache_path, 'wb') as f:
|
||||
f.write(result)
|
||||
time.sleep(int(delay))
|
||||
return result
|
||||
|
||||
def remove_stale_cache(self, todaydate):
|
||||
for p in self.cache_dir.glob('*'):
|
||||
try:
|
||||
cachedate = datetime.datetime.strptime(str(p.name), "%Y-%m-%d")
|
||||
todaysdate = datetime.datetime.strptime(str(todaydate), "%Y-%m-%d")
|
||||
if cachedate >= todaysdate:
|
||||
continue
|
||||
postalcode_url = 'http://ipinfo.io/json'
|
||||
postalcode_req = self.web.session.get(postalcode_url)
|
||||
data = postalcode_req.json()
|
||||
self.postalcode = data["postal"]
|
||||
except Exception as e:
|
||||
print(e)
|
||||
pass
|
||||
print('Removing stale cache file:', p.name)
|
||||
p.unlink()
|
||||
raise EPGSetupError("Unable to automatically optain zap2it postalcode: " + str(e))
|
||||
return self.postalcode
|
||||
|
||||
def update_epg(self):
|
||||
print('Updating Zap2it EPG cache file.')
|
||||
programguide = {}
|
||||
|
||||
self.get_location()
|
||||
|
||||
# Start time parameter is now rounded down to nearest `zap_timespan`, in s.
|
||||
zap_time = time.mktime(time.localtime())
|
||||
zap_time_window = int(self.config["zap2it"]["timespan"]) * 3600
|
||||
zap_time_window = int(self.config.dict["zap2it"]["timespan"]) * 3600
|
||||
zap_time = int(zap_time - (zap_time % zap_time_window))
|
||||
|
||||
self.remove_stale_cache(zap_time)
|
||||
|
||||
# Fetch data in `zap_timespan` chunks.
|
||||
for i in range(int(7 * 24 / int(self.config["zap2it"]["timespan"]))):
|
||||
for i in range(int(7 * 24 / int(self.config.dict["zap2it"]["timespan"]))):
|
||||
i_time = zap_time + (i * zap_time_window)
|
||||
|
||||
parameters = {
|
||||
'aid': self.config["zap2it"]['affiliate_id'],
|
||||
'country': self.config["zap2it"]['country'],
|
||||
'device': self.config["zap2it"]['device'],
|
||||
'headendId': self.config["zap2it"]['headendid'],
|
||||
'aid': self.config.dict["zap2it"]['affiliate_id'],
|
||||
'country': self.config.dict["zap2it"]['country'],
|
||||
'device': self.config.dict["zap2it"]['device'],
|
||||
'headendId': self.config.dict["zap2it"]['headendid'],
|
||||
'isoverride': "true",
|
||||
'languagecode': self.config["zap2it"]['languagecode'],
|
||||
'languagecode': self.config.dict["zap2it"]['languagecode'],
|
||||
'pref': 'm,p',
|
||||
'timespan': self.config["zap2it"]['timespan'],
|
||||
'timezone': self.config["zap2it"]['timezone'],
|
||||
'userId': self.config["zap2it"]['userid'],
|
||||
'postalCode': self.postalcode,
|
||||
'lineupId': '%s-%s-DEFAULT' % (self.config["zap2it"]['country'], self.config["zap2it"]['device']),
|
||||
'timespan': self.config.dict["zap2it"]['timespan'],
|
||||
'timezone': self.config.dict["zap2it"]['timezone'],
|
||||
'userId': self.config.dict["zap2it"]['userid'],
|
||||
'postalCode': str(self.postalcode or self.get_location()),
|
||||
'lineupId': '%s-%s-DEFAULT' % (self.config.dict["zap2it"]['country'], self.config.dict["zap2it"]['device']),
|
||||
'time': i_time,
|
||||
'Activity_ID': 1,
|
||||
'FromPage': "TV%20Guide",
|
||||
@ -144,7 +65,7 @@ class ZapEPG():
|
||||
url = 'https://tvlistings.zap2it.com/api/grid?'
|
||||
url += urllib.parse.urlencode(parameters)
|
||||
|
||||
result = self.get_cached(str(i_time), self.config["zap2it"]['delay'], url)
|
||||
result = self.get_cached(str(i_time), self.config.dict["zap2it"]['delay'], url)
|
||||
d = json.loads(result)
|
||||
|
||||
for c in d['channels']:
|
||||
@ -168,8 +89,8 @@ class ZapEPG():
|
||||
progdict = xmldictmaker(event['program'], ["title", "sub-title", "releaseYear", "episodeTitle", "shortDesc", "season", "episode", "id"])
|
||||
|
||||
clean_prog_dict = {
|
||||
"time_start": xmltimestamp_zap(eventdict['startTime']),
|
||||
"time_end": xmltimestamp_zap(eventdict['endTime']),
|
||||
"time_start": self.xmltimestamp_zap(eventdict['startTime']),
|
||||
"time_end": self.xmltimestamp_zap(eventdict['endTime']),
|
||||
"duration_minutes": eventdict['duration'],
|
||||
"thumbnail": str("https://zap2it.tmsimg.com/assets/" + str(eventdict['thumbnail']) + ".jpg"),
|
||||
"title": progdict['title'] or "Unavailable",
|
||||
@ -182,7 +103,7 @@ class ZapEPG():
|
||||
"seasonnumber": progdict['season'],
|
||||
"episodenumber": progdict['episode'],
|
||||
"isnew": False,
|
||||
"id": progdict['id'] or xmltimestamp_zap(eventdict['startTime']),
|
||||
"id": progdict['id'] or self.xmltimestamp_zap(eventdict['startTime']),
|
||||
}
|
||||
|
||||
for f in eventdict['filter']:
|
||||
@ -198,11 +119,38 @@ class ZapEPG():
|
||||
|
||||
programguide[str(cdict["channelNo"])]["listing"].append(clean_prog_dict)
|
||||
|
||||
for cnum in programguide:
|
||||
programguide[cnum]["listing"] = sorted(programguide[cnum]["listing"], key=lambda i: i['time_start'])
|
||||
|
||||
self.epg_cache = programguide
|
||||
with open(self.epg_cache_file, 'w') as epgfile:
|
||||
epgfile.write(json.dumps(programguide, indent=4))
|
||||
print('Wrote updated Zap2it EPG cache file.')
|
||||
return programguide
|
||||
|
||||
def xmltimestamp_zap(self, inputtime):
|
||||
xmltime = inputtime.replace('Z', '+00:00')
|
||||
xmltime = datetime.datetime.fromisoformat(xmltime)
|
||||
xmltime = xmltime.strftime('%Y%m%d%H%M%S %z')
|
||||
return xmltime
|
||||
|
||||
def get_cached(self, cache_key, delay, url):
|
||||
cache_path = self.web_cache_dir.joinpath(cache_key)
|
||||
if cache_path.is_file():
|
||||
print('FROM CACHE:', str(cache_path))
|
||||
with open(cache_path, 'rb') as f:
|
||||
return f.read()
|
||||
else:
|
||||
print('Fetching: ', url)
|
||||
resp = self.web.session.get(url)
|
||||
result = resp.content
|
||||
with open(cache_path, 'wb') as f:
|
||||
f.write(result)
|
||||
time.sleep(int(delay))
|
||||
return result
|
||||
|
||||
def remove_stale_cache(self, todaydate):
|
||||
for p in self.web_cache_dir.glob('*'):
|
||||
try:
|
||||
cachedate = datetime.datetime.strptime(str(p.name), "%Y-%m-%d")
|
||||
todaysdate = datetime.datetime.strptime(str(todaydate), "%Y-%m-%d")
|
||||
if cachedate >= todaysdate:
|
||||
continue
|
||||
except Exception as e:
|
||||
print(e)
|
||||
pass
|
||||
print('Removing stale cache file:', p.name)
|
||||
p.unlink()
|
||||
110
fHDHR/epghandler/xmltv.py
Normal file
110
fHDHR/epghandler/xmltv.py
Normal file
@ -0,0 +1,110 @@
|
||||
import xml.etree.ElementTree
|
||||
from io import BytesIO
|
||||
|
||||
|
||||
class xmlTV():
|
||||
"""Methods to create xmltv.xml"""
|
||||
|
||||
def __init__(self, settings):
|
||||
self.config = settings
|
||||
self.epg_method = self.config.dict["fhdhr"]["epg_method"]
|
||||
|
||||
def sub_el(self, parent, name, text=None, **kwargs):
|
||||
el = xml.etree.ElementTree.SubElement(parent, name, **kwargs)
|
||||
if text:
|
||||
el.text = text
|
||||
return el
|
||||
|
||||
def xmltv_headers(self):
|
||||
"""This method creates the XML headers for our xmltv"""
|
||||
xmltvgen = xml.etree.ElementTree.Element('tv')
|
||||
xmltvgen.set('source-info-url', self.config.dict["fhdhr"]["friendlyname"])
|
||||
xmltvgen.set('source-info-name', self.config.dict["main"]["servicename"])
|
||||
xmltvgen.set('generator-info-name', 'fHDHR')
|
||||
xmltvgen.set('generator-info-url', 'fHDHR/' + self.config.dict["main"]["reponame"])
|
||||
return xmltvgen
|
||||
|
||||
def xmltv_file(self, xmltvgen):
|
||||
"""This method is used to close out the xml file"""
|
||||
xmltvfile = BytesIO()
|
||||
xmltvfile.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
|
||||
xmltvfile.write(xml.etree.ElementTree.tostring(xmltvgen, encoding='UTF-8'))
|
||||
return xmltvfile.getvalue()
|
||||
|
||||
def xmltv_empty(self):
|
||||
"""This method is called when creation of a full xmltv is not possible"""
|
||||
return self.xmltv_file(self.xmltv_headers())
|
||||
|
||||
def create_xmltv(self, base_url, epgdict):
|
||||
if not epgdict:
|
||||
return self.xmltv_empty()
|
||||
|
||||
out = self.xmltv_headers()
|
||||
|
||||
for c in list(epgdict.keys()):
|
||||
|
||||
c_out = self.sub_el(out, 'channel', id=str(epgdict[c]['number']))
|
||||
self.sub_el(c_out, 'display-name',
|
||||
text='%s %s' % (epgdict[c]['number'], epgdict[c]['callsign']))
|
||||
self.sub_el(c_out, 'display-name',
|
||||
text='%s %s %s' % (epgdict[c]['number'], epgdict[c]['callsign'], str(epgdict[c]['id'])))
|
||||
self.sub_el(c_out, 'display-name', text=epgdict[c]['number'])
|
||||
self.sub_el(c_out, 'display-name',
|
||||
text='%s %s fcc' % (epgdict[c]['number'], epgdict[c]['callsign']))
|
||||
self.sub_el(c_out, 'display-name', text=epgdict[c]['callsign'])
|
||||
self.sub_el(c_out, 'display-name', text=epgdict[c]['callsign'])
|
||||
self.sub_el(c_out, 'display-name', text=epgdict[c]['name'])
|
||||
|
||||
if epgdict[c]["thumbnail"] is not None:
|
||||
self.sub_el(c_out, 'icon', src=("http://" + str(base_url) + "/images?source=epg&type=channel&id=" + epgdict[c]['id']))
|
||||
else:
|
||||
self.sub_el(c_out, 'icon', src=("http://" + str(base_url) + "/images?source=generate&message=" + epgdict[c]['number']))
|
||||
|
||||
for channelnum in list(epgdict.keys()):
|
||||
|
||||
channel_listing = epgdict[channelnum]['listing']
|
||||
|
||||
for program in channel_listing:
|
||||
|
||||
prog_out = self.sub_el(out, 'programme',
|
||||
start=program['time_start'],
|
||||
stop=program['time_end'],
|
||||
channel=str(channelnum))
|
||||
|
||||
self.sub_el(prog_out, 'title', lang='en', text=program['title'])
|
||||
|
||||
self.sub_el(prog_out, 'desc', lang='en', text=program['description'])
|
||||
|
||||
self.sub_el(prog_out, 'sub-title', lang='en', text='Movie: ' + program['sub-title'])
|
||||
|
||||
self.sub_el(prog_out, 'length', units='minutes', text=str(int(program['duration_minutes'])))
|
||||
|
||||
for f in program['genres']:
|
||||
self.sub_el(prog_out, 'category', lang='en', text=f)
|
||||
self.sub_el(prog_out, 'genre', lang='en', text=f)
|
||||
|
||||
if program['seasonnumber'] and program['episodenumber']:
|
||||
s_ = int(str(program['seasonnumber']), 10)
|
||||
e_ = int(str(program['episodenumber']), 10)
|
||||
self.sub_el(prog_out, 'episode-num', system='dd_progid',
|
||||
text=str(program['id']))
|
||||
self.sub_el(prog_out, 'episode-num', system='common',
|
||||
text='S%02dE%02d' % (s_, e_))
|
||||
self.sub_el(prog_out, 'episode-num', system='xmltv_ns',
|
||||
text='%d.%d.' % (int(s_)-1, int(e_)-1))
|
||||
self.sub_el(prog_out, 'episode-num', system='SxxExx">S',
|
||||
text='S%02dE%02d' % (s_, e_))
|
||||
|
||||
if program["thumbnail"]:
|
||||
self.sub_el(prog_out, 'icon', src=("http://" + str(base_url) + "/images?source=epg&type=content&id=" + program['id']))
|
||||
else:
|
||||
self.sub_el(prog_out, 'icon', src=("http://" + str(base_url) + "/images?source=generate&message=" + program['title'].replace(" ", "")))
|
||||
|
||||
if program['rating']:
|
||||
rating_out = self.sub_el(prog_out, 'rating', system="MPAA")
|
||||
self.sub_el(rating_out, 'value', text=program['rating'])
|
||||
|
||||
if program['isnew']:
|
||||
self.sub_el(prog_out, 'new')
|
||||
|
||||
return self.xmltv_file(out)
|
||||
32
fHDHR/fHDHRerrors/__init__.py
Normal file
32
fHDHR/fHDHRerrors/__init__.py
Normal file
@ -0,0 +1,32 @@
|
||||
|
||||
class LoginError(Exception):
|
||||
def __init__(self, value):
|
||||
self.value = value
|
||||
|
||||
def __str__(self):
|
||||
return 'LoginError: %s' % self.value
|
||||
|
||||
|
||||
class EPGSetupError(Exception):
|
||||
def __init__(self, value):
|
||||
self.value = value
|
||||
|
||||
def __str__(self):
|
||||
return 'EPGSetupError: %s' % self.value
|
||||
|
||||
|
||||
class ConfigurationError(Exception):
|
||||
def __init__(self, value):
|
||||
self.value = value
|
||||
|
||||
def __str__(self):
|
||||
return 'ConfigurationError: %s' % self.value
|
||||
|
||||
|
||||
class ConfigurationNotFound(ConfigurationError):
|
||||
def __init__(self, filename):
|
||||
super(ConfigurationNotFound, self).__init__(None)
|
||||
self.filename = filename
|
||||
|
||||
def __str__(self):
|
||||
return 'Unable to find the configuration file %s' % self.filename
|
||||
@ -4,7 +4,6 @@ from flask import (Flask, send_from_directory, request, Response,
|
||||
from io import BytesIO
|
||||
import xml.etree.ElementTree as ET
|
||||
import json
|
||||
import time
|
||||
import requests
|
||||
import subprocess
|
||||
import threading
|
||||
@ -28,7 +27,7 @@ def getSize(txt, font):
|
||||
|
||||
class HDHR_Hub():
|
||||
config = None
|
||||
serviceproxy = None
|
||||
origserv = None
|
||||
epghandling = None
|
||||
station_scan = False
|
||||
station_list = []
|
||||
@ -38,11 +37,11 @@ class HDHR_Hub():
|
||||
self.tuner_lock = threading.Lock()
|
||||
self.tuners = 0
|
||||
|
||||
def hubprep(self, config, serviceproxy, epghandling):
|
||||
def hubprep(self, config, origserv, epghandling):
|
||||
self.config = config
|
||||
self.max_tuners = int(self.config["fakehdhr"]["tuner_count"])
|
||||
self.max_tuners = int(self.config.dict["fhdhr"]["tuner_count"])
|
||||
self.station_scan = False
|
||||
self.serviceproxy = serviceproxy
|
||||
self.origserv = origserv
|
||||
self.epghandling = epghandling
|
||||
|
||||
def tuner_usage(self, number):
|
||||
@ -62,46 +61,56 @@ class HDHR_Hub():
|
||||
def get_xmltv(self, base_url):
|
||||
return self.epghandling.get_xmltv(base_url)
|
||||
|
||||
def get_image(self, req_args):
|
||||
|
||||
imageid = req_args["id"]
|
||||
|
||||
if req_args["source"] == "proxy":
|
||||
if req_args["type"] == "channel":
|
||||
imageUri = self.serviceproxy.get_channel_thumbnail(imageid)
|
||||
elif req_args["type"] == "content":
|
||||
imageUri = self.serviceproxy.get_content_thumbnail(imageid)
|
||||
req = requests.get(imageUri)
|
||||
return req.content
|
||||
|
||||
elif req_args["source"] == "empty":
|
||||
if req_args["type"] == "channel":
|
||||
def generate_image(self, messagetype, message):
|
||||
if messagetype == "channel":
|
||||
width = 360
|
||||
height = 270
|
||||
text = req_args["id"]
|
||||
fontsize = 72
|
||||
elif req_args["type"] == "content":
|
||||
elif messagetype == "content":
|
||||
width = 1080
|
||||
height = 1440
|
||||
fontsize = 100
|
||||
text = req_args["id"]
|
||||
|
||||
colorBackground = "#228822"
|
||||
colorText = "#717D7E"
|
||||
colorOutline = "#717D7E"
|
||||
fontname = str(self.config["fakehdhr"]["font"])
|
||||
fontname = str(self.config.dict["filedir"]["font"])
|
||||
|
||||
font = PIL.ImageFont.truetype(fontname, fontsize)
|
||||
text_width, text_height = getSize(text, font)
|
||||
text_width, text_height = getSize(message, font)
|
||||
img = PIL.Image.new('RGBA', (width+4, height+4), colorBackground)
|
||||
d = PIL.ImageDraw.Draw(img)
|
||||
d.text(((width-text_width)/2, (height-text_height)/2), text, fill=colorText, font=font)
|
||||
d.text(((width-text_width)/2, (height-text_height)/2), message, fill=colorText, font=font)
|
||||
d.rectangle((0, 0, width+3, height+3), outline=colorOutline)
|
||||
|
||||
s = BytesIO()
|
||||
img.save(s, 'png')
|
||||
return s.getvalue()
|
||||
|
||||
def get_image(self, req_args):
|
||||
|
||||
imageUri = self.epghandling.get_thumbnail(req_args["type"], req_args["id"])
|
||||
if not imageUri:
|
||||
return self.generate_image(req_args["type"], req_args["id"])
|
||||
|
||||
try:
|
||||
req = requests.get(imageUri)
|
||||
return req.content
|
||||
except Exception as e:
|
||||
print(e)
|
||||
return self.generate_image(req_args["type"], req_args["id"])
|
||||
|
||||
def get_image_type(self, image_data):
|
||||
header_byte = image_data[0:3].hex().lower()
|
||||
if header_byte == '474946':
|
||||
return "image/gif"
|
||||
elif header_byte == '89504e':
|
||||
return "image/png"
|
||||
elif header_byte == 'ffd8ff':
|
||||
return "image/jpeg"
|
||||
else:
|
||||
return "image/jpeg"
|
||||
|
||||
def get_xmldiscover(self, base_url):
|
||||
out = ET.Element('root')
|
||||
out.set('xmlns', "urn:schemas-upnp-org:device-1-0")
|
||||
@ -114,12 +123,12 @@ class HDHR_Hub():
|
||||
|
||||
device_out = sub_el(out, 'device')
|
||||
sub_el(device_out, 'deviceType', "urn:schemas-upnp-org:device:MediaServer:1")
|
||||
sub_el(device_out, 'friendlyName', self.config["fakehdhr"]["friendlyname"])
|
||||
sub_el(device_out, 'manufacturer', "Silicondust")
|
||||
sub_el(device_out, 'modelName', self.config["dev"]["reporting_model"])
|
||||
sub_el(device_out, 'modelNumber', self.config["dev"]["reporting_model"])
|
||||
sub_el(device_out, 'friendlyName', self.config.dict["fhdhr"]["friendlyname"])
|
||||
sub_el(device_out, 'manufacturer', self.config.dict["dev"]["reporting_manufacturer"])
|
||||
sub_el(device_out, 'modelName', self.config.dict["dev"]["reporting_model"])
|
||||
sub_el(device_out, 'modelNumber', self.config.dict["dev"]["reporting_model"])
|
||||
sub_el(device_out, 'serialNumber')
|
||||
sub_el(device_out, 'UDN', "uuid:" + self.config["main"]["uuid"])
|
||||
sub_el(device_out, 'UDN', "uuid:" + self.config.dict["main"]["uuid"])
|
||||
|
||||
fakefile = BytesIO()
|
||||
fakefile.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
|
||||
@ -128,13 +137,13 @@ class HDHR_Hub():
|
||||
|
||||
def get_discover_json(self, base_url):
|
||||
jsondiscover = {
|
||||
"FriendlyName": self.config["fakehdhr"]["friendlyname"],
|
||||
"Manufacturer": "Silicondust",
|
||||
"ModelNumber": self.config["dev"]["reporting_model"],
|
||||
"FirmwareName": self.config["dev"]["reporting_firmware_name"],
|
||||
"TunerCount": self.config["fakehdhr"]["tuner_count"],
|
||||
"FirmwareVersion": self.config["dev"]["reporting_firmware_ver"],
|
||||
"DeviceID": self.config["main"]["uuid"],
|
||||
"FriendlyName": self.config.dict["fhdhr"]["friendlyname"],
|
||||
"Manufacturer": "Borondust",
|
||||
"ModelNumber": self.config.dict["dev"]["reporting_model"],
|
||||
"FirmwareName": self.config.dict["dev"]["reporting_firmware_name"],
|
||||
"TunerCount": self.config.dict["fhdhr"]["tuner_count"],
|
||||
"FirmwareVersion": self.config.dict["dev"]["reporting_firmware_ver"],
|
||||
"DeviceID": self.config.dict["main"]["uuid"],
|
||||
"DeviceAuth": "fHDHR",
|
||||
"BaseURL": "http://" + base_url,
|
||||
"LineupURL": "http://" + base_url + "/lineup.json"
|
||||
@ -143,7 +152,7 @@ class HDHR_Hub():
|
||||
|
||||
def get_lineup_status(self):
|
||||
if self.station_scan:
|
||||
channel_count = self.serviceproxy.get_station_total()
|
||||
channel_count = self.origserv.get_station_total()
|
||||
jsonlineup = {
|
||||
"ScanInProgress": "true",
|
||||
"Progress": 99,
|
||||
@ -153,14 +162,14 @@ class HDHR_Hub():
|
||||
jsonlineup = {
|
||||
"ScanInProgress": "false",
|
||||
"ScanPossible": "true",
|
||||
"Source": self.config["dev"]["reporting_tuner_type"],
|
||||
"SourceList": [self.config["dev"]["reporting_tuner_type"]],
|
||||
"Source": self.config.dict["dev"]["reporting_tuner_type"],
|
||||
"SourceList": [self.config.dict["dev"]["reporting_tuner_type"]],
|
||||
}
|
||||
return jsonlineup
|
||||
|
||||
def get_lineup_xml(self, base_url):
|
||||
out = ET.Element('Lineup')
|
||||
station_list = self.serviceproxy.get_station_list(base_url)
|
||||
station_list = self.origserv.get_station_list(base_url)
|
||||
for station_item in station_list:
|
||||
program_out = sub_el(out, 'Program')
|
||||
sub_el(program_out, 'GuideNumber', station_item['GuideNumber'])
|
||||
@ -199,11 +208,11 @@ class HDHR_HTTP_Server():
|
||||
|
||||
@app.route('/')
|
||||
def root_path():
|
||||
return hdhr.config["fakehdhr"]["friendlyname"]
|
||||
return hdhr.config.dict["fhdhr"]["friendlyname"]
|
||||
|
||||
@app.route('/favicon.ico', methods=['GET'])
|
||||
def favicon():
|
||||
return send_from_directory(hdhr.config["main"]["www_dir"],
|
||||
return send_from_directory(hdhr.config.dict["filedir"]["www_dir"],
|
||||
'favicon.ico',
|
||||
mimetype='image/vnd.microsoft.icon')
|
||||
|
||||
@ -241,7 +250,7 @@ class HDHR_HTTP_Server():
|
||||
@app.route('/lineup.json', methods=['GET'])
|
||||
def lineup_json():
|
||||
base_url = request.headers["host"]
|
||||
station_list = hdhr.serviceproxy.get_station_list(base_url)
|
||||
station_list = hdhr.origserv.get_station_list(base_url)
|
||||
return Response(status=200,
|
||||
response=json.dumps(station_list, indent=4),
|
||||
mimetype='application/json')
|
||||
@ -263,12 +272,36 @@ class HDHR_HTTP_Server():
|
||||
mimetype='application/json')
|
||||
|
||||
@app.route('/images', methods=['GET'])
|
||||
def images_nothing():
|
||||
if ('source' not in list(request.args.keys()) or 'id' not in list(request.args.keys()) or 'type' not in list(request.args.keys())):
|
||||
abort(404)
|
||||
def images():
|
||||
|
||||
image = hdhr.get_image(request.args)
|
||||
return Response(image, content_type='image/png', direct_passthrough=True)
|
||||
if 'source' not in list(request.args.keys()):
|
||||
image = hdhr.generate_image("content", "Unknown Request")
|
||||
else:
|
||||
|
||||
itemtype = 'content'
|
||||
if 'type' in list(request.args.keys()):
|
||||
itemtype = request.args["type"]
|
||||
|
||||
if request.args['source'] == 'epg':
|
||||
if 'id' in list(request.args.keys()):
|
||||
req_dict = {
|
||||
"source": request.args["source"],
|
||||
"type": request.args["type"],
|
||||
"id": request.args["id"],
|
||||
}
|
||||
image = hdhr.get_image(req_dict)
|
||||
else:
|
||||
itemmessage = "Unknown Request"
|
||||
image = hdhr.generate_image(itemtype, itemmessage)
|
||||
elif request.args['source'] == 'generate':
|
||||
itemmessage = "Unknown Request"
|
||||
if 'message' in list(request.args.keys()):
|
||||
itemmessage = request.args["message"]
|
||||
image = hdhr.generate_image(itemtype, itemmessage)
|
||||
else:
|
||||
itemmessage = "Unknown Request"
|
||||
image = hdhr.generate_image(itemtype, itemmessage)
|
||||
return Response(image, content_type=hdhr.get_image_type(image), direct_passthrough=True)
|
||||
|
||||
@app.route('/watch', methods=['GET'])
|
||||
def watch():
|
||||
@ -280,27 +313,24 @@ class HDHR_HTTP_Server():
|
||||
|
||||
tuner = hdhr.get_tuner()
|
||||
if not tuner:
|
||||
print("A " + method + " stream request for channel " +
|
||||
str(channel_id) + " was rejected do to a lack of available tuners.")
|
||||
abort(503)
|
||||
|
||||
channelUri = hdhr.serviceproxy.get_channel_stream(channel_id)
|
||||
print("Attempting a " + method + " stream request for channel " + str(channel_id))
|
||||
hdhr.tuner_usage(1)
|
||||
|
||||
channelUri = hdhr.origserv.get_channel_stream(channel_id)
|
||||
# print("Proxy URL determined as " + str(channelUri))
|
||||
|
||||
if method == "direct":
|
||||
duration = request.args.get('duration', default=0, type=int)
|
||||
|
||||
if not duration == 0:
|
||||
duration += time.time()
|
||||
chunksize = int(hdhr.config.dict["direct_stream"]['chunksize'])
|
||||
|
||||
req = requests.get(channelUri, stream=True)
|
||||
hdhr.tuner_usage(1)
|
||||
|
||||
def generate():
|
||||
try:
|
||||
yield ''
|
||||
for chunk in req.iter_content(chunk_size=int(hdhr.config["direct_stream"]['chunksize'])):
|
||||
if not duration == 0 and not time.time() < duration:
|
||||
req.close()
|
||||
hdhr.tuner_usage(-1)
|
||||
break
|
||||
for chunk in req.iter_content(chunk_size=chunksize):
|
||||
yield chunk
|
||||
except GeneratorExit:
|
||||
req.close()
|
||||
@ -311,7 +341,9 @@ class HDHR_HTTP_Server():
|
||||
|
||||
elif method == "ffmpeg":
|
||||
|
||||
ffmpeg_command = [hdhr.config["ffmpeg"]["ffmpeg_path"],
|
||||
bytes_per_read = int(hdhr.config.dict["ffmpeg"]["bytes_per_read"])
|
||||
|
||||
ffmpeg_command = [hdhr.config.dict["ffmpeg"]["ffmpeg_path"],
|
||||
"-i", channelUri,
|
||||
"-c", "copy",
|
||||
"-f", "mpegts",
|
||||
@ -321,12 +353,11 @@ class HDHR_HTTP_Server():
|
||||
]
|
||||
|
||||
ffmpeg_proc = subprocess.Popen(ffmpeg_command, stdout=subprocess.PIPE)
|
||||
hdhr.tuner_usage(1)
|
||||
|
||||
def generate():
|
||||
try:
|
||||
while True:
|
||||
videoData = ffmpeg_proc.stdout.read(int(hdhr.config["ffmpeg"]["bytes_per_read"]))
|
||||
videoData = ffmpeg_proc.stdout.read(bytes_per_read)
|
||||
if not videoData:
|
||||
break
|
||||
try:
|
||||
@ -366,12 +397,12 @@ class HDHR_HTTP_Server():
|
||||
return Response(status=200, response=currenthtmlerror, mimetype='text/html')
|
||||
|
||||
def __init__(self, config):
|
||||
self.config = config.copy()
|
||||
self.config = config
|
||||
|
||||
def run(self):
|
||||
self.http = WSGIServer((
|
||||
self.config["fakehdhr"]["address"],
|
||||
int(self.config["fakehdhr"]["port"])
|
||||
self.config.dict["fhdhr"]["address"],
|
||||
int(self.config.dict["fhdhr"]["port"])
|
||||
), self.app.wsgi_app)
|
||||
try:
|
||||
self.http.serve_forever()
|
||||
@ -379,7 +410,8 @@ class HDHR_HTTP_Server():
|
||||
self.http.stop()
|
||||
|
||||
|
||||
def interface_start(config, serviceproxy, epghandling):
|
||||
hdhr.hubprep(config, serviceproxy, epghandling)
|
||||
def interface_start(config, origserv, epghandling):
|
||||
print("Starting fHDHR Web Interface")
|
||||
hdhr.hubprep(config, origserv, epghandling)
|
||||
fakhdhrserver = HDHR_HTTP_Server(config)
|
||||
fakhdhrserver.run()
|
||||
89
fHDHR/originservice/__init__.py
Normal file
89
fHDHR/originservice/__init__.py
Normal file
@ -0,0 +1,89 @@
|
||||
import datetime
|
||||
|
||||
import fHDHR.originservice.nextpvr as serviceorigin
|
||||
from fHDHR.tools import hours_between_datetime
|
||||
from fHDHR.fHDHRerrors import LoginError
|
||||
|
||||
|
||||
class OriginService():
|
||||
|
||||
def __init__(self, settings):
|
||||
self.config = settings
|
||||
self.serviceorigin = serviceorigin.fHDHRservice(settings)
|
||||
if not self.serviceorigin.login():
|
||||
raise LoginError(self.config.dict["main"]["servicename"] + " Login Failed.")
|
||||
|
||||
self.streamtype = self.config.dict["fhdhr"]["stream_type"]
|
||||
self.channels = {
|
||||
"list": {},
|
||||
"list_updated": None,
|
||||
}
|
||||
|
||||
def append_channel_info(self, chanlist):
|
||||
for chan in chanlist:
|
||||
if chan["number"] not in list(self.channels["list"].keys()):
|
||||
self.channels["list"][chan["number"]] = {}
|
||||
for chankey in list(chan.keys()):
|
||||
self.channels["list"][chan["number"]][chankey] = chan[chankey]
|
||||
|
||||
def get_channels(self):
|
||||
|
||||
updatelist = False
|
||||
if not self.channels["list_updated"]:
|
||||
updatelist = True
|
||||
elif hours_between_datetime(self.channels["list_updated"], datetime.datetime.now()) > 12:
|
||||
updatelist = True
|
||||
|
||||
if updatelist:
|
||||
chanlist = self.serviceorigin.get_channels()
|
||||
self.append_channel_info(chanlist)
|
||||
self.channels["list_updated"] = datetime.datetime.now()
|
||||
|
||||
channel_list = []
|
||||
for chandict in list(self.channels["list"].keys()):
|
||||
channel_list.append(self.channels["list"][chandict])
|
||||
return channel_list
|
||||
|
||||
def get_fhdhr_stream_url(self, base_url, channel):
|
||||
return ('%s%s/watch?method=%s&channel=%s' %
|
||||
("http://",
|
||||
base_url,
|
||||
self.streamtype,
|
||||
channel['number']))
|
||||
|
||||
def get_station_list(self, base_url):
|
||||
station_list = []
|
||||
|
||||
for c in self.get_channels():
|
||||
station_list.append({
|
||||
'GuideNumber': c['number'],
|
||||
'GuideName': c['name'],
|
||||
'URL': self.get_fhdhr_stream_url(base_url, c),
|
||||
})
|
||||
return station_list
|
||||
|
||||
def get_channel_stream(self, channel_number):
|
||||
if channel_number not in list(self.channels["list"].keys()):
|
||||
self.get_channels()
|
||||
if channel_number not in list(self.channels["list"].keys()):
|
||||
return None
|
||||
if "stream_url" not in list(self.channels["list"][channel_number].keys()):
|
||||
chandict = self.get_channel_dict("number", channel_number)
|
||||
streamlist, caching = self.serviceorigin.get_channel_stream(chandict, self.channels["list"])
|
||||
if caching:
|
||||
self.append_channel_info(streamlist)
|
||||
return self.channels["list"][channel_number]["stream_url"]
|
||||
else:
|
||||
chanstreamdict = next(item for item in streamlist if item["number"] == channel_number)
|
||||
return chanstreamdict["stream_url"]
|
||||
|
||||
def get_station_total(self):
|
||||
chanlist = self.get_channels()
|
||||
return len(chanlist)
|
||||
|
||||
def get_channel_dict(self, keyfind, valfind):
|
||||
chanlist = self.get_channels()
|
||||
return next(item for item in chanlist if item[keyfind] == valfind)
|
||||
|
||||
def update_epg(self):
|
||||
return self.serviceorigin.update_epg()
|
||||
198
fHDHR/originservice/nextpvr.py
Normal file
198
fHDHR/originservice/nextpvr.py
Normal file
@ -0,0 +1,198 @@
|
||||
import xmltodict
|
||||
import json
|
||||
import hashlib
|
||||
import datetime
|
||||
|
||||
import fHDHR.tools
|
||||
|
||||
|
||||
class fHDHRservice():
|
||||
def __init__(self, settings):
|
||||
self.config = settings
|
||||
|
||||
self.web = fHDHR.tools.WebReq()
|
||||
|
||||
def login(self):
|
||||
print("Logging into NextPVR")
|
||||
self.sid = self.get_sid()
|
||||
if not self.sid:
|
||||
return False
|
||||
else:
|
||||
print("NextPVR Login Success")
|
||||
self.config.write(self.config.dict["main"]["dictpopname"], 'sid', self.sid)
|
||||
return True
|
||||
|
||||
def get_sid(self):
|
||||
if self.config.dict["origin"]["sid"]:
|
||||
return self.config.dict["origin"]["sid"]
|
||||
|
||||
initiate_url = ('%s%s:%s/service?method=session.initiate&ver=1.0&device=fhdhr' %
|
||||
("https://" if self.config.dict["origin"]["ssl"] else "http://",
|
||||
self.config.dict["origin"]["address"],
|
||||
str(self.config.dict["origin"]["port"]),
|
||||
))
|
||||
|
||||
initiate_req = self.web.session.get(initiate_url)
|
||||
initiate_dict = xmltodict.parse(initiate_req.content)
|
||||
|
||||
sid = initiate_dict['rsp']['sid']
|
||||
salt = initiate_dict['rsp']['salt']
|
||||
md5PIN = hashlib.md5(str(self.config.dict["origin"]['pin']).encode('utf-8')).hexdigest()
|
||||
string = ':%s:%s' % (md5PIN, salt)
|
||||
clientKey = hashlib.md5(string.encode('utf-8')).hexdigest()
|
||||
|
||||
login_url = ('%s%s:%s/service?method=session.login&sid=%s&md5=%s' %
|
||||
("https://" if self.config.dict["origin"]["ssl"] else "http://",
|
||||
self.config.dict["origin"]["address"],
|
||||
str(self.config.dict["origin"]["port"]),
|
||||
sid,
|
||||
clientKey
|
||||
))
|
||||
login_req = self.web.session.get(login_url)
|
||||
login_dict = xmltodict.parse(login_req.content)
|
||||
|
||||
loginsuccess = None
|
||||
if login_dict['rsp']['@stat'] == "ok":
|
||||
if login_dict['rsp']['allow_watch'] == "true":
|
||||
loginsuccess = sid
|
||||
|
||||
return loginsuccess
|
||||
|
||||
def get_channels(self):
|
||||
|
||||
data_url = ('%s%s:%s/service?method=channel.list&sid=%s' %
|
||||
("https://" if self.config.dict["origin"]["ssl"] else "http://",
|
||||
self.config.dict["origin"]["address"],
|
||||
str(self.config.dict["origin"]["port"]),
|
||||
self.sid
|
||||
))
|
||||
|
||||
data_req = self.web.session.get(data_url)
|
||||
data_dict = xmltodict.parse(data_req.content)
|
||||
|
||||
if 'channels' not in list(data_dict['rsp'].keys()):
|
||||
print("Could not retrieve channel list")
|
||||
return []
|
||||
|
||||
channel_o_list = data_dict['rsp']['channels']['channel']
|
||||
|
||||
channel_list = []
|
||||
for c in channel_o_list:
|
||||
dString = json.dumps(c)
|
||||
channel_dict = eval(dString)
|
||||
|
||||
clean_station_item = {
|
||||
"name": channel_dict["name"],
|
||||
"callsign": channel_dict["name"],
|
||||
"number": channel_dict["formatted-number"],
|
||||
"id": channel_dict["id"],
|
||||
}
|
||||
channel_list.append(clean_station_item)
|
||||
return channel_list
|
||||
|
||||
def get_channel_stream(self, chandict, allchandict):
|
||||
caching = True
|
||||
streamlist = []
|
||||
streamdict = {}
|
||||
streamurl = ('%s%s:%s/live?channel=%s&client=%s' %
|
||||
("https://" if self.config.dict["origin"]["ssl"] else "http://",
|
||||
self.config.dict["origin"]["address"],
|
||||
str(self.config.dict["origin"]["port"]),
|
||||
str(chandict["number"]),
|
||||
str(chandict["number"]),
|
||||
))
|
||||
streamdict = {"number": chandict["number"], "stream_url": streamurl}
|
||||
streamlist.append(streamdict)
|
||||
return streamlist, caching
|
||||
|
||||
def get_channel_thumbnail(self, channel_id):
|
||||
channel_thumb_url = ("%s%s:%s/service?method=channel.icon&channel_id=%s" %
|
||||
("https://" if self.config.dict["origin"]["ssl"] else "http://",
|
||||
self.config.dict["origin"]["address"],
|
||||
str(self.config.dict["origin"]["port"]),
|
||||
str(channel_id)
|
||||
))
|
||||
return channel_thumb_url
|
||||
|
||||
def get_content_thumbnail(self, content_id):
|
||||
item_thumb_url = ("%s%s:%s/service?method=channel.show.artwork&sid=%s&event_id=%s" %
|
||||
("https://" if self.config.dict["origin"]["ssl"] else "http://",
|
||||
self.config.dict["origin"]["address"],
|
||||
str(self.config.dict["origin"]["port"]),
|
||||
self.config.dict["origin"]["sid"],
|
||||
str(content_id)
|
||||
))
|
||||
return item_thumb_url
|
||||
|
||||
def update_epg(self):
|
||||
programguide = {}
|
||||
|
||||
for c in self.get_channels():
|
||||
|
||||
cdict = fHDHR.tools.xmldictmaker(c, ["callsign", "name", "number", "id"])
|
||||
|
||||
if str(cdict['number']) not in list(programguide.keys()):
|
||||
|
||||
programguide[str(cdict['number'])] = {
|
||||
"callsign": cdict["callsign"],
|
||||
"name": cdict["name"] or cdict["callsign"],
|
||||
"number": cdict["number"],
|
||||
"id": cdict["id"],
|
||||
"thumbnail": self.get_channel_thumbnail(cdict['id']),
|
||||
"listing": [],
|
||||
}
|
||||
|
||||
epg_url = ('%s%s:%s/service?method=channel.listings&channel_id=%s' %
|
||||
("https://" if self.config.dict["origin"]["ssl"] else "http://",
|
||||
self.config.dict["origin"]["address"],
|
||||
str(self.config.dict["origin"]["port"]),
|
||||
str(cdict["id"]),
|
||||
))
|
||||
epg_req = self.web.session.get(epg_url)
|
||||
epg_dict = xmltodict.parse(epg_req.content)
|
||||
|
||||
for program_listing in epg_dict["rsp"]["listings"]:
|
||||
for program_item in epg_dict["rsp"]["listings"][program_listing]:
|
||||
if not isinstance(program_item, str):
|
||||
|
||||
progdict = fHDHR.tools.xmldictmaker(program_item, ["start", "end", "title", "name", "subtitle", "rating", "description", "season", "episode", "id", "episodeTitle"])
|
||||
|
||||
clean_prog_dict = {
|
||||
"time_start": self.xmltimestamp_nextpvr(progdict["start"]),
|
||||
"time_end": self.xmltimestamp_nextpvr(progdict["end"]),
|
||||
"duration_minutes": self.duration_nextpvr_minutes(progdict["start"], progdict["end"]),
|
||||
"thumbnail": self.get_content_thumbnail(progdict['id']),
|
||||
"title": progdict['name'] or "Unavailable",
|
||||
"sub-title": progdict['subtitle'] or "Unavailable",
|
||||
"description": progdict['description'] or "Unavailable",
|
||||
"rating": progdict['rating'] or "N/A",
|
||||
"episodetitle": progdict['episodeTitle'],
|
||||
"releaseyear": None,
|
||||
"genres": [],
|
||||
"seasonnumber": progdict['season'],
|
||||
"episodenumber": progdict['episode'],
|
||||
"isnew": False,
|
||||
"id": progdict['id'] or self.xmltimestamp_nextpvr(progdict["start"]),
|
||||
}
|
||||
|
||||
if 'genre' in list(progdict.keys()):
|
||||
clean_prog_dict["genres"] = progdict['genre'].split(",")
|
||||
|
||||
if clean_prog_dict['sub-title'].startswith("Movie:"):
|
||||
clean_prog_dict['releaseyear'] = clean_prog_dict['sub-title'].split("Movie: ")[-1]
|
||||
clean_prog_dict['sub-title'] = "Unavailable"
|
||||
clean_prog_dict["genres"].append("Movie")
|
||||
|
||||
# TODO isNEW
|
||||
|
||||
programguide[str(cdict["number"])]["listing"].append(clean_prog_dict)
|
||||
|
||||
return programguide
|
||||
|
||||
def xmltimestamp_nextpvr(self, epochtime):
    """Convert a NextPVR epoch-milliseconds value into an XMLTV timestamp.

    Returns a 'YYYYmmddHHMMSS +0000' string.

    NOTE(review): fromtimestamp() renders *local* time, yet the offset is
    hard-coded to "+0000" — confirm the host clock/timezone is UTC.
    """
    moment = datetime.datetime.fromtimestamp(int(epochtime) / 1000)
    return moment.strftime('%Y%m%d%H%M%S') + " +0000"
|
||||
|
||||
def duration_nextpvr_minutes(self, starttime, endtime):
    """Return the span between two epoch-millisecond values in minutes (float)."""
    elapsed_ms = int(endtime) - int(starttime)
    return elapsed_ms / 1000 / 60
|
||||
@ -24,14 +24,17 @@ logger = logging.getLogger()
|
||||
|
||||
|
||||
# mostly from https://github.com/ZeWaren/python-upnp-ssdp-example
|
||||
def ssdpServerProcess(config):
|
||||
def ssdpServerProcess(settings):
|
||||
ssdp = SSDPServer()
|
||||
ssdp.ssdp_port = 1900
|
||||
ssdp.register('local',
|
||||
'uuid:' + config["main"]["uuid"] + '::upnp:rootdevice',
|
||||
'uuid:' + settings.dict["main"]["uuid"] + '::upnp:rootdevice',
|
||||
'upnp:rootdevice',
|
||||
'http://' + config["fakehdhr"]["discovery_address"] + ':' +
|
||||
config["fakehdhr"]["port"] + '/device.xml')
|
||||
'http://' + settings.dict["fhdhr"]["discovery_address"] + ':' +
|
||||
str(settings.dict["fhdhr"]["port"]) + '/device.xml')
|
||||
print("SSDP server Started on port " + str(ssdp.ssdp_port) +
|
||||
" and broadcasting the availability of " + settings.dict["fhdhr"]["friendlyname"] +
|
||||
" at " 'http://' + settings.dict["fhdhr"]["discovery_address"] + ':' + str(settings.dict["fhdhr"]["port"]))
|
||||
try:
|
||||
ssdp.run()
|
||||
except KeyboardInterrupt:
|
||||
81
fHDHR/tools/__init__.py
Normal file
81
fHDHR/tools/__init__.py
Normal file
@ -0,0 +1,81 @@
|
||||
import os
|
||||
import sys
|
||||
import ast
|
||||
import requests
|
||||
|
||||
UNARY_OPS = (ast.UAdd, ast.USub)
|
||||
BINARY_OPS = (ast.Add, ast.Sub, ast.Mult, ast.Div, ast.Mod)
|
||||
|
||||
|
||||
def clean_exit():
    """Flush both standard streams, then hard-exit the process.

    os._exit() bypasses atexit handlers and finally blocks, which is why the
    streams are flushed explicitly beforehand.
    """
    for stream in (sys.stderr, sys.stdout):
        stream.flush()
    os._exit(0)
|
||||
|
||||
|
||||
def xmldictmaker(inputdict, req_items, list_items=(), str_items=()):
    """Return a copy of *inputdict* guaranteed to contain every key in *req_items*.

    Missing required keys are added with value None.  Any required key whose
    value is falsy is then normalised: to [] if the key is listed in
    *list_items*, or to "" if listed in *str_items*.

    Args:
        inputdict: source mapping; it is never modified.
        req_items: iterable of keys that must exist in the result.
        list_items: required keys whose falsy values become empty lists.
        str_items: required keys whose falsy values become empty strings.

    Returns:
        dict: shallow copy of *inputdict* with the required keys filled in.
    """
    # Fix: the defaults were mutable lists ([]), a classic shared-state trap.
    # Only membership is tested on them, so tuples are a safe replacement.
    xml_dict = dict(inputdict)

    for req_item in req_items:
        if req_item not in xml_dict:
            xml_dict[req_item] = None
        if not xml_dict[req_item]:
            if req_item in list_items:
                xml_dict[req_item] = []
            elif req_item in str_items:
                xml_dict[req_item] = ""

    return xml_dict
|
||||
|
||||
|
||||
def is_arithmetic(s):
    """Return True when *s* parses as a pure numeric arithmetic expression.

    Accepted AST nodes: numeric literals, unary +/-, and the binary
    operators + - * / %.  Names, calls, strings, comparisons, or text that
    fails to parse at all are rejected.
    """
    # Whitelists inlined so the function is self-contained.
    unary_ok = (ast.UAdd, ast.USub)
    binary_ok = (ast.Add, ast.Sub, ast.Mult, ast.Div, ast.Mod)

    def _walk(node):
        # Anything outside the whitelist raises; the outer except maps
        # that to False, same as a syntax error.
        if isinstance(node, ast.Expression):
            return _walk(node.body)
        if isinstance(node, ast.Num):
            return True
        if isinstance(node, ast.UnaryOp):
            return isinstance(node.op, unary_ok) and _walk(node.operand)
        if isinstance(node, ast.BinOp):
            return (isinstance(node.op, binary_ok)
                    and _walk(node.left)
                    and _walk(node.right))
        raise ValueError('Unsupported type {}'.format(node))

    try:
        return _walk(ast.parse(s, mode='eval'))
    except (SyntaxError, ValueError):
        return False
|
||||
|
||||
|
||||
def isint(x):
    """Return True when *x* represents a whole number (e.g. "4", "4.0", 7).

    Non-numeric text, NaN, and infinities return False.

    Fix: int(float("inf")) raises OverflowError, which the original only
    caught as ValueError — so isint("inf") crashed instead of returning False.
    """
    try:
        as_float = float(x)
        as_int = int(as_float)
    except (ValueError, OverflowError):
        # ValueError: non-numeric text or NaN; OverflowError: +/-inf.
        return False
    else:
        return as_float == as_int
|
||||
|
||||
|
||||
def isfloat(x):
    """Return True when float(x) succeeds, i.e. *x* looks like a number."""
    try:
        float(x)
    except ValueError:
        return False
    return True
|
||||
|
||||
|
||||
def hours_between_datetime(first_time, later_time):
    """Return (first_time - later_time) expressed in hours (float).

    NOTE(review): the result is negative when *later_time* is the more
    recent moment — confirm callers pass arguments in that order.
    """
    delta = first_time - later_time
    return delta.total_seconds() / 60 / 60
|
||||
|
||||
|
||||
class WebReq():
    # Thin holder for a persistent requests.Session, so HTTP keep-alive and
    # connection pooling are shared by every fetch made through this object.
    def __init__(self):
        self.session = requests.Session()
|
||||
@ -1,178 +0,0 @@
|
||||
import os
|
||||
import sys
|
||||
import random
|
||||
import configparser
|
||||
import pathlib
|
||||
|
||||
|
||||
def clean_exit():
    """Flush stdout/stderr, then terminate immediately via os._exit(0)."""
    # os._exit skips atexit/finally cleanup, hence the explicit flushes.
    sys.stderr.flush()
    sys.stdout.flush()
    os._exit(0)
|
||||
|
||||
|
||||
class HDHRConfig():
    """Load, layer and validate the fHDHR configuration.

    Layering order (last write wins): built-in defaults (fakehdhr.ini),
    the service config (serviceconf.ini), then the user's config file.
    Fatal problems (missing config file, bad cache dir, missing
    credentials) terminate the process via clean_exit().
    """

    # NOTE(review): class-level attributes — shared across instances.
    config_file = None
    config_handler = configparser.ConfigParser()
    script_dir = None

    # section name -> {key: value}
    config = {}

    def __init__(self, script_dir, args):
        # Order matters: paths first, then layered imports, then fix-ups.
        self.get_config_path(script_dir, args)
        self.import_default_config(script_dir)
        self.import_service_config(script_dir)
        self.import_config()
        self.critical_config(script_dir)
        self.config_adjustments_this()
        self.config_adjustments()

    def get_config_path(self, script_dir, args):
        # Resolve --config_file; a missing or nonexistent path is fatal.
        if args.cfg:
            self.config_file = pathlib.Path(str(args.cfg))
        if not self.config_file or not os.path.exists(self.config_file):
            print("Config file missing, Exiting...")
            clean_exit()
        print("Loading Configuration File: " + str(self.config_file))

    def import_config(self):
        # User config file: overwrites any previously imported defaults.
        self.config_handler.read(self.config_file)
        for each_section in self.config_handler.sections():
            if each_section not in list(self.config.keys()):
                self.config[each_section] = {}
            for (each_key, each_val) in self.config_handler.items(each_section):
                self.config[each_section.lower()][each_key.lower()] = each_val

    def import_default_config(self, script_dir):
        # Built-in defaults from data/internal_config/fakehdhr.ini.
        # The fHDHR_None/True/False sentinels map INI text to Python values.
        config_handler = configparser.ConfigParser()
        data_dir = pathlib.Path(script_dir).joinpath('data')
        internal_config_dir = pathlib.Path(data_dir).joinpath('internal_config')
        serviceconf = pathlib.Path(internal_config_dir).joinpath('fakehdhr.ini')
        config_handler.read(serviceconf)
        for each_section in config_handler.sections():
            if each_section not in list(self.config.keys()):
                self.config[each_section] = {}
            for (each_key, each_val) in config_handler.items(each_section):
                if each_val == "fHDHR_None":
                    each_val = None
                elif each_val == "fHDHR_True":
                    each_val = True
                elif each_val == "fHDHR_False":
                    each_val = False
                self.config[each_section.lower()][each_key.lower()] = each_val

    def import_service_config(self, script_dir):
        # Same sentinel handling for data/internal_config/serviceconf.ini.
        config_handler = configparser.ConfigParser()
        data_dir = pathlib.Path(script_dir).joinpath('data')
        internal_config_dir = pathlib.Path(data_dir).joinpath('internal_config')
        serviceconf = pathlib.Path(internal_config_dir).joinpath('serviceconf.ini')
        config_handler.read(serviceconf)
        for each_section in config_handler.sections():
            if each_section not in list(self.config.keys()):
                self.config[each_section] = {}
            for (each_key, each_val) in config_handler.items(each_section):
                if each_val == "fHDHR_None":
                    each_val = None
                elif each_val == "fHDHR_True":
                    each_val = True
                elif each_val == "fHDHR_False":
                    each_val = False
                self.config[each_section.lower()][each_key.lower()] = each_val

    def write(self, section, key, value):
        # Persist a single setting: update memory, then rewrite the whole
        # user config file on disk.
        self.config[section][key] = value
        self.config_handler.set(section, key, value)

        with open(self.config_file, 'w') as config_file:
            self.config_handler.write(config_file)

    def critical_config(self, script_dir):
        # Derive the filesystem paths the rest of the app depends on.
        # An invalid cache directory is fatal.

        self.config["main"]["script_dir"] = script_dir

        data_dir = pathlib.Path(script_dir).joinpath('data')
        self.config["main"]["data_dir"] = data_dir

        self.config["fakehdhr"]["font"] = pathlib.Path(data_dir).joinpath('garamond.ttf')

        if not self.config["main"]["cache_dir"]:
            self.config["main"]["cache_dir"] = pathlib.Path(data_dir).joinpath('cache')
        else:
            self.config["main"]["cache_dir"] = pathlib.Path(self.config["main"]["cache_dir"])
        if not self.config["main"]["cache_dir"].is_dir():
            print("Invalid Cache Directory. Exiting...")
            clean_exit()
        cache_dir = self.config["main"]["cache_dir"]

        # Pre-create the "empty" EPG cache location used as a fallback.
        empty_cache = pathlib.Path(cache_dir).joinpath('empty_cache')
        self.config["empty"]["empty_cache"] = empty_cache
        if not empty_cache.is_dir():
            empty_cache.mkdir()
        self.config["empty"]["empty_cache_file"] = pathlib.Path(empty_cache).joinpath('epg.json')

        www_dir = pathlib.Path(data_dir).joinpath('www')
        self.config["main"]["www_dir"] = www_dir
        self.config["main"]["favicon"] = pathlib.Path(www_dir).joinpath('favicon.ico')

    def config_adjustments(self):

        # generate UUID here for when we are not using docker
        if self.config["main"]["uuid"] is None:
            print("No UUID found. Generating one now...")
            # from https://pynative.com/python-generate-random-string/
            # create a string that wouldn't be a real device uuid for
            self.config["main"]["uuid"] = ''.join(random.choice("hijklmnopqrstuvwxyz") for i in range(8))
            self.write('main', 'uuid', self.config["main"]["uuid"])
            print("UUID set to: " + self.config["main"]["uuid"] + "...")

        # Fall back to the bind address for SSDP discovery unless binding
        # to all interfaces (0.0.0.0 is not announceable).
        if not self.config["fakehdhr"]["discovery_address"]:
            if self.config["fakehdhr"]["address"] != "0.0.0.0":
                self.config["fakehdhr"]["discovery_address"] = self.config["fakehdhr"]["address"]

        print("Server is set to run on " +
              str(self.config["fakehdhr"]["address"]) + ":" +
              str(self.config["fakehdhr"]["port"]))

    def config_adjustments_this(self):
        # Rename the service-specific section (named by main.dictpopname)
        # to the generic "proxy" key the rest of the code expects.
        self.config["proxy"] = self.config.pop(self.config["main"]["dictpopname"])
        self.config_adjustments_proxy()
        self.config_adjustments_zap2it()

    def config_adjustments_proxy(self):
        cache_dir = self.config["main"]["cache_dir"]

        # main.credentials lists the proxy keys that must be non-empty;
        # any missing credential is fatal.
        if self.config["main"]["credentials"]:
            credentials_list = self.config["main"]["credentials"].split(",")
            creds_missing = False
            if len(credentials_list):
                for cred_item in credentials_list:
                    if not self.config["proxy"][cred_item]:
                        creds_missing = True
            if creds_missing:
                print(self.config["main"]["servicename"] + " Login Credentials Missing. Exiting...")
                clean_exit()

        # Per-service cache layout: sid file, EPG json, web cache dir.
        proxy_cache = pathlib.Path(cache_dir).joinpath('proxy')
        self.config["main"]["proxy_cache"] = proxy_cache
        if not proxy_cache.is_dir():
            proxy_cache.mkdir()
        self.config["proxy"]["sidfile"] = pathlib.Path(proxy_cache).joinpath('sid.txt')
        self.config["proxy"]["epg_cache"] = pathlib.Path(proxy_cache).joinpath('epg.json')
        proxy_web_cache = pathlib.Path(proxy_cache).joinpath('proxy_web_cache')
        self.config["main"]["proxy_web_cache"] = proxy_web_cache
        if not proxy_web_cache.is_dir():
            proxy_web_cache.mkdir()

    def config_adjustments_zap2it(self):
        # Mirror of the proxy cache layout for the zap2it EPG source.
        cache_dir = self.config["main"]["cache_dir"]

        zap_cache = pathlib.Path(cache_dir).joinpath('zap2it')
        self.config["main"]["zap_cache"] = zap_cache
        if not zap_cache.is_dir():
            zap_cache.mkdir()
        self.config["zap2it"]["epg_cache"] = pathlib.Path(zap_cache).joinpath('epg.json')
        zap_web_cache = pathlib.Path(zap_cache).joinpath('zap_web_cache')
        self.config["main"]["zap_web_cache"] = zap_web_cache
        if not zap_web_cache.is_dir():
            zap_web_cache.mkdir()
|
||||
74
main.py
74
main.py
@ -1,74 +1,14 @@
|
||||
#!/usr/bin/env python3
|
||||
# coding=utf-8
|
||||
import os
|
||||
import sys
|
||||
import pathlib
|
||||
import argparse
|
||||
from multiprocessing import Process
|
||||
from multiprocessing import freeze_support
|
||||
|
||||
import fhdhrconfig
|
||||
import proxyservice
|
||||
import fakehdhr
|
||||
import epghandler
|
||||
import ssdpserver
|
||||
|
||||
if sys.version_info.major == 2 or sys.version_info < (3, 3):
|
||||
print('Error: FakeHDHR requires python 3.3+.')
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def get_args():
    """Parse the command line: one optional --config_file path (stored as .cfg)."""
    arg_parser = argparse.ArgumentParser(description='FakeHDHR.', epilog='')
    arg_parser.add_argument('--config_file', dest='cfg', type=str,
                            default=None, help='')
    return arg_parser.parse_args()
|
||||
|
||||
|
||||
def clean_exit():
    """Flush stdout/stderr, then terminate immediately via os._exit(0)."""
    # os._exit skips atexit/finally cleanup, hence the explicit flushes.
    sys.stderr.flush()
    sys.stdout.flush()
    os._exit(0)
|
||||
from fHDHR.cli import run
|
||||
|
||||
SCRIPT_DIR = pathlib.Path(os.path.dirname(os.path.abspath(__file__)))
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
# Gather args
|
||||
args = get_args()
|
||||
|
||||
# set to directory of script
|
||||
script_dir = pathlib.Path(os.path.dirname(os.path.abspath(__file__)))
|
||||
|
||||
# Open Configuration File
|
||||
print("Opening and Verifying Configuration File.")
|
||||
config = fhdhrconfig.HDHRConfig(script_dir, args)
|
||||
|
||||
# Open proxyservice
|
||||
serviceproxy = proxyservice.proxyserviceFetcher(config.config)
|
||||
|
||||
# Open EPG Handler
|
||||
epghandling = epghandler.EPGhandler(config.config, serviceproxy)
|
||||
|
||||
try:
|
||||
|
||||
print("Starting EPG thread...")
|
||||
epgServer = Process(target=epghandler.epgServerProcess, args=(config.config.copy(), epghandling))
|
||||
epgServer.start()
|
||||
|
||||
print("Starting fHDHR Interface")
|
||||
fhdhrServer = Process(target=fakehdhr.interface_start, args=(config.config.copy(), serviceproxy, epghandling))
|
||||
fhdhrServer.start()
|
||||
|
||||
if (config.config["fakehdhr"]["discovery_address"] and
|
||||
config.config["fakehdhr"]["discovery_address"] != "0.0.0.0"):
|
||||
print("Starting SSDP server...")
|
||||
ssdpServer = Process(target=ssdpserver.ssdpServerProcess, args=(config.config.copy(),))
|
||||
ssdpServer.daemon = True
|
||||
ssdpServer.start()
|
||||
else:
|
||||
ssdpServer = None
|
||||
print("Not Starting SSDP server...")
|
||||
|
||||
except KeyboardInterrupt:
|
||||
print('^C received, shutting down the server')
|
||||
epgServer.terminate()
|
||||
fhdhrServer.terminate()
|
||||
if ssdpServer:
|
||||
ssdpServer.terminate()
|
||||
clean_exit()
|
||||
freeze_support()
|
||||
sys.exit(run.main(SCRIPT_DIR))
|
||||
|
||||
@ -1,304 +0,0 @@
|
||||
import os
|
||||
import xmltodict
|
||||
import json
|
||||
import hashlib
|
||||
import datetime
|
||||
import urllib.error
|
||||
import urllib.parse
|
||||
import urllib.request
|
||||
|
||||
|
||||
def xmldictmaker(inputdict, req_items, list_items=(), str_items=()):
    """Return a copy of *inputdict* guaranteed to contain every key in *req_items*.

    Missing required keys are added with value None.  Any required key whose
    value is falsy is then normalised: to [] if the key is listed in
    *list_items*, or to "" if listed in *str_items*.

    Args:
        inputdict: source mapping; it is never modified.
        req_items: iterable of keys that must exist in the result.
        list_items: required keys whose falsy values become empty lists.
        str_items: required keys whose falsy values become empty strings.

    Returns:
        dict: shallow copy of *inputdict* with the required keys filled in.
    """
    # Fix: the defaults were mutable lists ([]), a classic shared-state trap.
    # Only membership is tested on them, so tuples are a safe replacement.
    xml_dict = dict(inputdict)

    for req_item in req_items:
        if req_item not in xml_dict:
            xml_dict[req_item] = None
        if not xml_dict[req_item]:
            if req_item in list_items:
                xml_dict[req_item] = []
            elif req_item in str_items:
                xml_dict[req_item] = ""

    return xml_dict
|
||||
|
||||
|
||||
class NextPVR_Auth():
    """Handle the NextPVR session handshake and cache the session id (SID)."""

    # NOTE(review): class-level dict — shared across instances.
    config = {
        'npvrURL': '',
        'npvrSID': '',
        'npvrPIN': '',
    }
    # Path of the on-disk SID cache file.
    sidfile = None

    def __init__(self, config):
        # Capture base URL, PIN and SID cache path from the app config.
        self.sidfile = config["proxy"]["sidfile"]
        self.config["npvrPIN"] = config["proxy"]["pin"]
        self.config["npvrURL"] = ('%s%s:%s' %
                                  ("https://" if config["proxy"]["ssl"] else "http://",
                                   config["proxy"]["address"],
                                   str(config["proxy"]["port"]),
                                   ))

    def _check_sid(self):
        # Ensure self.config['sid'] is populated: from memory, from the
        # cache file, or by performing a fresh login.
        if 'sid' not in self.config:
            if os.path.isfile(self.sidfile):
                with open(self.sidfile, 'r') as text_file:
                    self.config['sid'] = text_file.read()
                print('Read SID from file.')
            else:
                self._get_sid()

        return True

    def _get_sid(self):
        # NextPVR session.initiate/session.login handshake:
        # initiate returns a sid + salt; the client proves the PIN by
        # sending md5(":" + md5(pin) + ":" + salt).
        sid = ''
        salt = ''
        clientKey = ''

        initiate_url = "%s/service?method=session.initiate&ver=1.0&device=fhdhr" % self.config['npvrURL']

        initiate_req = urllib.request.urlopen(initiate_url)
        initiate_dict = xmltodict.parse(initiate_req)

        sid = initiate_dict['rsp']['sid']
        salt = initiate_dict['rsp']['salt']
        md5PIN = hashlib.md5(self.config['npvrPIN'].encode('utf-8')).hexdigest()
        string = ':%s:%s' % (md5PIN, salt)
        clientKey = hashlib.md5(string.encode('utf-8')).hexdigest()

        login_url = '%s/service?method=session.login&sid=%s&md5=%s' % (self.config['npvrURL'], sid, clientKey)
        login_req = urllib.request.urlopen(login_url)
        login_dict = xmltodict.parse(login_req)

        # Only persist the SID when the server grants watch permission;
        # otherwise leave an empty sid so later calls fail visibly.
        if login_dict['rsp']['allow_watch'] == "true":
            self.config['sid'] = sid
            with open(self.sidfile, 'w') as text_file:
                text_file.write(self.config['sid'])
            print('Wrote SID to file.')
        else:
            print("NextPVR Login Failed")
            self.config['sid'] = ''
|
||||
|
||||
|
||||
def xmltimestamp_nextpvr(epochtime):
    """Convert a NextPVR epoch-milliseconds value into an XMLTV timestamp.

    Returns a 'YYYYmmddHHMMSS +0000' string.

    NOTE(review): fromtimestamp() renders *local* time, yet the offset is
    hard-coded to "+0000" — confirm the host clock/timezone is UTC.
    """
    moment = datetime.datetime.fromtimestamp(int(epochtime) / 1000)
    return moment.strftime('%Y%m%d%H%M%S') + " +0000"
|
||||
|
||||
|
||||
def duration_nextpvr_minutes(starttime, endtime):
    """Return the span between two epoch-millisecond values in minutes (float)."""
    elapsed_ms = int(endtime) - int(starttime)
    return elapsed_ms / 1000 / 60
|
||||
|
||||
|
||||
class proxyserviceFetcher():
    """Bridge to a NextPVR backend: channel list, stream URLs, and EPG cache."""

    def __init__(self, config):
        # Work on a copy so later pops/renames don't affect the caller.
        self.config = config.copy()

        self.epg_cache = None
        self.epg_cache_file = self.config["proxy"]["epg_cache"]

        self.urls = {}
        self.url_assembler()

        self.auth = NextPVR_Auth(config)

        # Warm the in-memory EPG from the on-disk cache, if present.
        self.epg_cache = self.epg_cache_open()

    def epg_cache_open(self):
        # Returns the parsed EPG cache dict, or None when no file exists.
        epg_cache = None
        if os.path.isfile(self.epg_cache_file):
            with open(self.epg_cache_file, 'r') as epgfile:
                epg_cache = json.load(epgfile)
        return epg_cache

    def thumb_url(self, thumb_type, base_url, thumbnail):
        # NOTE(review): both branches build the identical URL; returns None
        # for any other thumb_type.
        if thumb_type == "channel":
            return "http://" + str(base_url) + str(thumbnail)
        elif thumb_type == "content":
            return "http://" + str(base_url) + str(thumbnail)

    def url_assembler(self):
        # Placeholder kept for interface parity with other service proxies.
        pass

    def get_channels(self):
        """Fetch the channel list from NextPVR and normalise each entry."""
        self.auth._check_sid()

        url = ('%s%s:%s/service?method=channel.list&sid=%s' %
               ("https://" if self.config["proxy"]["ssl"] else "http://",
                self.config["proxy"]["address"],
                str(self.config["proxy"]["port"]),
                self.auth.config['sid']
                ))

        r = urllib.request.urlopen(url)
        data_dict = xmltodict.parse(r)

        if 'channels' not in list(data_dict['rsp'].keys()):
            print("could not retrieve channel list")
            return []

        channel_o_list = data_dict['rsp']['channels']['channel']

        channel_list = []
        for c in channel_o_list:
            # NOTE(review): json.dumps + eval round-trips the OrderedDict
            # into a plain dict — json.loads would be the safe equivalent.
            dString = json.dumps(c)
            channel_dict = eval(dString)
            clean_station_item = {
                "name": channel_dict["name"],
                "callsign": channel_dict["name"],
                "number": channel_dict["formatted-number"],
                "id": channel_dict["id"],
            }
            channel_list.append(clean_station_item)
        return channel_list

    def get_station_list(self, base_url):
        """Return HDHR-style lineup entries pointing back at this server."""
        station_list = []

        for c in self.get_channels():
            # ffmpeg streams go through /watch?method=ffmpeg; anything else
            # is served as a direct passthrough.
            if self.config["fakehdhr"]["stream_type"] == "ffmpeg":
                watchtype = "ffmpeg"
            else:
                watchtype = "direct"
            url = ('%s%s/watch?method=%s&channel=%s' %
                   ("http://",
                    base_url,
                    watchtype,
                    c['number']
                    ))
            station_list.append(
                {
                    'GuideNumber': str(c['number']),
                    'GuideName': c['name'],
                    'URL': url
                })
        return station_list

    def get_station_total(self):
        # Count of channels; requires a live channel.list call.
        total_channels = 0
        for c in self.get_channels():
            total_channels += 1
        return total_channels

    def get_channel_stream(self, id):
        # NOTE(review): parameter shadows the builtin id(); the channel id
        # doubles as the NextPVR client identifier.
        url = ('%s%s:%s/live?channel=%s&client=%s' %
               ("https://" if self.config["proxy"]["ssl"] else "http://",
                self.config["proxy"]["address"],
                str(self.config["proxy"]["port"]),
                str(id),
                str(id),
                ))
        return url

    def get_channel_streams(self):
        # Map of channel number -> direct NextPVR /live URL.
        streamdict = {}
        for c in self.get_channels():
            url = ('%s%s:%s/live?channel=%s&client=%s' %
                   ("https://" if self.config["proxy"]["ssl"] else "http://",
                    self.config["proxy"]["address"],
                    str(self.config["proxy"]["port"]),
                    str(c["number"]),
                    str(c["number"]),
                    ))
            streamdict[str(c["number"])] = url
        return streamdict

    def get_channel_thumbnail(self, channel_id):
        # URL of the NextPVR channel icon endpoint for this channel.
        channel_thumb_url = ("%s%s:%s/service?method=channel.icon&channel_id=%s" %
                             ("https://" if self.config["proxy"]["ssl"] else "http://",
                              self.config["proxy"]["address"],
                              str(self.config["proxy"]["port"]),
                              str(channel_id)
                              ))
        return channel_thumb_url

    def get_content_thumbnail(self, content_id):
        # URL of the show-artwork endpoint; needs a valid session id.
        self.auth._check_sid()
        item_thumb_url = ("%s%s:%s/service?method=channel.show.artwork&sid=%s&event_id=%s" %
                          ("https://" if self.config["proxy"]["ssl"] else "http://",
                           self.config["proxy"]["address"],
                           str(self.config["proxy"]["port"]),
                           self.auth.config['sid'],
                           str(content_id)
                           ))
        return item_thumb_url

    def update_epg(self):
        """Rebuild the program guide from NextPVR listings and cache it as JSON.

        Returns the guide dict: channel number -> channel info + sorted
        "listing" entries.
        """
        print('Updating NextPVR EPG cache file.')
        self.auth._check_sid()

        programguide = {}

        for c in self.get_channels():

            # xmldictmaker guarantees the keys exist (None when absent).
            cdict = xmldictmaker(c, ["callsign", "name", "number", "id"])

            if str(cdict['number']) not in list(programguide.keys()):

                programguide[str(cdict['number'])] = {
                    "callsign": cdict["callsign"],
                    "name": cdict["name"] or cdict["callsign"],
                    "number": cdict["number"],
                    "id": cdict["id"],
                    "thumbnail": ("/images?source=proxy&type=channel&id=%s" % (str(cdict['id']))),
                    "listing": [],
                }

            # One channel.listings call per channel.
            epg_url = ('%s%s:%s/service?method=channel.listings&channel_id=%s' %
                       ("https://" if self.config["proxy"]["ssl"] else "http://",
                        self.config["proxy"]["address"],
                        str(self.config["proxy"]["port"]),
                        str(cdict["id"]),
                        ))
            epg_req = urllib.request.urlopen(epg_url)
            epg_dict = xmltodict.parse(epg_req)

            for program_listing in epg_dict["rsp"]["listings"]:
                for program_item in epg_dict["rsp"]["listings"][program_listing]:
                    # Skip bare-string artifacts from xmltodict parsing.
                    if not isinstance(program_item, str):

                        progdict = xmldictmaker(program_item, ["start", "end", "title", "name", "subtitle", "rating", "description", "season", "episode", "id", "episodeTitle"])

                        clean_prog_dict = {
                            "time_start": xmltimestamp_nextpvr(progdict["start"]),
                            "time_end": xmltimestamp_nextpvr(progdict["end"]),
                            "duration_minutes": duration_nextpvr_minutes(progdict["start"], progdict["end"]),
                            "thumbnail": ("/images?source=proxy&type=content&id=%s" % (str(progdict['id']))),
                            "title": progdict['name'] or "Unavailable",
                            "sub-title": progdict['subtitle'] or "Unavailable",
                            "description": progdict['description'] or "Unavailable",
                            "rating": progdict['rating'] or "N/A",
                            "episodetitle": progdict['episodeTitle'],
                            "releaseyear": None,
                            "genres": [],
                            "seasonnumber": progdict['season'],
                            "episodenumber": progdict['episode'],
                            "isnew": False,
                            "id": progdict['id'] or xmltimestamp_nextpvr(progdict["start"]),
                        }

                        if 'genre' in list(progdict.keys()):
                            clean_prog_dict["genres"] = progdict['genre'].split(",")

                        # NextPVR encodes movie release year in the subtitle
                        # as "Movie: <year>"; hoist it into releaseyear.
                        if clean_prog_dict['sub-title'].startswith("Movie:"):
                            clean_prog_dict['releaseyear'] = clean_prog_dict['sub-title'].split("Movie: ")[-1]
                            clean_prog_dict['sub-title'] = "Unavailable"
                            clean_prog_dict["genres"].append("Movie")

                        # TODO isNEW

                        programguide[str(cdict["number"])]["listing"].append(clean_prog_dict)

        # Chronological order per channel before caching.
        for cnum in programguide:
            programguide[cnum]["listing"] = sorted(programguide[cnum]["listing"], key=lambda i: i['time_start'])

        self.epg_cache = programguide
        with open(self.epg_cache_file, 'w') as epgfile:
            epgfile.write(json.dumps(programguide, indent=4))
        print('Wrote updated NextPVR EPG cache file.')
        return programguide
|
||||
Loading…
Reference in New Issue
Block a user