Commit 6baecab960 (parent 567383f4ee)
Commit message: test
README.md | 23
@@ -1 +1,24 @@
# FakeHDHR_NextPVR

Based on original code from:

* [tvhProxy by jkaberg](https://github.com/jkaberg/tvhProxy)
* [locast2plex by tgorgdotcom](https://github.com/tgorgdotcom/locast2plex)
* my own contributions to locast2plex

Until I have time to set up a wiki for this project, instructions will live in this `README.md`.

PRs welcome for:

* Docker support

Vague instructions (specific details intentionally excluded):

* Install ffmpeg and verify it is accessible in PATH. Otherwise, you may specify its path in your configuration later.
* Install Python3 and Python3-pip. There will be no support for Python2.
* Download the zip of the `master` branch, or `git clone` the repository.
* Run `pip3 install -r requirements.txt`.
* Copy the included configuration example to a known path and adjust as needed (a minimal sketch follows this section). The script will look in the current directory for `config.ini`, but a different file can be specified with the command-line argument `--config_file=`.
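The configuration example referenced above is not shown in this commit, so the snippet below is a hedged sketch of a minimal starting point, written with Python's `configparser` to match how `fhdhrconfig/__init__.py` reads the file. The section and key names come from the defaults in that module (only the NextPVR `pin` has no default); the NextPVR address and pin values are placeholders, not real settings.

```python
# Hypothetical starter-config writer; every value here is illustrative.
import configparser

config = configparser.ConfigParser()
config["fakehdhr"] = {
    "address": "0.0.0.0",       # where the fake HDHomeRun listens (default)
    "port": "5004",             # default port
    "epg_method": "proxy",      # "proxy", "empty", or "zap2it"
}
config["nextpvr"] = {
    "address": "192.168.1.50",  # placeholder: your NextPVR host
    "port": "8866",             # NextPVR default port
    "pin": "1234",              # placeholder: required, startup aborts without it
}

with open("config.ini", "w") as configfile:
    config.write(configfile)
```

The server would then be started with something like `python3 main.py --config_file=/path/to/config.ini`.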
data/cache/PLACEHOLDER | 0 (new empty file, vendored)
data/garamond.ttf | BIN (new binary file)
data/www/favicon.ico | BIN (new binary file, 15 KiB)
data/www/images/default-channel-thumb.png | BIN (new binary file, 1.4 KiB)
data/www/images/default-content-thumb.png | BIN (new binary file, 1.4 KiB)
epghandler/__init__.py | 252 (new file)
@@ -0,0 +1,252 @@
import os
import sys
import time
import datetime
from io import BytesIO
import json
import xml.etree.ElementTree as ET

from . import zap2it


def sub_el(parent, name, text=None, **kwargs):
    el = ET.SubElement(parent, name, **kwargs)
    if text:
        el.text = text
    return el


def clean_exit():
    sys.stderr.flush()
    sys.stdout.flush()
    os._exit(0)


# Dispatches EPG work to the configured source: "empty", "proxy" (NextPVR), or "zap2it".
class EPGhandler():

    def __init__(self, config, serviceproxy):
        self.config = config.config
        self.serviceproxy = serviceproxy
        self.zapepg = zap2it.ZapEPG(config)

        self.epg_cache = None

        self.empty_cache_dir = config.config["main"]["empty_cache"]
        self.empty_cache_file = config.config["main"]["empty_cache_file"]

    def get_epg(self):
        if self.config["fakehdhr"]["epg_method"] == "empty":
            epgdict = self.epg_cache_open()
        elif self.config["fakehdhr"]["epg_method"] == "proxy":
            epgdict = self.serviceproxy.epg_cache_open()
        elif self.config["fakehdhr"]["epg_method"] == "zap2it":
            epgdict = self.zapepg.epg_cache_open()
        return epgdict

    def epg_cache_open(self):
        epg_cache = None
        if os.path.isfile(self.empty_cache_file):
            with open(self.empty_cache_file, 'r') as epgfile:
                epg_cache = json.load(epgfile)
        return epg_cache

    def get_xmltv(self, base_url):
        epgdict = self.get_epg()
        if not epgdict:
            return self.dummyxml()

        epg_method = self.config["fakehdhr"]["epg_method"]

        out = ET.Element('tv')
        out.set('source-info-url', 'NextPVR')
        out.set('source-info-name', 'NextPVR')
        out.set('generator-info-name', 'FAKEHDHR')
        out.set('generator-info-url', 'FAKEHDHR/FakeHDHR_NextPVR')

        for channel in list(epgdict.keys()):
            c_out = sub_el(out, 'channel', id=epgdict[channel]['id'])
            sub_el(c_out, 'display-name',
                   text='%s %s' % (epgdict[channel]['number'], epgdict[channel]['callsign']))
            sub_el(c_out, 'display-name', text=epgdict[channel]['number'])
            sub_el(c_out, 'display-name', text=epgdict[channel]['callsign'])

            if epg_method == "empty":
                sub_el(c_out, 'icon', src=("http://" + str(base_url) + str(epgdict[channel]['thumbnail'])))
            elif epg_method == "proxy":
                sub_el(c_out, 'icon', src=("http://" + str(base_url) + str(epgdict[channel]['thumbnail'])))
            elif epg_method == "zap2it":
                sub_el(c_out, 'icon', src=(str(epgdict[channel]['thumbnail'])))
            else:
                sub_el(c_out, 'icon', src=(str(epgdict[channel]['thumbnail'])))

        for channel in list(epgdict.keys()):
            channel_listing = epgdict[channel]['listing']

            for program in channel_listing:

                prog_out = sub_el(out, 'programme',
                                  start=program['time_start'],
                                  stop=program['time_end'],
                                  channel=epgdict[channel]["id"])

                if program['title']:
                    sub_el(prog_out, 'title', lang='en', text=program['title'])

                if 'movie' in program['genres'] and program['releaseyear']:
                    sub_el(prog_out, 'sub-title', lang='en', text='Movie: ' + program['releaseyear'])
                elif program['episodetitle']:
                    sub_el(prog_out, 'sub-title', lang='en', text=program['episodetitle'])

                sub_el(prog_out, 'length', units='minutes', text=str(int(program['duration_minutes'])))

                for f in program['genres']:
                    sub_el(prog_out, 'category', lang='en', text=f)
                    sub_el(prog_out, 'genre', lang='en', text=f)

                if program["thumbnail"] is not None:
                    if epg_method == "empty":
                        sub_el(prog_out, 'icon', src=("http://" + str(base_url) + str(program['thumbnail'])))
                    elif epg_method == "proxy":
                        sub_el(prog_out, 'icon', src=("http://" + str(base_url) + str(program['thumbnail'])))
                    elif epg_method == "zap2it":
                        sub_el(prog_out, 'icon', src=(str(program['thumbnail'])))
                    else:
                        sub_el(prog_out, 'icon', src=(str(program['thumbnail'])))

                if program['rating']:
                    r = ET.SubElement(prog_out, 'rating')
                    sub_el(r, 'value', text=program['rating'])

                if 'seasonnumber' in list(program.keys()) and 'episodenumber' in list(program.keys()):
                    if program['seasonnumber'] and program['episodenumber']:
                        s_ = int(program['seasonnumber'], 10)
                        e_ = int(program['episodenumber'], 10)
                        sub_el(prog_out, 'episode-num', system='common',
                               text='S%02dE%02d' % (s_, e_))
                        sub_el(prog_out, 'episode-num', system='xmltv_ns',
                               text='%d.%d.' % (int(s_)-1, int(e_)-1))
                        sub_el(prog_out, 'episode-num', system='SxxExx',
                               text='S%02dE%02d' % (s_, e_))

                # if 'New' in event['flag'] and 'live' not in event['flag']:
                #     sub_el(prog_out, 'new')

        fakefile = BytesIO()
        fakefile.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
        fakefile.write(ET.tostring(out, encoding='UTF-8'))
        return fakefile.getvalue()

    def dummyxml(self):
        out = ET.Element('tv')
        out.set('source-info-url', 'NextPVR')
        out.set('source-info-name', 'NextPVR')
        out.set('generator-info-name', 'FAKEHDHR')
        out.set('generator-info-url', 'FAKEHDHR/FakeHDHR_NextPVR')

        fakefile = BytesIO()
        fakefile.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
        fakefile.write(ET.tostring(out, encoding='UTF-8'))
        return fakefile.getvalue()

    def update_epg(self):
        print('Updating Empty EPG cache file.')

        programguide = {}

        timestamps = []
        todaydate = datetime.date.today()
        for x in range(0, 6):
            xdate = todaydate + datetime.timedelta(days=x)
            xtdate = xdate + datetime.timedelta(days=1)

            for hour in range(0, 24):
                time_start = datetime.datetime.combine(xdate, datetime.time(hour, 0))
                if hour + 1 < 24:
                    time_end = datetime.datetime.combine(xdate, datetime.time(hour + 1, 0))
                else:
                    time_end = datetime.datetime.combine(xtdate, datetime.time(0, 0))
                timestampdict = {
                    "time_start": str(time_start.strftime('%Y%m%d%H%M%S')) + " +0000",
                    "time_end": str(time_end.strftime('%Y%m%d%H%M%S')) + " +0000",
                }
                timestamps.append(timestampdict)

        for c in self.serviceproxy.get_channels():
            if str(c["formatted-number"]) not in list(programguide.keys()):
                programguide[str(c["formatted-number"])] = {}

            channel_thumb_path = ("/images?source=empty&type=channel&id=%s" % (str(c['formatted-number'])))
            programguide[str(c["formatted-number"])]["thumbnail"] = channel_thumb_path

            if "name" not in list(programguide[str(c["formatted-number"])].keys()):
                programguide[str(c["formatted-number"])]["name"] = c["name"]

            if "callsign" not in list(programguide[str(c["formatted-number"])].keys()):
                programguide[str(c["formatted-number"])]["callsign"] = c["name"]

            if "id" not in list(programguide[str(c["formatted-number"])].keys()):
                programguide[str(c["formatted-number"])]["id"] = c["id"]

            if "number" not in list(programguide[str(c["formatted-number"])].keys()):
                programguide[str(c["formatted-number"])]["number"] = c["formatted-number"]

            if "listing" not in list(programguide[str(c["formatted-number"])].keys()):
                programguide[str(c["formatted-number"])]["listing"] = []

            for timestamp in timestamps:
                clean_prog_dict = {}

                clean_prog_dict["time_start"] = timestamp['time_start']
                clean_prog_dict["time_end"] = timestamp['time_end']
                clean_prog_dict["duration_minutes"] = 60.0

                content_thumb = ("/images?source=empty&type=content&id=%s" % (str(c['formatted-number'])))
                clean_prog_dict["thumbnail"] = content_thumb

                clean_prog_dict["title"] = "Unavailable"

                clean_prog_dict["genres"] = []

                clean_prog_dict["sub-title"] = "Unavailable"

                clean_prog_dict['releaseyear'] = ""
                clean_prog_dict["episodetitle"] = "Unavailable"

                clean_prog_dict["description"] = "Unavailable"

                clean_prog_dict['rating'] = "N/A"

                programguide[str(c["formatted-number"])]["listing"].append(clean_prog_dict)

        self.epg_cache = programguide
        with open(self.empty_cache_file, 'w') as epgfile:
            epgfile.write(json.dumps(programguide, indent=4))
        print('Wrote updated Empty EPG cache file.')
        return programguide

    def update(self):
        if self.config["fakehdhr"]["epg_method"] == "empty":
            self.update_epg()
        elif self.config["fakehdhr"]["epg_method"] == "proxy":
            self.serviceproxy.update_epg()
        elif self.config["fakehdhr"]["epg_method"] == "zap2it":
            self.zapepg.update_epg()


# Runs in its own process: refresh the EPG cache, then sleep for the configured interval.
def epgServerProcess(config, epghandling):

    if config.config["fakehdhr"]["epg_method"] == "empty":
        sleeptime = config.config["main"]["empty_epg_update_frequency"]
    elif config.config["fakehdhr"]["epg_method"] == "proxy":
        sleeptime = config.config["nextpvr"]["epg_update_frequency"]
    elif config.config["fakehdhr"]["epg_method"] == "zap2it":
        sleeptime = config.config["zap2xml"]["epg_update_frequency"]

    try:

        while True:
            epghandling.update()
            time.sleep(sleeptime)

    except KeyboardInterrupt:
        clean_exit()
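For orientation, the sketch below shows the shape of the `programguide` dictionary that `update_epg()` writes to `epg.json` and that `get_xmltv()` consumes; the proxy and zap2it sources build the same structure. All values are placeholders patterned on the defaults used in the code, not real guide data.

```python
# Hypothetical example of one cached channel entry (placeholder values only).
programguide = {
    "2.1": {                                # keyed by channel number
        "id": "101",
        "number": "2.1",
        "name": "Example",
        "callsign": "Example",
        "thumbnail": "/images?source=empty&type=channel&id=2.1",
        "listing": [
            {
                "time_start": "20200101000000 +0000",
                "time_end": "20200101010000 +0000",
                "duration_minutes": 60.0,
                "title": "Unavailable",
                "sub-title": "Unavailable",
                "description": "Unavailable",
                "genres": [],
                "releaseyear": "",
                "episodetitle": "Unavailable",
                "rating": "N/A",
                "thumbnail": "/images?source=empty&type=content&id=2.1",
            }
        ],
    }
}
```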
epghandler/zap2it.py | 203 (new file)
@@ -0,0 +1,203 @@
import os
import json
import time
import datetime
import urllib.error
import urllib.parse
import urllib.request


def xmltimestamp_zap(inputtime):
    xmltime = inputtime.replace('Z', '+00:00')
    xmltime = datetime.datetime.fromisoformat(xmltime)
    xmltime = xmltime.strftime('%Y%m%d%H%M%S %z')
    return xmltime


def duration_nextpvr_minutes(starttime, endtime):
    return ((int(endtime) - int(starttime))/1000/60)


class ZapEPG():

    def __init__(self, config):

        self.config = config.config
        self.postalcode = config.config["zap2xml"]["postalcode"]
        if not self.postalcode:
            self.postalcode = self.get_location()

        self.epg_cache = None
        self.cache_dir = config.config["main"]["zap_web_cache"]
        self.epg_cache_file = config.config["zap2xml"]["epg_cache"]
        self.epg_cache = self.epg_cache_open()

    def get_location(self):
        url = 'http://ipinfo.io/json'
        response = urllib.request.urlopen(url)
        data = json.load(response)
        return data["postal"]

    def epg_cache_open(self):
        epg_cache = None
        if os.path.isfile(self.epg_cache_file):
            with open(self.epg_cache_file, 'r') as epgfile:
                epg_cache = json.load(epgfile)
        return epg_cache

    def get_cached(self, cache_key, delay, url):
        cache_path = self.cache_dir.joinpath(cache_key)
        if cache_path.is_file():
            print('FROM CACHE:', str(cache_path))
            with open(cache_path, 'rb') as f:
                return f.read()
        else:
            print('Fetching: ', url)
            try:
                resp = urllib.request.urlopen(url)
                result = resp.read()
            except urllib.error.HTTPError as e:
                if e.code == 400:
                    print('Got a 400 error! Ignoring it.')
                    result = (
                        b'{'
                        b'"note": "Got a 400 error at this time, skipping.",'
                        b'"channels": []'
                        b'}')
                else:
                    raise
            with open(cache_path, 'wb') as f:
                f.write(result)
            return result

    def remove_stale_cache(self, todaydate):
        for p in self.cache_dir.glob('*'):
            try:
                cachedate = datetime.datetime.strptime(str(p.name), "%Y-%m-%d")
                todaysdate = datetime.datetime.strptime(str(todaydate), "%Y-%m-%d")
                if cachedate >= todaysdate:
                    continue
            except Exception as e:
                print(e)
                pass
            print('Removing stale cache file:', p.name)
            p.unlink()

    def update_epg(self):
        print('Updating Zap2it EPG cache file.')
        programguide = {}

        # Start time parameter is now rounded down to nearest `zap_timespan`, in s.
        zap_time = time.mktime(time.localtime())
        zap_time_window = int(self.config["zap2xml"]["timespan"]) * 3600
        zap_time = int(zap_time - (zap_time % zap_time_window))

        # Fetch data in `zap_timespan` chunks.
        for i in range(int(7 * 24 / int(self.config["zap2xml"]["timespan"]))):
            i_time = zap_time + (i * zap_time_window)

            parameters = {
                'aid': self.config["zap2xml"]['affiliate_id'],
                'country': self.config["zap2xml"]['country'],
                'device': self.config["zap2xml"]['device'],
                'headendId': self.config["zap2xml"]['headendid'],
                'isoverride': "true",
                'languagecode': self.config["zap2xml"]['languagecode'],
                'pref': 'm,p',
                'timespan': self.config["zap2xml"]['timespan'],
                'timezone': self.config["zap2xml"]['timezone'],
                'userId': self.config["zap2xml"]['userid'],
                'postalCode': self.config["zap2xml"]['postalcode'],
                'lineupId': '%s-%s-DEFAULT' % (self.config["zap2xml"]['country'], self.config["zap2xml"]['device']),
                'time': i_time,
                'Activity_ID': 1,
                'FromPage': "TV%20Guide",
            }

            url = 'https://tvlistings.zap2it.com/api/grid?'
            url += urllib.parse.urlencode(parameters)

            result = self.get_cached(str(i_time), self.config["zap2xml"]['delay'], url)
            d = json.loads(result)

            for c in d['channels']:

                if str(c['channelNo']) not in list(programguide.keys()):
                    programguide[str(c['channelNo'])] = {}

                channel_thumb = str(c['thumbnail']).replace("//", "https://").split("?")[0]
                programguide[str(c["channelNo"])]["thumbnail"] = channel_thumb

                if "name" not in list(programguide[str(c["channelNo"])].keys()):
                    programguide[str(c["channelNo"])]["name"] = c["callSign"]

                if "callsign" not in list(programguide[str(c["channelNo"])].keys()):
                    programguide[str(c["channelNo"])]["callsign"] = c["callSign"]

                if "id" not in list(programguide[str(c["channelNo"])].keys()):
                    programguide[str(c["channelNo"])]["id"] = c["channelId"]

                if "number" not in list(programguide[str(c["channelNo"])].keys()):
                    programguide[str(c["channelNo"])]["number"] = c["channelNo"]

                if "listing" not in list(programguide[str(c["channelNo"])].keys()):
                    programguide[str(c["channelNo"])]["listing"] = []

                for event in c['events']:
                    clean_prog_dict = {}

                    prog_in = event['program']

                    clean_prog_dict["time_start"] = xmltimestamp_zap(event['startTime'])
                    clean_prog_dict["time_end"] = xmltimestamp_zap(event['endTime'])
                    clean_prog_dict["duration_minutes"] = event['duration']

                    content_thumb = str("https://zap2it.tmsimg.com/assets/" + str(event['thumbnail']) + ".jpg")
                    clean_prog_dict["thumbnail"] = content_thumb

                    if 'title' not in list(prog_in.keys()):
                        prog_in["title"] = "Unavailable"
                    elif not prog_in["title"]:
                        prog_in["title"] = "Unavailable"
                    clean_prog_dict["title"] = prog_in["title"]

                    clean_prog_dict["genres"] = []
                    if 'filter' in list(event.keys()):
                        for f in event['filter']:
                            clean_prog_dict["genres"].append(f.replace('filter-', ''))

                    if 'filter-movie' in event['filter'] and prog_in['releaseYear']:
                        clean_prog_dict["sub-title"] = 'Movie: ' + prog_in['releaseYear']
                    elif prog_in['episodeTitle']:
                        clean_prog_dict["sub-title"] = prog_in['episodeTitle']
                    else:
                        clean_prog_dict["sub-title"] = "Unavailable"

                    clean_prog_dict['releaseyear'] = prog_in['releaseYear']

                    if prog_in['shortDesc'] is None:
                        prog_in['shortDesc'] = "Unavailable"
                    clean_prog_dict["description"] = prog_in['shortDesc']

                    if 'rating' not in list(event.keys()):
                        event['rating'] = "N/A"
                    clean_prog_dict['rating'] = event['rating']

                    if 'season' in list(prog_in.keys()) and 'episode' in list(prog_in.keys()):
                        clean_prog_dict["seasonnumber"] = prog_in['season']
                        clean_prog_dict["episodenumber"] = prog_in['episode']
                        clean_prog_dict["episodetitle"] = clean_prog_dict["sub-title"]
                    else:
                        if "movie" not in clean_prog_dict["genres"]:
                            clean_prog_dict["episodetitle"] = clean_prog_dict["sub-title"]

                    if 'New' in event['flag'] and 'live' not in event['flag']:
                        clean_prog_dict["isnew"] = True

                    programguide[str(c["channelNo"])]["listing"].append(clean_prog_dict)

        self.epg_cache = programguide
        with open(self.epg_cache_file, 'w') as epgfile:
            epgfile.write(json.dumps(programguide, indent=4))
        print('Wrote updated Zap2it EPG cache file.')
        return programguide
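A small sketch of the request-window arithmetic in `update_epg()` above, assuming the default `timespan` of 6 hours from `fhdhrconfig`: the start time is rounded down to a window boundary, and 28 windows cover a 7-day guide.

```python
# Illustration only; mirrors the window math in ZapEPG.update_epg().
import time

timespan_hours = 6                      # zap2xml "timespan" default
window = timespan_hours * 3600          # window size in seconds
now = time.mktime(time.localtime())
start = int(now - (now % window))       # rounded down to the window boundary

windows = [start + i * window for i in range(int(7 * 24 / timespan_hours))]
print(len(windows))                     # 28 grid requests for a 7-day guide
```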
fakehdhr/__init__.py | 362 (new file)
@@ -0,0 +1,362 @@
from gevent.pywsgi import WSGIServer
from flask import (Flask, send_from_directory, request, Response,
                   abort, stream_with_context)
from io import BytesIO
import xml.etree.ElementTree as ET
import json
import time
import requests
import subprocess
import errno
import PIL.Image
import PIL.ImageDraw
import PIL.ImageFont


def sub_el(parent, name, text=None, **kwargs):
    el = ET.SubElement(parent, name, **kwargs)
    if text:
        el.text = text
    return el


def getSize(txt, font):
    testImg = PIL.Image.new('RGB', (1, 1))
    testDraw = PIL.ImageDraw.Draw(testImg)
    return testDraw.textsize(txt, font)


class HDHR_Hub():
    config = None
    serviceproxy = None
    epghandling = None
    station_scan = False
    station_list = []

    def __init__(self):
        self.station_scan = False

    def get_xmltv(self, base_url):
        return self.epghandling.get_xmltv(base_url)

    def get_image(self, req_args):
        imageid = req_args["id"]

        if req_args["source"] == "proxy":
            if req_args["type"] == "channel":
                imageUri = self.serviceproxy.get_channel_thumbnail(imageid)
            elif req_args["type"] == "content":
                imageUri = self.serviceproxy.get_content_thumbnail(imageid)
            req = requests.get(imageUri)
            return req.content

        elif req_args["source"] == "empty":
            if req_args["type"] == "channel":
                width = 360
                height = 270
                text = req_args["id"]
                fontsize = 72
            elif req_args["type"] == "content":
                width = 1080
                height = 1440
                fontsize = 100
                text = req_args["id"]

            colorBackground = "#228822"
            colorText = "#717D7E"
            colorOutline = "#717D7E"
            fontname = str(self.config["fakehdhr"]["font"])

            font = PIL.ImageFont.truetype(fontname, fontsize)
            text_width, text_height = getSize(text, font)
            img = PIL.Image.new('RGBA', (width+4, height+4), colorBackground)
            d = PIL.ImageDraw.Draw(img)
            d.text(((width-text_width)/2, (height-text_height)/2), text, fill=colorText, font=font)
            d.rectangle((0, 0, width+3, height+3), outline=colorOutline)

            s = BytesIO()
            img.save(s, 'png')
            return s.getvalue()

    def get_xmldiscover(self, base_url):
        out = ET.Element('root')
        out.set('xmlns', "urn:schemas-upnp-org:device-1-0")

        sub_el(out, 'URLBase', "http://" + base_url)

        specVersion_out = sub_el(out, 'specVersion')
        sub_el(specVersion_out, 'major', "1")
        sub_el(specVersion_out, 'minor', "0")

        device_out = sub_el(out, 'device')
        sub_el(device_out, 'deviceType', "urn:schemas-upnp-org:device:MediaServer:1")
        sub_el(device_out, 'friendlyName', self.config["fakehdhr"]["friendlyname"])
        sub_el(device_out, 'manufacturer', "Silicondust")
        sub_el(device_out, 'modelName', self.config["dev"]["reporting_model"])
        sub_el(device_out, 'modelNumber', self.config["dev"]["reporting_model"])
        sub_el(device_out, 'serialNumber')
        sub_el(device_out, 'UDN', "uuid:" + self.config["main"]["uuid"])

        fakefile = BytesIO()
        fakefile.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
        fakefile.write(ET.tostring(out, encoding='UTF-8'))
        return fakefile.getvalue()

    def get_discover_json(self, base_url):
        jsondiscover = {
            "FriendlyName": self.config["fakehdhr"]["friendlyname"],
            "Manufacturer": "Silicondust",
            "ModelNumber": self.config["dev"]["reporting_model"],
            "FirmwareName": self.config["dev"]["reporting_firmware_name"],
            "TunerCount": self.config["fakehdhr"]["tuner_count"],
            "FirmwareVersion": self.config["dev"]["reporting_firmware_ver"],
            "DeviceID": self.config["main"]["uuid"],
            "DeviceAuth": "nextpvrproxy",
            "BaseURL": "http://" + base_url,
            "LineupURL": "http://" + base_url + "/lineup.json"
        }
        return jsondiscover

    def get_lineup_status(self):
        if self.station_scan:
            channel_count = self.serviceproxy.get_station_total()
            jsonlineup = {
                "ScanInProgress": "true",
                "Progress": 99,
                "Found": channel_count
            }
        else:
            jsonlineup = {
                "ScanInProgress": "false",
                "ScanPossible": "true",
                "Source": self.config["dev"]["reporting_tuner_type"],
                "SourceList": [self.config["dev"]["reporting_tuner_type"]],
            }
        return jsonlineup

    def get_lineup_xml(self, base_url):
        out = ET.Element('Lineup')
        station_list = self.serviceproxy.get_station_list(base_url)
        for station_item in station_list:
            program_out = sub_el(out, 'Program')
            sub_el(program_out, 'GuideNumber', station_item['GuideNumber'])
            sub_el(program_out, 'GuideName', station_item['GuideName'])
            sub_el(program_out, 'URL', station_item['URL'])

        fakefile = BytesIO()
        fakefile.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
        fakefile.write(ET.tostring(out, encoding='UTF-8'))
        return fakefile.getvalue()

    def get_debug(self, base_url):
        debugjson = {
            "base_url": base_url,
        }
        return debugjson

    def get_html_error(self, message):
        htmlerror = """<html>
        <head></head>
        <body>
        <h2>{}</h2>
        </body>
        </html>"""
        return htmlerror.format(message)

    def station_scan_change(self, enablement):
        self.station_scan = enablement


hdhr = HDHR_Hub()


class HDHR_HTTP_Server():
    app = Flask(__name__,)

    @app.route('/')
    def root_path():
        return hdhr.config["fakehdhr"]["friendlyname"]

    @app.route('/favicon.ico', methods=['GET'])
    def favicon():
        return send_from_directory(hdhr.config["main"]["www_dir"],
                                   'favicon.ico',
                                   mimetype='image/vnd.microsoft.icon')

    @app.route('/device.xml', methods=['GET'])
    def device_xml():
        base_url = request.headers["host"]
        devicexml = hdhr.get_xmldiscover(base_url)
        return Response(status=200,
                        response=devicexml,
                        mimetype='application/xml')

    @app.route('/discover.json', methods=['GET'])
    def discover_json():
        base_url = request.headers["host"]
        jsondiscover = hdhr.get_discover_json(base_url)
        return Response(status=200,
                        response=json.dumps(jsondiscover, indent=4),
                        mimetype='application/json')

    @app.route('/lineup_status.json', methods=['GET'])
    def lineup_status_json():
        linup_status_json = hdhr.get_lineup_status()
        return Response(status=200,
                        response=json.dumps(linup_status_json, indent=4),
                        mimetype='application/json')

    @app.route('/lineup.xml', methods=['GET'])
    def lineup_xml():
        base_url = request.headers["host"]
        lineupxml = hdhr.get_lineup_xml(base_url)
        return Response(status=200,
                        response=lineupxml,
                        mimetype='application/xml')

    @app.route('/lineup.json', methods=['GET'])
    def lineup_json():
        base_url = request.headers["host"]
        station_list = hdhr.serviceproxy.get_station_list(base_url)
        return Response(status=200,
                        response=json.dumps(station_list, indent=4),
                        mimetype='application/json')

    @app.route('/xmltv.xml', methods=['GET'])
    def xmltv_xml():
        base_url = request.headers["host"]
        xmltv = hdhr.get_xmltv(base_url)
        return Response(status=200,
                        response=xmltv,
                        mimetype='application/xml')

    @app.route('/debug.json', methods=['GET'])
    def debug_json():
        base_url = request.headers["host"]
        debugreport = hdhr.get_debug(base_url)
        return Response(status=200,
                        response=json.dumps(debugreport, indent=4),
                        mimetype='application/json')

    @app.route('/images', methods=['GET'])
    def images_nothing():
        if ('source' not in list(request.args.keys()) or 'id' not in list(request.args.keys()) or 'type' not in list(request.args.keys())):
            abort(404)

        image = hdhr.get_image(request.args)
        return Response(image, content_type='image/png', direct_passthrough=True)

    @app.route('/watch', methods=['GET'])
    def watch_nothing():
        if 'method' in list(request.args.keys()):
            if 'channel' in list(request.args.keys()):

                station_list = hdhr.serviceproxy.get_channel_streams()
                channelUri = station_list[str(request.args["channel"])]
                if not channelUri:
                    abort(404)

                if request.args["method"] == "direct":
                    duration = request.args.get('duration', default=0, type=int)

                    if not duration == 0:
                        duration += time.time()

                    req = requests.get(channelUri, stream=True)

                    def generate():
                        yield ''
                        for chunk in req.iter_content(chunk_size=hdhr.config["direct_stream"]['chunksize']):
                            if not duration == 0 and not time.time() < duration:
                                req.close()
                                break
                            yield chunk

                    return Response(generate(), content_type=req.headers['content-type'], direct_passthrough=True)

                if request.args["method"] == "ffmpeg":

                    ffmpeg_command = [hdhr.config["ffmpeg"]["ffmpeg_path"],
                                      "-i", channelUri,
                                      "-c", "copy",
                                      "-f", "mpegts",
                                      "-nostats", "-hide_banner",
                                      "-loglevel", "warning",
                                      "pipe:stdout"
                                      ]
                    ffmpeg_proc = subprocess.Popen(ffmpeg_command, stdout=subprocess.PIPE)

                    def generate():

                        videoData = ffmpeg_proc.stdout.read(int(hdhr.config["ffmpeg"]["bytes_per_read"]))

                        while True:
                            if not videoData:
                                break
                            else:
                                # from https://stackoverflow.com/questions/9932332
                                try:
                                    yield videoData
                                    time.sleep(0.1)
                                except IOError as e:
                                    # Check we hit a broken pipe when trying to write back to the client
                                    if e.errno == errno.EPIPE:
                                        # Send SIGTERM to shutdown ffmpeg
                                        ffmpeg_proc.terminate()
                                        # ffmpeg writes a bit of data out to stderr after it terminates,
                                        # need to read any hanging data to prevent a zombie process.
                                        ffmpeg_proc.communicate()
                                        break
                                    else:
                                        raise

                            videoData = ffmpeg_proc.stdout.read(int(hdhr.config["ffmpeg"]["bytes_per_read"]))

                        ffmpeg_proc.terminate()
                        try:
                            ffmpeg_proc.communicate()
                        except ValueError:
                            print("Connection Closed")

                    return Response(stream_with_context(generate()), mimetype="audio/mpeg")
        abort(404)

    @app.route('/lineup.post', methods=['POST'])
    def lineup_post():
        if 'scan' in list(request.args.keys()):
            if request.args['scan'] == 'start':
                hdhr.station_scan_change(True)
                hdhr.station_list = []
                hdhr.station_scan_change(False)
                return Response(status=200, mimetype='text/html')

            elif request.args['scan'] == 'abort':
                return Response(status=200, mimetype='text/html')

            else:
                print("Unknown scan command " + request.args['scan'])
                currenthtmlerror = hdhr.get_html_error("501 - " + request.args['scan'] + " is not a valid scan command")
                return Response(status=200, response=currenthtmlerror, mimetype='text/html')

        else:
            currenthtmlerror = hdhr.get_html_error("501 - not a valid command")
            return Response(status=200, response=currenthtmlerror, mimetype='text/html')

    def __init__(self, config):
        self.config = config.config

    def run(self):
        http = WSGIServer((
            self.config["fakehdhr"]["address"],
            int(self.config["fakehdhr"]["port"])
        ), self.app.wsgi_app)
        http.serve_forever()


def interface_start(config, serviceproxy, epghandling):
    hdhr.config = config.config
    hdhr.station_scan = False
    hdhr.serviceproxy = serviceproxy
    hdhr.epghandling = epghandling
    fakhdhrserver = HDHR_HTTP_Server(config)
    fakhdhrserver.run()
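A minimal client-side sketch of the Flask routes defined above, assuming the default port 5004 from `fhdhrconfig` and a server reachable at the placeholder address 127.0.0.1.

```python
# Illustrative client calls; the address is a placeholder for your install.
import requests

base = "http://127.0.0.1:5004"

print(requests.get(base + "/discover.json").json()["FriendlyName"])  # device info for HDHomeRun clients
print(requests.get(base + "/lineup.json").json())                    # channel lineup built from NextPVR
print(requests.get(base + "/xmltv.xml").headers["Content-Type"])     # XMLTV guide data
```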
fhdhrconfig/__init__.py | 170 (new file)
@@ -0,0 +1,170 @@
import os
import sys
import random
import configparser
import pathlib


def clean_exit():
    sys.stderr.flush()
    sys.stdout.flush()
    os._exit(0)


class HDHRConfig():

    config_file = None
    config_handler = configparser.ConfigParser()
    script_dir = None

    config = {
        "main": {
            'uuid': None,
            "cache_dir": None,
            "empty_epg_update_frequency": 43200,
        },
        "nextpvr": {
            "address": "localhost",
            "port": 8866,
            "ssl": False,
            "pin": None,
            "weight": 300,  # subscription priority
            "sidfile": None,
            "epg_update_frequency": 43200,
        },
        "fakehdhr": {
            "address": "0.0.0.0",
            "port": 5004,
            "discovery_address": "0.0.0.0",
            "tuner_count": 4,  # number of tuners in tvh
            "concurrent_listeners": 10,
            "friendlyname": "fHDHR-NextPVR",
            "stream_type": "direct",
            "epg_method": "proxy",
            "font": None,
        },
        "zap2xml": {
            "delay": 5,
            "postalcode": None,
            "affiliate_id": 'gapzap',
            "country": 'USA',
            "device": '-',
            "headendid": "lineupId",
            "isoverride": True,
            "languagecode": 'en',
            "pref": "",
            "timespan": 6,
            "timezone": "",
            "userid": "-",
            "epg_update_frequency": 43200,
        },
        "ffmpeg": {
            'ffmpeg_path': "ffmpeg",
            'bytes_per_read': '1152000',
            "font": None,
        },
        "direct_stream": {
            'chunksize': 1024*1024  # usually you don't need to edit this
        },
        "dev": {
            'reporting_model': 'HDHR4-2DT',
            'reporting_firmware_name': 'hdhomerun4_dvbt',
            'reporting_firmware_ver': '20150826',
            'reporting_tuner_type': "Antenna",
        }
    }

    def __init__(self, script_dir, args):
        self.get_config_path(script_dir, args)
        self.import_config()
        self.config_adjustments(script_dir)

    def get_config_path(self, script_dir, args):
        if args.cfg:
            self.config_file = pathlib.Path(str(args.cfg))
        if not self.config_file or not os.path.exists(self.config_file):
            print("Config file missing, Exiting...")
            clean_exit()
        print("Loading Configuration File: " + str(self.config_file))

    def import_config(self):
        self.config_handler.read(self.config_file)
        for each_section in self.config_handler.sections():
            if each_section not in list(self.config.keys()):
                self.config[each_section] = {}
            for (each_key, each_val) in self.config_handler.items(each_section):
                self.config[each_section.lower()][each_key.lower()] = each_val

    def write(self, section, key, value):
        self.config[section][key] = value
        self.config_handler.set(section, key, value)

        with open(self.config_file, 'w') as config_file:
            self.config_handler.write(config_file)

    def config_adjustments(self, script_dir):

        self.config["main"]["script_dir"] = script_dir

        data_dir = pathlib.Path(script_dir).joinpath('data')
        self.config["main"]["data_dir"] = data_dir

        self.config["fakehdhr"]["font"] = pathlib.Path(data_dir).joinpath('garamond.ttf')

        if not self.config["main"]["cache_dir"]:
            self.config["main"]["cache_dir"] = pathlib.Path(data_dir).joinpath('cache')
        else:
            self.config["main"]["cache_dir"] = pathlib.Path(self.config["main"]["cache_dir"])
            if not self.config["main"]["cache_dir"].is_dir():
                print("Invalid Cache Directory. Exiting...")
                clean_exit()
        cache_dir = self.config["main"]["cache_dir"]

        if not self.config["nextpvr"]["pin"]:
            print("NextPVR Login Credentials Missing. Exiting...")
            clean_exit()

        empty_cache = pathlib.Path(cache_dir).joinpath('empty_cache')
        self.config["main"]["empty_cache"] = empty_cache
        if not empty_cache.is_dir():
            empty_cache.mkdir()
        self.config["main"]["empty_cache_file"] = pathlib.Path(empty_cache).joinpath('epg.json')

        nextpvr_cache = pathlib.Path(cache_dir).joinpath('nextpvr')
        self.config["main"]["nextpvr_cache"] = nextpvr_cache
        if not nextpvr_cache.is_dir():
            nextpvr_cache.mkdir()
        self.config["nextpvr"]["sidfile"] = pathlib.Path(nextpvr_cache).joinpath('sid.txt')
        self.config["nextpvr"]["epg_cache"] = pathlib.Path(nextpvr_cache).joinpath('epg.json')

        zap_cache = pathlib.Path(cache_dir).joinpath('zap2it')
        self.config["main"]["zap_cache"] = zap_cache
        if not zap_cache.is_dir():
            zap_cache.mkdir()
        self.config["zap2xml"]["epg_cache"] = pathlib.Path(zap_cache).joinpath('epg.json')
        zap_web_cache = pathlib.Path(zap_cache).joinpath('zap_web_cache')
        self.config["main"]["zap_web_cache"] = zap_web_cache
        if not zap_web_cache.is_dir():
            zap_web_cache.mkdir()

        www_dir = pathlib.Path(data_dir).joinpath('www')
        self.config["main"]["www_dir"] = www_dir
        self.config["main"]["favicon"] = pathlib.Path(www_dir).joinpath('favicon.ico')

        www_image_dir = pathlib.Path(www_dir).joinpath('images')
        self.config["main"]["www_image_dir"] = www_image_dir
        self.config["main"]["image_def_channel"] = pathlib.Path(www_image_dir).joinpath("default-channel-thumb.png")
        self.config["main"]["image_def_content"] = pathlib.Path(www_image_dir).joinpath("default-content-thumb.png")

        # generate UUID here for when we are not using docker
        if self.config["main"]["uuid"] is None:
            print("No UUID found. Generating one now...")
            # from https://pynative.com/python-generate-random-string/
            # create a string that wouldn't be a real device uuid for
            self.config["main"]["uuid"] = ''.join(random.choice("hijklmnopqrstuvwxyz") for i in range(8))
            self.write('main', 'uuid', self.config["main"]["uuid"])
            print("UUID set to: " + self.config["main"]["uuid"] + "...")

        print("Server is set to run on " +
              str(self.config["fakehdhr"]["address"]) + ":" +
              str(self.config["fakehdhr"]["port"]))
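One detail worth noting about `import_config()` above: `configparser` returns every value read from `config.ini` as a string, so integer defaults become strings once overridden from the file; this is why, for example, `fakehdhr/__init__.py` wraps the port in `int()` before binding. A tiny sketch:

```python
# Illustration of configparser's string-typed values.
import configparser

parser = configparser.ConfigParser()
parser.read_string("""
[fakehdhr]
port = 5004
""")

port = parser["fakehdhr"]["port"]
print(type(port).__name__)   # 'str' -- values from the file arrive as strings
print(int(port))             # 5004
```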
main.py | 65 (new file)
@@ -0,0 +1,65 @@
import os
import sys
import pathlib
import argparse
from multiprocessing import Process

import fhdhrconfig
import proxyservice
import fakehdhr
import epghandler
import ssdpserver


if sys.version_info.major == 2 or sys.version_info < (3, 3):
    print('Error: FakeHDHR requires python 3.3+.')
    sys.exit(1)


def get_args():
    parser = argparse.ArgumentParser(description='FakeHDHR.', epilog='')
    parser.add_argument('--config_file', dest='cfg', type=str, default=None, help='')
    return parser.parse_args()


def clean_exit():
    sys.stderr.flush()
    sys.stdout.flush()
    os._exit(0)


if __name__ == '__main__':

    # Gather args
    args = get_args()

    # set to directory of script
    script_dir = pathlib.Path(os.path.dirname(os.path.abspath(__file__)))

    # Open Configuration File
    print("Opening and Verifying Configuration File.")
    config = fhdhrconfig.HDHRConfig(script_dir, args)

    # Open proxyservice
    serviceproxy = proxyservice.proxyserviceFetcher(config)

    # Open EPG Handler
    epghandling = epghandler.EPGhandler(config, serviceproxy)

    try:

        print("Starting EPG thread...")
        epgServer = Process(target=epghandler.epgServerProcess, args=(config, epghandling))
        epgServer.start()

        print("Starting fHDHR Interface")
        fhdhrServer = Process(target=fakehdhr.interface_start, args=(config, serviceproxy, epghandling))
        fhdhrServer.start()

        print("Starting SSDP server...")
        ssdpServer = Process(target=ssdpserver.ssdpServerProcess, args=(config,))
        ssdpServer.daemon = True
        ssdpServer.start()

    except KeyboardInterrupt:
        print('^C received, shutting down the server')
        clean_exit()
proxyservice/__init__.py | 292 (new file)
@@ -0,0 +1,292 @@
import os
import xmltodict
import json
import hashlib
import datetime
import urllib.error
import urllib.parse
import urllib.request


# Handles NextPVR session authentication (session.initiate / session.login).
class NextPVR_Auth():
    config = {
        'npvrURL': '',
        'npvrSID': '',
        'npvrPIN': '',
    }
    sidfile = None

    def __init__(self, config):
        self.sidfile = config.config["nextpvr"]["sidfile"]
        self.config["npvrPIN"] = config.config["nextpvr"]["pin"]
        self.config["npvrURL"] = ('%s%s:%s' %
                                  ("https://" if config.config["nextpvr"]["ssl"] else "http://",
                                   config.config["nextpvr"]["address"],
                                   str(config.config["nextpvr"]["port"]),
                                   ))

    def _check_sid(self):
        if 'sid' not in self.config:
            if os.path.isfile(self.sidfile):
                with open(self.sidfile, 'r') as text_file:
                    self.config['sid'] = text_file.read()
                print('Read SID from file.')
            else:
                self._get_sid()

        return True

    def _get_sid(self):
        sid = ''
        salt = ''
        clientKey = ''

        initiate_url = "%s/service?method=session.initiate&ver=1.0&device=fhdhr" % self.config['npvrURL']

        initiate_req = urllib.request.urlopen(initiate_url)
        initiate_dict = xmltodict.parse(initiate_req)

        sid = initiate_dict['rsp']['sid']
        salt = initiate_dict['rsp']['salt']
        md5PIN = hashlib.md5(self.config['npvrPIN'].encode('utf-8')).hexdigest()
        string = ':%s:%s' % (md5PIN, salt)
        clientKey = hashlib.md5(string.encode('utf-8')).hexdigest()

        login_url = '%s/service?method=session.login&sid=%s&md5=%s' % (self.config['npvrURL'], sid, clientKey)
        login_req = urllib.request.urlopen(login_url)
        login_dict = xmltodict.parse(login_req)

        if login_dict['rsp']['allow_watch'] == "true":
            self.config['sid'] = sid
            with open(self.sidfile, 'w') as text_file:
                text_file.write(self.config['sid'])
            print('Wrote SID to file.')
        else:
            print("NextPVR Login Failed")
            self.config['sid'] = ''


def xmltimestamp_nextpvr(epochtime):
    xmltime = datetime.datetime.fromtimestamp(int(epochtime)/1000)
    xmltime = str(xmltime.strftime('%Y%m%d%H%M%S')) + " +0000"
    return xmltime


def duration_nextpvr_minutes(starttime, endtime):
    return ((int(endtime) - int(starttime))/1000/60)


class proxyserviceFetcher():

    def __init__(self, config):
        self.config = config.config

        self.epg_cache = None
        self.epg_cache_file = config.config["nextpvr"]["epg_cache"]

        self.servicename = "NextPVRProxy"

        self.urls = {}
        self.url_assembler()

        self.auth = NextPVR_Auth(config)

        self.epg_cache = self.epg_cache_open()

    def epg_cache_open(self):
        epg_cache = None
        if os.path.isfile(self.epg_cache_file):
            with open(self.epg_cache_file, 'r') as epgfile:
                epg_cache = json.load(epgfile)
        return epg_cache

    def url_assembler(self):
        pass

    def get_channels(self):
        self.auth._check_sid()

        url = ('%s%s:%s/service?method=channel.list&sid=%s' %
               ("https://" if self.config["nextpvr"]["ssl"] else "http://",
                self.config["nextpvr"]["address"],
                str(self.config["nextpvr"]["port"]),
                self.auth.config['sid']
                ))

        r = urllib.request.urlopen(url)
        data_dict = xmltodict.parse(r)

        if 'channels' not in list(data_dict['rsp'].keys()):
            print("could not retrieve channel list")
            return []

        channel_o_list = data_dict['rsp']['channels']['channel']

        channel_list = []
        for c in channel_o_list:
            dString = json.dumps(c)
            channel_dict = eval(dString)
            channel_list.append(channel_dict)
        return channel_list

    def get_station_list(self, base_url):
        station_list = []

        for c in self.get_channels():
            if self.config["fakehdhr"]["stream_type"] == "ffmpeg":
                watchtype = "ffmpeg"
            else:
                watchtype = "direct"
            url = ('%s%s/watch?method=%s&channel=%s' %
                   ("http://",
                    base_url,
                    watchtype,
                    c['formatted-number']
                    ))
            station_list.append(
                {
                    'GuideNumber': str(c['formatted-number']),
                    'GuideName': c['name'],
                    'URL': url
                })
        return station_list

    def get_station_total(self):
        total_channels = 0
        for c in self.get_channels():
            total_channels += 1
        return total_channels

    def get_channel_streams(self):
        streamdict = {}
        for c in self.get_channels():
            url = ('%s%s:%s/live?channel=%s&client=%s' %
                   ("https://" if self.config["nextpvr"]["ssl"] else "http://",
                    self.config["nextpvr"]["address"],
                    str(self.config["nextpvr"]["port"]),
                    str(c["formatted-number"]),
                    str(c["formatted-number"]),
                    ))
            streamdict[str(c["formatted-number"])] = url
        return streamdict

    def get_channel_thumbnail(self, channel_id):
        channel_thumb_url = ("%s%s:%s/service?method=channel.icon&channel_id=%s" %
                             ("https://" if self.config["nextpvr"]["ssl"] else "http://",
                              self.config["nextpvr"]["address"],
                              str(self.config["nextpvr"]["port"]),
                              str(channel_id)
                              ))
        return channel_thumb_url

    def get_content_thumbnail(self, content_id):
        self.auth._check_sid()
        item_thumb_url = ("%s%s:%s/service?method=channel.show.artwork&sid=%s&event_id=%s" %
                          ("https://" if self.config["nextpvr"]["ssl"] else "http://",
                           self.config["nextpvr"]["address"],
                           str(self.config["nextpvr"]["port"]),
                           self.auth.config['sid'],
                           str(content_id)
                           ))
        return item_thumb_url

    def update_epg(self):
        print('Updating NextPVR EPG cache file.')
        self.auth._check_sid()

        programguide = {}

        for c in self.get_channels():
            if str(c["formatted-number"]) not in list(programguide.keys()):
                programguide[str(c["formatted-number"])] = {}

            channel_thumb_path = ("/images?source=proxy&type=channel&id=%s" % (str(c['id'])))
            programguide[str(c["formatted-number"])]["thumbnail"] = channel_thumb_path

            if "name" not in list(programguide[str(c["formatted-number"])].keys()):
                programguide[str(c["formatted-number"])]["name"] = c["name"]

            if "callsign" not in list(programguide[str(c["formatted-number"])].keys()):
                programguide[str(c["formatted-number"])]["callsign"] = c["name"]

            if "id" not in list(programguide[str(c["formatted-number"])].keys()):
                programguide[str(c["formatted-number"])]["id"] = c["id"]

            if "number" not in list(programguide[str(c["formatted-number"])].keys()):
                programguide[str(c["formatted-number"])]["number"] = c["formatted-number"]

            if "listing" not in list(programguide[str(c["formatted-number"])].keys()):
                programguide[str(c["formatted-number"])]["listing"] = []

            epg_url = ('%s%s:%s/service?method=channel.listings&channel_id=%s' %
                       ("https://" if self.config["nextpvr"]["ssl"] else "http://",
                        self.config["nextpvr"]["address"],
                        str(self.config["nextpvr"]["port"]),
                        str(c["id"]),
                        ))
            epg_req = urllib.request.urlopen(epg_url)
            epg_dict = xmltodict.parse(epg_req)

            for program_listing in epg_dict["rsp"]["listings"]:
                for program_item in epg_dict["rsp"]["listings"][program_listing]:
                    if not isinstance(program_item, str):
                        dirty_prog_dict = {}
                        for programkey in list(program_item.keys()):
                            dirty_prog_dict[programkey] = program_item[programkey]

                        clean_prog_dict = {}

                        clean_prog_dict["time_start"] = xmltimestamp_nextpvr(dirty_prog_dict["start"])
                        clean_prog_dict["time_end"] = xmltimestamp_nextpvr(dirty_prog_dict["end"])
                        clean_prog_dict["duration_minutes"] = duration_nextpvr_minutes(dirty_prog_dict["start"], dirty_prog_dict["end"])

                        item_thumb_path = ("/images?source=proxy&type=content&id=%s" % (str(dirty_prog_dict['id'])))
                        clean_prog_dict["thumbnail"] = item_thumb_path

                        if 'name' not in list(dirty_prog_dict.keys()):
                            dirty_prog_dict["name"] = "Unavailable"
                        elif not dirty_prog_dict["name"]:
                            dirty_prog_dict["name"] = "Unavailable"
                        clean_prog_dict["title"] = dirty_prog_dict["name"]

                        if 'genre' not in list(dirty_prog_dict.keys()):
                            clean_prog_dict["genres"] = []
                        else:
                            clean_prog_dict["genres"] = dirty_prog_dict['genre'].split(",")

                        if 'subtitle' not in list(dirty_prog_dict.keys()):
                            dirty_prog_dict["subtitle"] = "Unavailable"
                        clean_prog_dict["sub-title"] = dirty_prog_dict["subtitle"]

                        if dirty_prog_dict['subtitle'].startswith("Movie:"):
                            clean_prog_dict['releaseyear'] = dirty_prog_dict['subtitle'].split("Movie:")[-1]
                        else:
                            clean_prog_dict['releaseyear'] = None

                        if 'description' not in list(dirty_prog_dict.keys()):
                            dirty_prog_dict["description"] = "Unavailable"
                        elif not dirty_prog_dict['description']:
                            dirty_prog_dict["description"] = "Unavailable"
                        clean_prog_dict["description"] = dirty_prog_dict["description"]

                        if 'rating' not in list(dirty_prog_dict.keys()):
                            dirty_prog_dict['rating'] = "N/A"
                        clean_prog_dict['rating'] = dirty_prog_dict['rating']

                        if 'season' in list(dirty_prog_dict.keys()) and 'episode' in list(dirty_prog_dict.keys()):
                            clean_prog_dict["seasonnumber"] = dirty_prog_dict['season']
                            clean_prog_dict["episodenumber"] = dirty_prog_dict['episode']
                            clean_prog_dict["episodetitle"] = clean_prog_dict["sub-title"]
                        else:
                            if "movie" not in clean_prog_dict["genres"]:
                                clean_prog_dict["episodetitle"] = clean_prog_dict["sub-title"]

                        # TODO isNEW

                        programguide[str(c["formatted-number"])]["listing"].append(clean_prog_dict)

        self.epg_cache = programguide
        with open(self.epg_cache_file, 'w') as epgfile:
            epgfile.write(json.dumps(programguide, indent=4))
        print('Wrote updated NextPVR EPG cache file.')
        return programguide
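A distilled sketch of the session handshake that `NextPVR_Auth._get_sid()` implements above: `session.initiate` returns a `sid` and a `salt`, and the login key sent to `session.login` is `md5(":" + md5(pin) + ":" + salt)`. The pin and salt values below are hypothetical.

```python
# Illustration of the clientKey derivation used in the NextPVR login request.
import hashlib

def nextpvr_client_key(pin, salt):
    md5_pin = hashlib.md5(pin.encode('utf-8')).hexdigest()
    return hashlib.md5((':%s:%s' % (md5_pin, salt)).encode('utf-8')).hexdigest()

print(nextpvr_client_key("0000", "example-salt"))  # placeholder pin and salt
```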
requirements.txt | 4 (new file)
@@ -0,0 +1,4 @@
requests
gevent
flask
image
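Note: `proxyservice/__init__.py` in this same commit imports `xmltodict`, which is not listed here; it may need to be installed separately alongside the packages above.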
ssdpserver/__init__.py | 239 (new file)
@@ -0,0 +1,239 @@
# Licensed under the MIT license
# http://opensource.org/licenses/mit-license.php

# Copyright 2005, Tim Potter <tpot@samba.org>
# Copyright 2006 John-Mark Gurney <gurney_j@resnet.uroegon.edu>
# Copyright (C) 2006 Fluendo, S.A. (www.fluendo.com).
# Copyright 2006,2007,2008,2009 Frank Scholz <coherence@beebits.net>
# Copyright 2016 Erwan Martin <public@fzwte.net>
#
# Implementation of a SSDP server.
#

import random
import time
import socket
import logging
from email.utils import formatdate
from errno import ENOPROTOOPT

SSDP_ADDR = '239.255.255.250'


logger = logging.getLogger()


# mostly from https://github.com/ZeWaren/python-upnp-ssdp-example
def ssdpServerProcess(config):
    ssdp = SSDPServer()
    ssdp.ssdp_port = 1900
    ssdp.register('local',
                  'uuid:' + config.config["main"]["uuid"] + '::upnp:rootdevice',
                  'upnp:rootdevice',
                  'http://' + config.config["fakehdhr"]["discovery_address"] + ':' +
                  config.config["fakehdhr"]["port"] + '/device.xml')
    try:
        ssdp.run()
    except KeyboardInterrupt:
        pass

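`ssdpServerProcess` is written to run as its own worker: it builds the server, registers the root device URL, and then blocks in `run()` until interrupted. The launcher is not part of this section; a minimal sketch, assuming the same `config` object used above and a `multiprocessing` worker, might be:

```python
import multiprocessing

# Hypothetical launcher -- the actual fHDHR startup code is not shown in this diff.
ssdp_proc = multiprocessing.Process(target=ssdpServerProcess, args=(config,))
ssdp_proc.daemon = True
ssdp_proc.start()
```

Keeping the blocking multicast loop in its own process keeps it out of the main web-serving process.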
class SSDPServer:
    """A class implementing a SSDP server. The notify_received and
    searchReceived methods are called when the appropriate type of
    datagram is received by the server."""
    known = {}

    def __init__(self):
        self.sock = None

    def run(self):
        self.sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        if hasattr(socket, "SO_REUSEPORT"):
            try:
                self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEPORT, 1)
            except socket.error as le:
                # RHEL6 defines SO_REUSEPORT but it doesn't work
                if le.errno == ENOPROTOOPT:
                    pass
                else:
                    raise

        addr = socket.inet_aton(SSDP_ADDR)
        interface = socket.inet_aton('0.0.0.0')
        cmd = socket.IP_ADD_MEMBERSHIP
        self.sock.setsockopt(socket.IPPROTO_IP, cmd, addr + interface)
        self.sock.bind(('0.0.0.0', self.ssdp_port))
        self.sock.settimeout(1)

        while True:
            try:
                data, addr = self.sock.recvfrom(1024)
                self.datagram_received(data, addr)
            except socket.timeout:
                continue
        self.shutdown()
    def shutdown(self):
        for st in self.known:
            if self.known[st]['MANIFESTATION'] == 'local':
                self.do_byebye(st)

    def datagram_received(self, data, host_port):
        """Handle a received multicast datagram."""

        (host, port) = host_port

        try:
            header, payload = data.decode().split('\r\n\r\n')[:2]
        except ValueError as err:
            logger.error(err)
            return

        lines = header.split('\r\n')
        cmd = lines[0].split(' ')
        lines = [x.replace(': ', ':', 1) for x in lines[1:]]
        lines = [x for x in lines if len(x) > 0]

        headers = [x.split(':', 1) for x in lines]
        headers = dict([(x[0].lower(), x[1]) for x in headers])

        logger.info('SSDP command %s %s - from %s:%d' % (cmd[0], cmd[1], host, port))
        logger.debug('with headers: {}.'.format(headers))
        if cmd[0] == 'M-SEARCH' and cmd[1] == '*':
            # SSDP discovery
            self.discovery_request(headers, (host, port))
        elif cmd[0] == 'NOTIFY' and cmd[1] == '*':
            # SSDP presence
            logger.debug('NOTIFY *')
        else:
            logger.warning('Unknown SSDP command %s %s' % (cmd[0], cmd[1]))
    def register(self, manifestation, usn, st, location, cache_control='max-age=1800', silent=False,
                 host=None):
        """Register a service or device that this SSDP server will
        respond to."""

        logging.info('Registering %s (%s)' % (st, location))

        self.known[usn] = {}
        self.known[usn]['USN'] = usn
        self.known[usn]['LOCATION'] = location
        self.known[usn]['ST'] = st
        self.known[usn]['EXT'] = ''
        self.known[usn]['SERVER'] = "fHDHR Server"
        self.known[usn]['CACHE-CONTROL'] = cache_control

        self.known[usn]['MANIFESTATION'] = manifestation
        self.known[usn]['SILENT'] = silent
        self.known[usn]['HOST'] = host
        self.known[usn]['last-seen'] = time.time()

        if manifestation == 'local' and self.sock:
            self.do_notify(usn)

    def unregister(self, usn):
        logger.info("Un-registering %s" % usn)
        del self.known[usn]

    def is_known(self, usn):
        return usn in self.known
    def send_it(self, response, destination, delay, usn):
        logger.debug('send discovery response delayed by %ds for %s to %r' % (delay, usn, destination))
        try:
            self.sock.sendto(response.encode(), destination)
        except (AttributeError, socket.error) as msg:
            logger.warning("failure sending out discovery response: %r" % msg)
    def discovery_request(self, headers, host_port):
        """Process a discovery request. The response must be sent to
        the address specified by (host, port)."""

        (host, port) = host_port

        logger.info('Discovery request from (%s,%d) for %s' % (host, port, headers['st']))
        logger.info('Discovery request for %s' % headers['st'])

        # Do we know about this service?
        for i in list(self.known.values()):
            if i['MANIFESTATION'] == 'remote':
                continue
            if headers['st'] == 'ssdp:all' and i['SILENT']:
                continue
            if i['ST'] == headers['st'] or headers['st'] == 'ssdp:all':
                response = ['HTTP/1.1 200 OK']

                usn = None
                for k, v in list(i.items()):
                    if k == 'USN':
                        usn = v
                    if k not in ('MANIFESTATION', 'SILENT', 'HOST'):
                        response.append('%s: %s' % (k, v))

                if usn:
                    response.append('DATE: %s' % formatdate(timeval=None, localtime=False, usegmt=True))

                    response.extend(('', ''))
                    delay = random.randint(0, int(headers['mx']))

                    self.send_it('\r\n'.join(response), (host, port), delay, usn)
    def do_notify(self, usn):
        """Do notification"""

        if self.known[usn]['SILENT']:
            return
        logger.info('Sending alive notification for %s' % usn)

        resp = [
            'NOTIFY * HTTP/1.1',
            'HOST: %s:%d' % (SSDP_ADDR, self.ssdp_port),
            'NTS: ssdp:alive',
        ]
        stcpy = dict(list(self.known[usn].items()))
        stcpy['NT'] = stcpy['ST']
        del stcpy['ST']
        del stcpy['MANIFESTATION']
        del stcpy['SILENT']
        del stcpy['HOST']
        del stcpy['last-seen']

        resp.extend([': '.join(x) for x in list(stcpy.items())])
        resp.extend(('', ''))
        logger.debug('do_notify content %s', resp)
        try:
            # the alive notification is sent twice
            self.sock.sendto('\r\n'.join(resp).encode(), (SSDP_ADDR, self.ssdp_port))
            self.sock.sendto('\r\n'.join(resp).encode(), (SSDP_ADDR, self.ssdp_port))
        except (AttributeError, socket.error) as msg:
            logger.warning("failure sending out alive notification: %r" % msg)
    def do_byebye(self, usn):
        """Do byebye"""

        logger.info('Sending byebye notification for %s' % usn)

        resp = [
            'NOTIFY * HTTP/1.1',
            'HOST: %s:%d' % (SSDP_ADDR, self.ssdp_port),
            'NTS: ssdp:byebye',
        ]
        try:
            stcpy = dict(list(self.known[usn].items()))
            stcpy['NT'] = stcpy['ST']
            del stcpy['ST']
            del stcpy['MANIFESTATION']
            del stcpy['SILENT']
            del stcpy['HOST']
            del stcpy['last-seen']
            resp.extend([': '.join(x) for x in list(stcpy.items())])
            resp.extend(('', ''))
            logger.debug('do_byebye content %s', resp)
            if self.sock:
                try:
                    self.sock.sendto('\r\n'.join(resp).encode(), (SSDP_ADDR, self.ssdp_port))
                except (AttributeError, socket.error) as msg:
                    logger.error("failure sending out byebye notification: %r" % msg)
        except KeyError as msg:
            logger.error("error building byebye notification: %r" % msg)
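To spot-check the server above end to end, a throwaway client can multicast an `M-SEARCH` for `upnp:rootdevice` and print whatever `discovery_request` answers with. This is only a test sketch; the multicast address, port, and `MX` value mirror the constants used by the server:

```python
import socket

# Build a standard SSDP M-SEARCH datagram for root devices.
MSEARCH = '\r\n'.join([
    'M-SEARCH * HTTP/1.1',
    'HOST: 239.255.255.250:1900',
    'MAN: "ssdp:discover"',
    'MX: 2',
    'ST: upnp:rootdevice',
    '', '',
])

sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.settimeout(5)
sock.sendto(MSEARCH.encode(), ('239.255.255.250', 1900))
try:
    data, addr = sock.recvfrom(1024)
    print('Reply from %s:%d' % addr)
    print(data.decode())
except socket.timeout:
    print('No SSDP reply received.')
```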