Mirror of https://github.com/fHDHR/fHDHR_NextPVR.git (synced 2025-12-06 12:26:57 -05:00)

Compare commits

45 commits on branch `main`, 2021-01-20 …
Commits in this comparison, by SHA1:

e40543552a, c376cfcaa9, 6ecf54a8d0, 0d185e6704, 09ddda62fc, b1d4e13c31, eaf26457f0, b92247c810, 0fb454016d, ab51ea02a1, 884d4b6e27, 9c72f30a99, 72627510aa, 9caeac2f41, d03e575f0b, 54e1e72104, b16fcf3b51, e86290e9fe, 0de184c242, 5f4092bdc8, d9cda8b1d4, a7c854bcd4, 13faf0845e, 1cf2a7acce, 36712e7ba0, 02e825978b, 9642feecae, b8ce4f4e8a, 22955ce11f, 1aa35b66f0, cbe8deb965, d118ef7807, c9b20743fd, 1b13aedc5e, ddcb04892b, 6076011d1c, 751eaebee9, 53dc0e127d, acf72ad109, 630b8dbf2b, e8aa5bd3f4, 03927ec495, 6a924cbca2, 4bd2ff971e, 63685f4a0e

The combined diff of these commits follows, one fenced block per changed region.
```diff
@@ -15,3 +15,8 @@ Please Check the [Docs](docs/README.md) for Installation information.
 fHDHR is labeled as beta until we reach v1.0.0
 
 Join us in `#fHDHR <irc://irc.freenode.net/#fHDHR>`_ on Freenode.
+
+# !!NOTICE!!
+
+To reduce code duplication between variants, I am moving to a plugin system.
+The normal variant repos will stay active during the transition.
```
```diff
@@ -1,3 +0,0 @@
-# pylama:ignore=W0401,W0611
-from .zap2it import *
-from .tvtv import *
```
```diff
@@ -6,7 +6,7 @@
 "config_web": true
 },
 "method":{
-"value": "blocks",
+"value": "none",
 "config_file": true,
 "config_web": true
 },
```

```diff
@@ -15,11 +15,6 @@
 "config_file": true,
 "config_web": true
 },
-"valid_epg_methods":{
-"value": "None,blocks",
-"config_file": false,
-"config_web": false
-},
 "reverse_days": {
 "value": -1,
 "config_file": true,
```
```diff
@@ -15,26 +15,6 @@
 "config_file": true,
 "config_web": true
 },
-"reporting_manufacturer":{
-"value": "BoronDust",
-"config_file": true,
-"config_web": true
-},
-"reporting_model":{
-"value": "fHDHR",
-"config_file": true,
-"config_web": true
-},
-"reporting_firmware_ver":{
-"value": "20201001",
-"config_file": true,
-"config_web": true
-},
-"reporting_tuner_type":{
-"value": "Antenna",
-"config_file": true,
-"config_web": true
-},
 "device_auth":{
 "value": "fHDHR",
 "config_file": true,
```

```diff
@@ -54,16 +34,6 @@
 "value": "fHDHR",
 "config_file": true,
 "config_web": true
-},
-"tuner_count":{
-"value": 4,
-"config_file": true,
-"config_web": true
-},
-"reporting_firmware_name":{
-"value": "fHDHR",
-"config_file": true,
-"config_web": true
+}
 }
 }
```
```diff
@@ -14,21 +14,11 @@
 "value": "fHDHR",
 "config_file": false,
 "config_web": false
 },
-"dictpopname":{
-"value": "fHDHR",
-"config_file": false,
-"config_web": false
-},
 "reponame":{
 "value": "fHDHR",
 "config_file": false,
 "config_web": false
-},
-"required":{
-"value": "none",
-"config_file": false,
-"config_web": false
 }
 }
 }
```

```diff
@@ -1,9 +0,0 @@
-{
-"rmg":{
-"enabled":{
-"value": true,
-"config_file": true,
-"config_web": false
-}
-}
-}
```
```diff
@@ -5,7 +5,12 @@
 "config_file": true,
 "config_web": true
 },
-"quality": {
+"origin_quality": {
+"value": "none",
+"config_file": true,
+"config_web": true
+},
+"transcode_quality": {
 "value": "none",
 "config_file": true,
 "config_web": true
```

```diff
@@ -15,19 +20,5 @@
 "config_file": true,
 "config_web": true
 }
-},
-"ffmpeg":{
-"path":{
-"value": "ffmpeg",
-"config_file": true,
-"config_web": true
-}
-},
-"vlc":{
-"path":{
-"value": "cvlc",
-"config_file": true,
-"config_web": true
-}
-}
+}
 }
```
`````diff
@@ -32,11 +32,16 @@ Here's the `main` section.
 
 * `method` can be set to `ffmpeg`, `vlc` or `direct`.
 * `bytes_per_read` determines how many bytes of the stream to read before sending the data to your client. Increasing this value may cause longer load times, and lowering it may effect `stuttering`.
+* `origin_quality` can be set to high,medium,low for most variants. Variants that make use of m3u8 will Autoselect High for the direct method if not set. ffmpeg/vlc will determine the best stream on their own. Some Variants can allow alternative values.
+* `transcode_quality` works with ffmpeg/vlc to use fHDHR for handling quality instead of the origin. Valid settings include: heavy,mobile,internet720,internet480,internet360,internet240
 
 
 ````
 [streaming]
 # method = direct
+# bytes_per_read = 1152000
+# origin_quality = None
+# transcode_quality = None
 ````
 
 
`````
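For reference, a fully filled-in `[streaming]` section could look like the sketch below. The values are drawn from the option lists in the hunk above, but which of them a given variant actually accepts depends on the origin, so treat this as an illustrative assumption rather than a recommended default:

```ini
[streaming]
# hand the stream to ffmpeg instead of passing it through directly
method = ffmpeg
# read roughly 1.1 MB per chunk before sending data to the client
bytes_per_read = 1152000
# ask the origin for its high-quality stream where the variant supports it
origin_quality = high
# let fHDHR transcode down to a 720p internet profile via ffmpeg
transcode_quality = internet720
```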
```diff
@@ -1,37 +1,40 @@
 # coding=utf-8
 
-from .originwrapper import OriginServiceWrapper
 from .device import fHDHR_Device
 from .api import fHDHR_API_URLs
 
 import fHDHR.tools
 
 fHDHR_VERSION = "v0.6.0-beta"
 
 
 class fHDHR_INT_OBJ():
 
-    def __init__(self, settings, logger, db):
+    def __init__(self, settings, logger, db, plugins):
         self.version = fHDHR_VERSION
         self.config = settings
         self.logger = logger
         self.db = db
+        self.plugins = plugins
 
         self.web = fHDHR.tools.WebReq()
+        for plugin_name in list(self.plugins.plugins.keys()):
+            self.plugins.plugins[plugin_name].plugin_utils.web = self.web
 
         self.api = fHDHR_API_URLs(settings, self.web)
+        for plugin_name in list(self.plugins.plugins.keys()):
+            self.plugins.plugins[plugin_name].plugin_utils.api = self.api
 
         self.threads = {}
 
 
 class fHDHR_OBJ():
 
-    def __init__(self, settings, logger, db, origin, alternative_epg):
-        self.fhdhr = fHDHR_INT_OBJ(settings, logger, db)
+    def __init__(self, settings, logger, db, plugins):
+        self.fhdhr = fHDHR_INT_OBJ(settings, logger, db, plugins)
 
-        self.originwrapper = OriginServiceWrapper(self.fhdhr, origin)
+        self.fhdhr.origins = fHDHR.origins.Origins(self.fhdhr)
 
-        self.device = fHDHR_Device(self.fhdhr, self.originwrapper, alternative_epg)
+        self.device = fHDHR_Device(self.fhdhr, self.fhdhr.origins)
 
     def __getattr__(self, name):
         ''' will only get called for undefined attributes '''
```
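The two loops added above only rely on the plugins handler exposing a `plugins` dict keyed by plugin name, where each entry carries a `plugin_utils` object that accepts arbitrary attributes. A minimal sketch of that assumed shape follows; everything beyond the names visible in the diff (`plugins`, `plugin_utils`, `manifest`, `name`) is hypothetical:

```python
class PluginUtils:
    """Bag of shared helpers handed to a plugin; fHDHR_INT_OBJ later attaches
    .web and .api onto this, and fHDHR_Device attaches device objects."""
    pass


class LoadedPlugin:
    """One discovered plugin, as the loops above expect to find it."""

    def __init__(self, name, manifest):
        self.name = name                    # hypothetical example: "NextPVR"
        self.manifest = manifest            # hypothetical example: {"type": "origin"}
        self.plugin_utils = PluginUtils()   # receives the .web / .api assignments


class PluginsHandler:
    """Hypothetical stand-in for fHDHR.plugins.PluginsHandler as used above."""

    def __init__(self):
        self.plugins = {}                   # plugin name -> LoadedPlugin
```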
```diff
@@ -7,6 +7,8 @@ from fHDHR import fHDHR_VERSION, fHDHR_OBJ
 import fHDHR.exceptions
 import fHDHR.config
 import fHDHR.logger
+import fHDHR.plugins
+import fHDHR.origins
 from fHDHR.db import fHDHRdb
 
 ERR_CODE = 1
@@ -25,15 +27,15 @@ def build_args_parser():
     return parser.parse_args()
 
 
-def get_configuration(args, script_dir, origin, fHDHR_web):
+def get_configuration(args, script_dir, fHDHR_web):
     if not os.path.isfile(args.cfg):
         raise fHDHR.exceptions.ConfigurationNotFound(filename=args.cfg)
-    return fHDHR.config.Config(args.cfg, script_dir, origin, fHDHR_web)
+    return fHDHR.config.Config(args.cfg, script_dir, fHDHR_web)
 
 
-def run(settings, logger, db, script_dir, fHDHR_web, origin, alternative_epg):
+def run(settings, logger, db, script_dir, fHDHR_web, plugins):
 
-    fhdhr = fHDHR_OBJ(settings, logger, db, origin, alternative_epg)
+    fhdhr = fHDHR_OBJ(settings, logger, db, plugins)
     fhdhrweb = fHDHR_web.fHDHR_HTTP_Server(fhdhr)
 
     try:
@@ -64,33 +66,50 @@ def run(settings, logger, db, script_dir, fHDHR_web, origin, alternative_epg):
         return ERR_CODE
 
 
-def start(args, script_dir, fHDHR_web, origin, alternative_epg):
+def start(args, script_dir, fHDHR_web):
     """Get Configuration for fHDHR and start"""
 
     try:
-        settings = get_configuration(args, script_dir, origin, fHDHR_web)
+        settings = get_configuration(args, script_dir, fHDHR_web)
     except fHDHR.exceptions.ConfigurationError as e:
        print(e)
        return ERR_CODE_NO_RESTART
 
+    # Find Plugins and import their default configs
+    plugins = fHDHR.plugins.PluginsHandler(settings)
+
+    # Apply User Configuration
+    settings.user_config()
+    settings.config_verification()
+
+    # Setup Logging
     logger = fHDHR.logger.Logger(settings)
 
+    # Setup Database
     db = fHDHRdb(settings)
 
-    return run(settings, logger, db, script_dir, fHDHR_web, origin, alternative_epg)
+    # Setup Plugins
+    plugins.load_plugins(logger, db)
+    plugins.setup()
+    settings.config_verification_plugins()
+
+    if not len([x for x in list(plugins.plugins.keys()) if plugins.plugins[x].type == "origin"]):
+        print("No Origin Plugins found.")
+        return ERR_CODE
+
+    return run(settings, logger, db, script_dir, fHDHR_web, plugins)
 
 
-def main(script_dir, fHDHR_web, origin, alternative_epg):
+def main(script_dir, fHDHR_web):
     """fHDHR run script entry point"""
 
     print("Loading fHDHR %s" % fHDHR_VERSION)
     print("Loading fHDHR_web %s" % fHDHR_web.fHDHR_web_VERSION)
-    print("Loading Origin Service: %s %s" % (origin.ORIGIN_NAME, origin.ORIGIN_VERSION))
 
     try:
         args = build_args_parser()
         while True:
-            returned_code = start(args, script_dir, fHDHR_web, origin, alternative_epg)
+            returned_code = start(args, script_dir, fHDHR_web)
             if returned_code not in ["restart"]:
                 return returned_code
     except KeyboardInterrupt:
```
```diff
@@ -3,7 +3,6 @@ import sys
 import random
 import configparser
 import pathlib
-import subprocess
 import platform
 import json
 
@@ -14,31 +13,28 @@ from fHDHR.tools import isint, isfloat, is_arithmetic, is_docker
 
 class Config():
 
-    def __init__(self, filename, script_dir, origin, fHDHR_web):
-        self.origin = origin
+    def __init__(self, filename, script_dir, fHDHR_web):
         self.fHDHR_web = fHDHR_web
 
         self.internal = {}
         self.conf_default = {}
         self.dict = {}
+        self.internal["versions"] = {}
         self.config_file = filename
 
-        self.initial_load(script_dir)
-        self.config_verification()
+        self.core_setup(script_dir)
 
-    def initial_load(self, script_dir):
+    def core_setup(self, script_dir):
 
         data_dir = pathlib.Path(script_dir).joinpath('data')
+        internal_plugins_dir = pathlib.Path(script_dir).joinpath('plugins')
         fHDHR_web_dir = pathlib.Path(script_dir).joinpath('fHDHR_web')
         www_dir = pathlib.Path(fHDHR_web_dir).joinpath('www_dir')
-        origin_dir = pathlib.Path(script_dir).joinpath('origin')
 
         self.internal["paths"] = {
             "script_dir": script_dir,
             "data_dir": data_dir,
-            "alternative_epg": pathlib.Path(script_dir).joinpath('alternative_epg'),
-            "origin": origin_dir,
-            "origin_web": pathlib.Path(origin_dir).joinpath('origin_web'),
+            "plugins_dir": [internal_plugins_dir],
             "cache_dir": pathlib.Path(data_dir).joinpath('cache'),
             "internal_config": pathlib.Path(data_dir).joinpath('internal_config'),
             "fHDHR_web_dir": fHDHR_web_dir,
```
```diff
@@ -57,38 +53,61 @@ class Config():
             if str(file_item_path).endswith("_conf.json"):
                 self.read_json_config(file_item_path)
 
-        for dir_type in ["alternative_epg", "origin"]:
-            for file_item in os.listdir(self.internal["paths"][dir_type]):
-                file_item_path = pathlib.Path(self.internal["paths"][dir_type]).joinpath(file_item)
-                if file_item_path.is_dir():
-                    for sub_file_item in os.listdir(file_item_path):
-                        sub_file_item_path = pathlib.Path(file_item_path).joinpath(sub_file_item)
-                        if str(sub_file_item_path).endswith("_conf.json"):
-                            self.read_json_config(sub_file_item_path)
-                else:
-                    if str(file_item_path).endswith("_conf.json"):
-                        self.read_json_config(file_item_path)
-
-        print("Loading Configuration File: %s" % self.config_file)
-        self.read_ini_config(self.config_file)
+        self.dict["epg"]["valid_methods"] = {None: {}}
+        self.dict["origins"] = {}
+        self.dict["origins"]["valid_methods"] = {}
+        self.dict["streaming"]["valid_methods"] = {"direct": {}}
+        self.dict["plugin_web_paths"] = {}
 
         self.load_versions()
 
+    def register_web_path(self, name, path, plugin_dict_name):
+        self.dict["plugin_web_paths"][name.lower()] = {
+            "name": name,
+            "namespace": name.lower(),
+            "path": path,
+            "plugin": plugin_dict_name
+            }
+
+    def register_valid_origin_method(self, method_item):
+        self.dict["origins"]["valid_methods"][method_item.lower()] = {
+            "name": method_item,
+            "namespace": method_item.lower(),
+            }
+
+    def register_valid_streaming_method(self, method_item, plugin_dict_name):
+        self.dict["streaming"]["valid_methods"][method_item.lower()] = {
+            "name": method_item,
+            "namespace": method_item.lower(),
+            "plugin": plugin_dict_name
+            }
+
+    def register_valid_epg_method(self, method_item, plugin_dict_name):
+        self.dict["epg"]["valid_methods"][method_item.lower()] = {
+            "name": method_item,
+            "namespace": method_item.lower(),
+            "plugin": plugin_dict_name
+            }
+
+    def register_version(self, item_name, item_version, item_type):
+        self.internal["versions"][item_name] = {
+            "name": item_name,
+            "version": item_version,
+            "type": item_type
+            }
+
+    def import_conf_json(self, file_item_path):
+        self.read_json_config(file_item_path)
+
     def load_versions(self):
 
-        self.internal["versions"] = {}
-
-        self.internal["versions"]["fHDHR"] = fHDHR_VERSION
-
-        self.internal["versions"]["fHDHR_web"] = self.fHDHR_web.fHDHR_web_VERSION
-
-        self.internal["versions"][self.origin.ORIGIN_NAME] = self.origin.ORIGIN_VERSION
-
-        self.internal["versions"]["Python"] = sys.version
+        self.register_version("fHDHR", fHDHR_VERSION, "fHDHR")
+        self.register_version("fHDHR_web", self.fHDHR_web.fHDHR_web_VERSION, "fHDHR")
+        self.register_version("Python", sys.version, "env")
 
         opersystem = platform.system()
-        self.internal["versions"]["Operating System"] = opersystem
+        self.register_version("Operating System", opersystem, "env")
         if opersystem in ["Linux", "Darwin"]:
             # Linux/Mac
             if os.getuid() == 0 or os.geteuid() == 0:
```
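The new `register_*` helpers give plugins a single place to announce what they provide, instead of fHDHR scanning `alternative_epg`/`origin` directories for `_conf.json` files. A hypothetical plugin setup hook might call them roughly as follows, assuming it is handed the `Config` instance shown above; the plugin name, path, and values are made up for illustration:

```python
def setup(config):
    # config: the fHDHR Config instance (how it reaches the plugin is an assumption).

    # Record this plugin's version alongside the fHDHR / fHDHR_web / env versions.
    config.register_version("ExamplePlugin", "v0.1.0", "plugin")

    # Make the plugin selectable as a streaming method and as an EPG method.
    config.register_valid_streaming_method("examplestream", "exampleplugin")
    config.register_valid_epg_method("exampleepg", "exampleplugin")

    # Publish a web path served from the plugin's bundled pages (hypothetical path).
    config.register_web_path("ExamplePlugin", "/opt/fHDHR/plugins/example/web", "exampleplugin")
```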
```diff
@@ -101,43 +120,90 @@ class Config():
             print("Uncommon Operating System, use at your own risk.")
 
         isdocker = is_docker()
-        self.internal["versions"]["Docker"] = isdocker
-
-        if self.dict["streaming"]["method"] == "ffmpeg":
-            try:
-                ffmpeg_command = [self.dict["ffmpeg"]["path"],
-                                  "-version",
-                                  "pipe:stdout"
-                                  ]
-                ffmpeg_proc = subprocess.Popen(ffmpeg_command, stdout=subprocess.PIPE)
-                ffmpeg_version = ffmpeg_proc.stdout.read()
-                ffmpeg_proc.terminate()
-                ffmpeg_proc.communicate()
-                ffmpeg_proc.kill()
-                ffmpeg_version = ffmpeg_version.decode().split("version ")[1].split(" ")[0]
-            except FileNotFoundError:
-                ffmpeg_version = "Missing"
-                print("Failed to find ffmpeg.")
-            self.internal["versions"]["ffmpeg"] = ffmpeg_version
-
-        if self.dict["streaming"]["method"] == "vlc":
-            try:
-                vlc_command = [self.dict["vlc"]["path"],
-                               "--version",
-                               "pipe:stdout"
-                               ]
-                vlc_proc = subprocess.Popen(vlc_command, stdout=subprocess.PIPE)
-                vlc_version = vlc_proc.stdout.read()
-                vlc_proc.terminate()
-                vlc_proc.communicate()
-                vlc_proc.kill()
-                vlc_version = vlc_version.decode().split("version ")[1].split('\n')[0]
-            except FileNotFoundError:
-                vlc_version = "Missing"
-                print("Failed to find vlc.")
-            self.internal["versions"]["vlc"] = vlc_version
+        self.register_version("Docker", isdocker, "env")
+
+    def user_config(self):
+        print("Loading Configuration File: %s" % self.config_file)
+        self.read_ini_config(self.config_file)
+
+    def config_verification_plugins(self):
+        required_missing = {}
+        # create dict and combine items
+        for config_section in list(self.conf_default.keys()):
+            for config_item in list(self.conf_default[config_section].keys()):
+                if self.conf_default[config_section][config_item]["required"]:
+                    if not self.dict[config_section][config_item]:
+                        if config_section not in list(required_missing.keys()):
+                            required_missing[config_section] = []
+                        required_missing[config_section].append(config_item)
+        for config_section in list(required_missing.keys()):
+            print("Warning! Required configuration options missing: [%s]%s" % (config_section, ", ".join(required_missing[config_section])))
+
+        if self.dict["epg"]["method"] and self.dict["epg"]["method"] not in ["None"]:
+            if isinstance(self.dict["epg"]["method"], str):
+                self.dict["epg"]["method"] = [self.dict["epg"]["method"]]
+            epg_methods = []
+            for epg_method in self.dict["epg"]["method"]:
+                if epg_method in list(self.dict["epg"]["valid_methods"].keys()):
+                    epg_methods.append(epg_method)
+                elif epg_method in list(self.dict["origins"]["valid_methods"].keys()):
+                    epg_methods.append(epg_method)
+                else:
+                    raise fHDHR.exceptions.ConfigurationError("Invalid EPG Method. Exiting...")
+        if self.dict["epg"]["method"]:
+            self.dict["epg"]["def_method"] = self.dict["epg"]["method"][0]
+        else:
+            self.dict["epg"]["def_method"] = None
+
+        if self.dict["streaming"]["method"] not in self.dict["streaming"]["valid_methods"]:
+            raise fHDHR.exceptions.ConfigurationError("Invalid stream type. Exiting...")
+
+    def config_verification(self):
+
+        if not self.dict["main"]["uuid"]:
+            self.dict["main"]["uuid"] = ''.join(random.choice("hijklmnopqrstuvwxyz") for i in range(8))
+            self.write('uuid', self.dict["main"]["uuid"], 'main')
+
+        if self.dict["main"]["cache_dir"]:
+            if not pathlib.Path(self.dict["main"]["cache_dir"]).is_dir():
+                raise fHDHR.exceptions.ConfigurationError("Invalid Cache Directory. Exiting...")
+            self.internal["paths"]["cache_dir"] = pathlib.Path(self.dict["main"]["cache_dir"])
+        cache_dir = self.internal["paths"]["cache_dir"]
+
+        logs_dir = pathlib.Path(cache_dir).joinpath('logs')
+        self.internal["paths"]["logs_dir"] = logs_dir
+        if not logs_dir.is_dir():
+            logs_dir.mkdir()
+
+        self.dict["database"]["path"] = pathlib.Path(cache_dir).joinpath('fhdhr.db')
+
+        if not self.dict["fhdhr"]["discovery_address"] and self.dict["fhdhr"]["address"] != "0.0.0.0":
+            self.dict["fhdhr"]["discovery_address"] = self.dict["fhdhr"]["address"]
+        if not self.dict["fhdhr"]["discovery_address"] or self.dict["fhdhr"]["discovery_address"] == "0.0.0.0":
+            self.dict["fhdhr"]["discovery_address"] = None
+
+    def get_real_conf_value(self, key, confvalue):
+        if not confvalue:
+            confvalue = None
+        elif key == "xmltv_offset":
+            confvalue = str(confvalue)
+        elif str(confvalue) in ["0"]:
+            confvalue = 0
+        elif isint(confvalue):
+            confvalue = int(confvalue)
+        elif isfloat(confvalue):
+            confvalue = float(confvalue)
+        elif is_arithmetic(confvalue):
+            confvalue = eval(confvalue)
+        elif "," in confvalue:
+            confvalue = confvalue.split(",")
+        elif str(confvalue).lower() in ["none", ""]:
+            confvalue = None
+        elif str(confvalue).lower() in ["false"]:
+            confvalue = False
+        elif str(confvalue).lower() in ["true"]:
+            confvalue = True
+        return confvalue
 
     def read_json_config(self, conffilepath):
         with open(conffilepath, 'r') as jsonconf:
```
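Centralising the coercion in `get_real_conf_value` keeps the JSON and ini readers interpreting values identically. Based on the branches above, inputs would map roughly as follows; this is a sketch assuming `config` is the `Config` instance, the keys are illustrative, and the int/float/arithmetic outcomes ultimately depend on the `isint`/`isfloat`/`is_arithmetic` helpers:

```python
config.get_real_conf_value("address", "")              # falsy value          -> None
config.get_real_conf_value("method", "none")           # "none"/"None"        -> None
config.get_real_conf_value("xmltv_offset", "+0000")    # always kept a string -> "+0000"
config.get_real_conf_value("tuner_count", "0")         # literal "0"          -> 0
config.get_real_conf_value("port", "5004")             # isint()              -> 5004
config.get_real_conf_value("method", "origin,blocks")  # comma split          -> ["origin", "blocks"]
config.get_real_conf_value("enabled", "True")          # boolean strings      -> True
```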
```diff
@@ -155,29 +221,13 @@ class Config():
                 if key not in list(self.conf_default[section].keys()):
                     self.conf_default[section][key] = {}
 
-                confvalue = confimport[section][key]["value"]
-                if key == "xmltv_offset":
-                    confvalue = str(confvalue)
-                elif isint(confvalue):
-                    confvalue = int(confvalue)
-                elif isfloat(confvalue):
-                    confvalue = float(confvalue)
-                elif is_arithmetic(confvalue):
-                    confvalue = eval(confvalue)
-                elif "," in confvalue:
-                    confvalue = confvalue.split(",")
-                elif str(confvalue).lower() in ["none"]:
-                    confvalue = None
-                elif str(confvalue).lower() in ["false"]:
-                    confvalue = False
-                elif str(confvalue).lower() in ["true"]:
-                    confvalue = True
+                confvalue = self.get_real_conf_value(key, confimport[section][key]["value"])
 
                 self.dict[section][key] = confvalue
 
                 self.conf_default[section][key]["value"] = confvalue
 
-                for config_option in ["config_web_hidden", "config_file", "config_web"]:
+                for config_option in ["config_web_hidden", "config_file", "config_web", "required"]:
                     if config_option not in list(confimport[section][key].keys()):
                         config_option_value = False
                     else:
@@ -197,24 +247,7 @@ class Config():
             if each_section.lower() not in list(self.dict.keys()):
                 self.dict[each_section.lower()] = {}
             for (each_key, each_val) in config_handler.items(each_section):
-                if not each_val:
-                    each_val = None
-                elif each_key == "xmltv_offset":
-                    each_val = str(each_val)
-                elif each_val.lower() in ["none"]:
-                    each_val = None
-                elif each_val.lower() in ["false"]:
-                    each_val = False
-                elif each_val.lower() in ["true"]:
-                    each_val = True
-                elif isint(each_val):
-                    each_val = int(each_val)
-                elif isfloat(each_val):
-                    each_val = float(each_val)
-                elif is_arithmetic(each_val):
-                    each_val = eval(each_val)
-                elif "," in each_val:
-                    each_val = each_val.split(",")
+                each_val = self.get_real_conf_value(each_key, each_val)
 
                 import_val = True
                 if each_section in list(self.conf_default.keys()):
```
```diff
@@ -225,7 +258,7 @@ class Config():
                 if import_val:
                     self.dict[each_section.lower()][each_key.lower()] = each_val
 
-    def write(self, section, key, value):
+    def write(self, key, value, section):
 
         if not value:
             value = None
@@ -242,10 +275,7 @@ class Config():
         elif isinstance(value, list):
             ",".join(value)
 
-        if section == self.dict["main"]["dictpopname"]:
-            self.dict["origin"][key] = value
-        else:
-            self.dict[section][key] = value
+        self.dict[section][key] = value
 
         config_handler = configparser.ConfigParser()
         config_handler.read(self.config_file)
@@ -258,66 +288,6 @@ class Config():
         with open(self.config_file, 'w') as config_file:
             config_handler.write(config_file)
 
-    def config_verification(self):
-
-        if self.dict["main"]["required"]:
-            required_missing = []
-            if isinstance(self.dict["main"]["required"], str):
-                self.dict["main"]["required"] = [self.dict["main"]["required"]]
-            if len(self.dict["main"]["required"]):
-                for req_item in self.dict["main"]["required"]:
-                    req_section = req_item.split("/")[0]
-                    req_key = req_item.split("/")[1]
-                    if not self.dict[req_section][req_key]:
-                        required_missing.append(req_item)
-            if len(required_missing):
-                raise fHDHR.exceptions.ConfigurationError("Required configuration options missing: %s" % ", ".join(required_missing))
-
-        self.dict["origin"] = self.dict.pop(self.dict["main"]["dictpopname"])
-
-        if isinstance(self.dict["epg"]["valid_epg_methods"], str):
-            self.dict["epg"]["valid_epg_methods"] = [self.dict["epg"]["valid_epg_methods"]]
-
-        if self.dict["epg"]["method"] and self.dict["epg"]["method"] not in ["None"]:
-            if isinstance(self.dict["epg"]["method"], str):
-                self.dict["epg"]["method"] = [self.dict["epg"]["method"]]
-            epg_methods = []
-            for epg_method in self.dict["epg"]["method"]:
-                if epg_method == self.dict["main"]["dictpopname"] or epg_method == "origin":
-                    epg_methods.append("origin")
-                elif epg_method in ["None"]:
-                    raise fHDHR.exceptions.ConfigurationError("Invalid EPG Method. Exiting...")
-                elif epg_method in self.dict["epg"]["valid_epg_methods"]:
-                    epg_methods.append(epg_method)
-                else:
-                    raise fHDHR.exceptions.ConfigurationError("Invalid EPG Method. Exiting...")
-            self.dict["epg"]["def_method"] = self.dict["epg"]["method"][0]
-
-        if not self.dict["main"]["uuid"]:
-            self.dict["main"]["uuid"] = ''.join(random.choice("hijklmnopqrstuvwxyz") for i in range(8))
-            self.write('main', 'uuid', self.dict["main"]["uuid"])
-
-        if self.dict["main"]["cache_dir"]:
-            if not pathlib.Path(self.dict["main"]["cache_dir"]).is_dir():
-                raise fHDHR.exceptions.ConfigurationError("Invalid Cache Directory. Exiting...")
-            self.internal["paths"]["cache_dir"] = pathlib.Path(self.dict["main"]["cache_dir"])
-        cache_dir = self.internal["paths"]["cache_dir"]
-
-        logs_dir = pathlib.Path(cache_dir).joinpath('logs')
-        self.internal["paths"]["logs_dir"] = logs_dir
-        if not logs_dir.is_dir():
-            logs_dir.mkdir()
-
-        self.dict["database"]["path"] = pathlib.Path(cache_dir).joinpath('fhdhr.db')
-
-        if self.dict["streaming"]["method"] not in ["direct", "ffmpeg", "vlc"]:
-            raise fHDHR.exceptions.ConfigurationError("Invalid stream type. Exiting...")
-
-        if not self.dict["fhdhr"]["discovery_address"] and self.dict["fhdhr"]["address"] != "0.0.0.0":
-            self.dict["fhdhr"]["discovery_address"] = self.dict["fhdhr"]["address"]
-        if not self.dict["fhdhr"]["discovery_address"] or self.dict["fhdhr"]["discovery_address"] == "0.0.0.0":
-            self.dict["fhdhr"]["discovery_address"] = None
-
     def __getattr__(self, name):
         ''' will only get called for undefined attributes '''
         if name in list(self.dict.keys()):
```
```diff
@@ -32,28 +32,10 @@ MYSQL_TABLE_ARGS = {'mysql_engine': 'InnoDB',
                     'mysql_collate': 'utf8mb4_unicode_ci'}
 
 
-class ChannelValues(BASE):
-    __tablename__ = 'channel_values'
+class PluginValues(BASE):
+    __tablename__ = 'plugin_values'
     __table_args__ = MYSQL_TABLE_ARGS
-    channel = Column(String(255), primary_key=True)
-    namespace = Column(String(255), primary_key=True)
-    key = Column(String(255), primary_key=True)
-    value = Column(Text())
-
-
-class ProgramValues(BASE):
-    __tablename__ = 'program_values'
-    __table_args__ = MYSQL_TABLE_ARGS
-    program = Column(String(255), primary_key=True)
-    namespace = Column(String(255), primary_key=True)
-    key = Column(String(255), primary_key=True)
-    value = Column(Text())
-
-
-class CacheValues(BASE):
-    __tablename__ = 'cache_values'
-    __table_args__ = MYSQL_TABLE_ARGS
-    cacheitem = Column(String(255), primary_key=True)
+    pluginitem = Column(String(255), primary_key=True)
     namespace = Column(String(255), primary_key=True)
     key = Column(String(255), primary_key=True)
     value = Column(Text())
```
```diff
@@ -148,198 +130,6 @@ class fHDHRdb(object):
     def get_uri(self):
         return self.url
 
-    # Channel Values
-
-    def set_channel_value(self, channel, key, value, namespace='default'):
-        channel = channel.lower()
-        value = json.dumps(value, ensure_ascii=False)
-        session = self.ssession()
-        try:
-            result = session.query(ChannelValues) \
-                .filter(ChannelValues.channel == channel)\
-                .filter(ChannelValues.namespace == namespace)\
-                .filter(ChannelValues.key == key) \
-                .one_or_none()
-            # ChannelValues exists, update
-            if result:
-                result.value = value
-                session.commit()
-            # DNE - Insert
-            else:
-                new_channelvalue = ChannelValues(channel=channel, namespace=namespace, key=key, value=value)
-                session.add(new_channelvalue)
-                session.commit()
-        except SQLAlchemyError:
-            session.rollback()
-            raise
-        finally:
-            session.close()
-
-    def get_channel_value(self, channel, key, namespace='default'):
-        channel = channel.lower()
-        session = self.ssession()
-        try:
-            result = session.query(ChannelValues) \
-                .filter(ChannelValues.channel == channel)\
-                .filter(ChannelValues.namespace == namespace)\
-                .filter(ChannelValues.key == key) \
-                .one_or_none()
-            if result is not None:
-                result = result.value
-            return _deserialize(result)
-        except SQLAlchemyError:
-            session.rollback()
-            raise
-        finally:
-            session.close()
-
-    def delete_channel_value(self, channel, key, namespace='default'):
-        channel = channel.lower()
-        session = self.ssession()
-        try:
-            result = session.query(ChannelValues) \
-                .filter(ChannelValues.channel == channel)\
-                .filter(ChannelValues.namespace == namespace)\
-                .filter(ChannelValues.key == key) \
-                .one_or_none()
-            # ChannelValues exists, delete
-            if result:
-                session.delete(result)
-                session.commit()
-        except SQLAlchemyError:
-            session.rollback()
-            raise
-        finally:
-            session.close()
-
-    # Program Values
-
-    def set_program_value(self, program, key, value, namespace='default'):
-        program = program.lower()
-        value = json.dumps(value, ensure_ascii=False)
-        session = self.ssession()
-        try:
-            result = session.query(ProgramValues) \
-                .filter(ProgramValues.program == program)\
-                .filter(ProgramValues.namespace == namespace)\
-                .filter(ProgramValues.key == key) \
-                .one_or_none()
-            # ProgramValue exists, update
-            if result:
-                result.value = value
-                session.commit()
-            # DNE - Insert
-            else:
-                new_programvalue = ProgramValues(program=program, namespace=namespace, key=key, value=value)
-                session.add(new_programvalue)
-                session.commit()
-        except SQLAlchemyError:
-            session.rollback()
-            raise
-        finally:
-            session.close()
-
-    def get_program_value(self, program, key, namespace='default'):
-        program = program.lower()
-        session = self.ssession()
-        try:
-            result = session.query(ProgramValues) \
-                .filter(ProgramValues.program == program)\
-                .filter(ProgramValues.namespace == namespace)\
-                .filter(ProgramValues.key == key) \
-                .one_or_none()
-            if result is not None:
-                result = result.value
-            return _deserialize(result)
-        except SQLAlchemyError:
-            session.rollback()
-            raise
-        finally:
-            session.close()
-
-    def delete_program_value(self, program, key, namespace='default'):
-        program = program.lower()
-        session = self.ssession()
-        try:
-            result = session.query(ProgramValues) \
-                .filter(ProgramValues.program == program)\
-                .filter(ProgramValues.namespace == namespace)\
-                .filter(ProgramValues.key == key) \
-                .one_or_none()
-            # ProgramValue exists, delete
-            if result:
-                session.delete(result)
-                session.commit()
-        except SQLAlchemyError:
-            session.rollback()
-            raise
-        finally:
-            session.close()
-
-    # Cache Values
-
-    def set_cacheitem_value(self, cacheitem, key, value, namespace='default'):
-        cacheitem = cacheitem.lower()
-        value = json.dumps(value, ensure_ascii=False)
-        session = self.ssession()
-        try:
-            result = session.query(CacheValues) \
-                .filter(CacheValues.cacheitem == cacheitem)\
-                .filter(CacheValues.namespace == namespace)\
-                .filter(CacheValues.key == key) \
-                .one_or_none()
-            # ProgramValue exists, update
-            if result:
-                result.value = value
-                session.commit()
-            # DNE - Insert
-            else:
-                new_cacheitemvalue = CacheValues(cacheitem=cacheitem, namespace=namespace, key=key, value=value)
-                session.add(new_cacheitemvalue)
-                session.commit()
-        except SQLAlchemyError:
-            session.rollback()
-            raise
-        finally:
-            session.close()
-
-    def get_cacheitem_value(self, cacheitem, key, namespace='default'):
-        cacheitem = cacheitem.lower()
-        session = self.ssession()
-        try:
-            result = session.query(CacheValues) \
-                .filter(CacheValues.cacheitem == cacheitem)\
-                .filter(CacheValues.namespace == namespace)\
-                .filter(CacheValues.key == key) \
-                .one_or_none()
-            if result is not None:
-                result = result.value
-            return _deserialize(result)
-        except SQLAlchemyError:
-            session.rollback()
-            raise
-        finally:
-            session.close()
-
-    def delete_cacheitem_value(self, cacheitem, key, namespace='default'):
-        cacheitem = cacheitem.lower()
-        session = self.ssession()
-        try:
-            result = session.query(CacheValues) \
-                .filter(CacheValues.cacheitem == cacheitem)\
-                .filter(CacheValues.namespace == namespace)\
-                .filter(CacheValues.key == key) \
-                .one_or_none()
-            # ProgramValue exists, delete
-            if result:
-                session.delete(result)
-                session.commit()
-        except SQLAlchemyError:
-            session.rollback()
-            raise
-        finally:
-            session.close()
-
     # fHDHR Values
 
     def set_fhdhr_value(self, item, key, value, namespace='default'):
```
```diff
@@ -358,8 +148,8 @@ class fHDHRdb(object):
                 session.commit()
             # DNE - Insert
             else:
-                new_cacheitemvalue = fHDHRValues(item=item, namespace=namespace, key=key, value=value)
-                session.add(new_cacheitemvalue)
+                new_pluginitemvalue = fHDHRValues(item=item, namespace=namespace, key=key, value=value)
+                session.add(new_pluginitemvalue)
                 session.commit()
         except SQLAlchemyError:
             session.rollback()
```
```diff
@@ -403,3 +193,67 @@ class fHDHRdb(object):
             raise
         finally:
             session.close()
+
+    # Plugin Values
+
+    def set_plugin_value(self, pluginitem, key, value, namespace='default'):
+        pluginitem = pluginitem.lower()
+        value = json.dumps(value, ensure_ascii=False)
+        session = self.ssession()
+        try:
+            result = session.query(PluginValues) \
+                .filter(PluginValues.pluginitem == pluginitem)\
+                .filter(PluginValues.namespace == namespace)\
+                .filter(PluginValues.key == key) \
+                .one_or_none()
+            # ProgramValue exists, update
+            if result:
+                result.value = value
+                session.commit()
+            # DNE - Insert
+            else:
+                new_pluginitemvalue = PluginValues(pluginitem=pluginitem, namespace=namespace, key=key, value=value)
+                session.add(new_pluginitemvalue)
+                session.commit()
+        except SQLAlchemyError:
+            session.rollback()
+            raise
+        finally:
+            session.close()
+
+    def get_plugin_value(self, pluginitem, key, namespace='default'):
+        pluginitem = pluginitem.lower()
+        session = self.ssession()
+        try:
+            result = session.query(PluginValues) \
+                .filter(PluginValues.pluginitem == pluginitem)\
+                .filter(PluginValues.namespace == namespace)\
+                .filter(PluginValues.key == key) \
+                .one_or_none()
+            if result is not None:
+                result = result.value
+            return _deserialize(result)
+        except SQLAlchemyError:
+            session.rollback()
+            raise
+        finally:
+            session.close()
+
+    def delete_plugin_value(self, pluginitem, key, namespace='default'):
+        pluginitem = pluginitem.lower()
+        session = self.ssession()
+        try:
+            result = session.query(PluginValues) \
+                .filter(PluginValues.pluginitem == pluginitem)\
+                .filter(PluginValues.namespace == namespace)\
+                .filter(PluginValues.key == key) \
+                .one_or_none()
+            # ProgramValue exists, delete
+            if result:
+                session.delete(result)
+                session.commit()
+        except SQLAlchemyError:
+            session.rollback()
+            raise
+        finally:
+            session.close()
```
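The new `PluginValues` table gives plugins the same kind of small key/value persistence that the removed channel/program/cache helpers provided. A hedged usage sketch, assuming `db` is an `fHDHRdb` instance; the item, key, and namespace names are made up for illustration:

```python
# Upserts on the (pluginitem, namespace, key) primary key; the value is
# json.dumps'd before storage.
db.set_plugin_value("exampleplugin", "last_scan", 1611100800, namespace="epg")

# Reads back through _deserialize(); a missing row comes back as None.
last_scan = db.get_plugin_value("exampleplugin", "last_scan", namespace="epg")

# Deletes the row if it exists.
db.delete_plugin_value("exampleplugin", "last_scan", namespace="epg")
```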
```diff
@@ -3,16 +3,16 @@ from .epg import EPG
 from .tuners import Tuners
 from .images import imageHandler
 from .ssdp import SSDPServer
-from .cluster import fHDHR_Cluster
 
 
 class fHDHR_Device():
 
-    def __init__(self, fhdhr, originwrapper, alternative_epg):
+    def __init__(self, fhdhr, origins):
+        self.fhdhr = fhdhr
 
-        self.channels = Channels(fhdhr, originwrapper)
+        self.channels = Channels(fhdhr, origins)
 
-        self.epg = EPG(fhdhr, self.channels, originwrapper, alternative_epg)
+        self.epg = EPG(fhdhr, self.channels, origins)
 
         self.tuners = Tuners(fhdhr, self.epg, self.channels)
 
@@ -20,4 +20,15 @@ class fHDHR_Device():
 
         self.ssdp = SSDPServer(fhdhr)
 
-        self.cluster = fHDHR_Cluster(fhdhr, self.ssdp)
+        self.interfaces = {}
+
+        for plugin_name in list(self.fhdhr.plugins.plugins.keys()):
+            if self.fhdhr.plugins.plugins[plugin_name].manifest["type"] == "interface":
+                method = self.fhdhr.plugins.plugins[plugin_name].name.lower()
+                plugin_utils = self.fhdhr.plugins.plugins[plugin_name].plugin_utils
+                plugin_utils.channels = self.channels
+                plugin_utils.epg = self.epg
+                plugin_utils.tuners = self.tuners
+                plugin_utils.images = self.images
+                plugin_utils.ssdp = self.ssdp
+                self.interfaces[method] = self.fhdhr.plugins.plugins[plugin_name].Plugin_OBJ(fhdhr, plugin_utils)
```
@ -8,57 +8,111 @@ from .chan_ident import Channel_IDs
|
|||||||
|
|
||||||
class Channels():
|
class Channels():
|
||||||
|
|
||||||
def __init__(self, fhdhr, originwrapper):
|
def __init__(self, fhdhr, origins):
|
||||||
self.fhdhr = fhdhr
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
self.origin = originwrapper
|
self.origins = origins
|
||||||
|
|
||||||
self.id_system = Channel_IDs(fhdhr)
|
self.id_system = Channel_IDs(fhdhr, origins)
|
||||||
|
|
||||||
self.list = {}
|
self.list = {}
|
||||||
|
for origin in list(self.origins.origins_dict.keys()):
|
||||||
|
self.list[origin] = {}
|
||||||
|
|
||||||
self.get_db_channels()
|
self.get_db_channels()
|
||||||
|
|
||||||
def get_channel_obj(self, keyfind, valfind):
|
def get_channel_obj(self, keyfind, valfind, origin=None):
|
||||||
if keyfind == "number":
|
if origin:
|
||||||
return next(self.list[fhdhr_id] for fhdhr_id in [x["id"] for x in self.get_channels()] if self.list[fhdhr_id].number == valfind) or None
|
origin = origin.lower()
|
||||||
|
if keyfind == "number":
|
||||||
|
matches = [self.list[origin][x].dict["id"] for x in list(self.list[origin].keys()) if self.list[origin][x].number == valfind]
|
||||||
|
else:
|
||||||
|
matches = [self.list[origin][x].dict["id"] for x in list(self.list[origin].keys()) if self.list[origin][x].dict[keyfind] == valfind]
|
||||||
|
if len(matches):
|
||||||
|
return self.list[origin][matches[0]]
|
||||||
else:
|
else:
|
||||||
return next(self.list[fhdhr_id] for fhdhr_id in [x["id"] for x in self.get_channels()] if self.list[fhdhr_id].dict[keyfind] == valfind) or None
|
matches = []
|
||||||
|
for origin in list(self.list.keys()):
|
||||||
|
if keyfind == "number":
|
||||||
|
matches = [self.list[origin][x].dict["id"] for x in list(self.list[origin].keys()) if self.list[origin][x].number == valfind]
|
||||||
|
else:
|
||||||
|
matches = [self.list[origin][x].dict["id"] for x in list(self.list[origin].keys()) if self.list[origin][x].dict[keyfind] == valfind]
|
||||||
|
if len(matches):
|
||||||
|
return self.list[origin][matches[0]]
|
||||||
|
if len(matches):
|
||||||
|
return self.list[origin][matches[0]]
|
||||||
|
return None
|
||||||
|
|
||||||
def get_channel_list(self, keyfind):
|
def get_channel_list(self, keyfind, origin=None):
|
||||||
if keyfind == "number":
|
if origin:
|
||||||
return [self.list[x].number for x in [x["id"] for x in self.get_channels()]]
|
if keyfind == "number":
|
||||||
|
return [self.list[origin][x].number for x in [x["id"] for x in self.get_channels(origin)]]
|
||||||
|
else:
|
||||||
|
return [self.list[origin][x].dict[keyfind] for x in [x["id"] for x in self.get_channels(origin)]]
|
||||||
else:
|
else:
|
||||||
return [self.list[x].dict[keyfind] for x in [x["id"] for x in self.get_channels()]]
|
matches = []
|
||||||
|
for origin in list(self.list.keys()):
|
||||||
|
if keyfind == "number":
|
||||||
|
next_match = [self.list[origin][x].number for x in [x["id"] for x in self.get_channels(origin)]]
|
||||||
|
else:
|
||||||
|
next_match = [self.list[origin][x].dict[keyfind] for x in [x["id"] for x in self.get_channels(origin)]]
|
||||||
|
if len(next_match):
|
||||||
|
matches.append(next_match)
|
||||||
|
return matches[0]
|
||||||
|
|
||||||
def set_channel_status(self, keyfind, valfind, updatedict):
|
def get_channel_dict(self, keyfind, valfind, origin=None):
|
||||||
self.get_channel_obj(keyfind, valfind).set_status(updatedict)
|
chan_obj = self.get_channel_obj(keyfind, valfind, origin)
|
||||||
|
if chan_obj:
|
||||||
|
return chan_obj.dict
|
||||||
|
return None
|
||||||
|
|
||||||
def set_channel_enablement_all(self, enablement):
|
def set_channel_status(self, keyfind, valfind, updatedict, origin):
|
||||||
for fhdhr_id in [x["id"] for x in self.get_channels()]:
|
self.get_channel_obj(keyfind, valfind, origin).set_status(updatedict)
|
||||||
self.list[fhdhr_id].set_enablement(enablement)
|
|
||||||
|
|
||||||
def set_channel_enablement(self, keyfind, valfind, enablement):
|
def set_channel_enablement_all(self, enablement, origin):
|
||||||
self.get_channel_obj(keyfind, valfind).set_enablement(enablement)
|
for fhdhr_id in [x["id"] for x in self.get_channels(origin)]:
|
||||||
|
self.list[fhdhr_id].set_enablement(enablement, origin)
|
||||||
|
|
||||||
def set_channel_favorite(self, keyfind, valfind, enablement):
|
def set_channel_enablement(self, keyfind, valfind, enablement, origin):
|
||||||
self.get_channel_obj(keyfind, valfind).set_favorite(enablement)
|
self.get_channel_obj(keyfind, valfind, origin).set_enablement(enablement)
|
||||||
|
|
||||||
def get_db_channels(self):
|
def set_channel_favorite(self, keyfind, valfind, enablement, origin):
|
||||||
self.fhdhr.logger.info("Checking for Channel information stored in the database.")
|
self.get_channel_obj(keyfind, valfind, origin).set_favorite(enablement)
|
||||||
channel_ids = self.fhdhr.db.get_fhdhr_value("channels", "list") or []
|
|
||||||
if len(channel_ids):
|
|
||||||
self.fhdhr.logger.info("Found %s existing channels in the database." % str(len(channel_ids)))
|
|
||||||
for channel_id in channel_ids:
|
|
||||||
channel_obj = Channel(self.fhdhr, self.id_system, channel_id=channel_id)
|
|
||||||
channel_id = channel_obj.dict["id"]
|
|
||||||
self.list[channel_id] = channel_obj
|
|
||||||
|
|
||||||
def save_db_channels(self):
|
def get_db_channels(self, origin=None):
|
||||||
channel_ids = [x["id"] for x in self.get_channels()]
|
|
||||||
self.fhdhr.db.set_fhdhr_value("channels", "list", channel_ids)
|
|
||||||
|
|
||||||
def get_channels(self, forceupdate=False):
|
if not origin:
|
||||||
|
origins_list = list(self.list.keys())
|
||||||
|
else:
|
||||||
|
origins_list = origin.lower()
|
||||||
|
|
||||||
|
if isinstance(origins_list, str):
|
||||||
|
origins_list = [origins_list]
|
||||||
|
|
||||||
|
for origin in origins_list:
|
||||||
|
self.fhdhr.logger.info("Checking for %s Channel information stored in the database." % origin)
|
||||||
|
channel_ids = self.fhdhr.db.get_fhdhr_value("channels", "list", origin) or []
|
||||||
|
if len(channel_ids):
|
||||||
|
self.fhdhr.logger.info("Found %s existing channels in the database." % str(len(channel_ids)))
|
||||||
|
for channel_id in channel_ids:
|
||||||
|
channel_obj = Channel(self.fhdhr, self.id_system, origin=origin, channel_id=channel_id)
|
||||||
|
            channel_id = channel_obj.dict["id"]
            self.list[origin][channel_id] = channel_obj

    def save_db_channels(self, origin=None):

        if not origin:
            origins_list = list(self.list.keys())
        else:
            origins_list = origin.lower()

        if isinstance(origins_list, str):
            origins_list = [origins_list]

        for origin in origins_list:
            channel_ids = [self.list[origin][x].dict["id"] for x in list(self.list[origin].keys())]
            self.fhdhr.db.set_fhdhr_value("channels", "list", channel_ids, origin)

    def get_channels(self, origin=None, forceupdate=False):
        """Pull Channels from origin.

        Output a list.

@@ -66,53 +120,64 @@ class Channels():

        Don't pull more often than 12 hours.
        """

        if not origin:
            origins_list = list(self.list.keys())
        else:
            origins_list = origin.lower()

        if isinstance(origins_list, str):
            origins_list = [origins_list]

        return_chan_list = []
        for origin in origins_list:

            if not len(list(self.list[origin].keys())):
                self.get_db_channels(origin=origin)

            if not forceupdate:
                return_chan_list.extend([self.list[origin][x].dict for x in list(self.list[origin].keys())])

            else:
                channel_origin_id_list = [str(self.list[origin][x].dict["origin_id"]) for x in list(self.list[origin].keys())]

                self.fhdhr.logger.info("Performing Channel Scan for %s." % origin)

                channel_dict_list = self.origins.origins_dict[origin].get_channels()
                self.fhdhr.logger.info("Found %s channels for %s." % (len(channel_dict_list), origin))

                self.fhdhr.logger.info("Performing Channel Import, This can take some time, Please wait.")

                newchan = 0
                chan_scan_start = time.time()
                for channel_info in channel_dict_list:

                    chan_existing = str(channel_info["id"]) in channel_origin_id_list

                    if chan_existing:
                        channel_obj = self.get_channel_obj("origin_id", channel_info["id"], origin)
                    else:
                        channel_obj = Channel(self.fhdhr, self.id_system, origin, origin_id=channel_info["id"])

                    channel_id = channel_obj.dict["id"]
                    channel_obj.basics(channel_info)

                    if not chan_existing:
                        self.list[origin][channel_id] = channel_obj
                        newchan += 1

                self.fhdhr.logger.info("%s Channel Import took %s" % (origin, humanized_time(time.time() - chan_scan_start)))

                if not newchan:
                    newchan = "no"
                self.fhdhr.logger.info("Found %s NEW channels for %s." % (newchan, origin))

                self.fhdhr.logger.info("Total %s Channel Count: %s" % (origin, len(self.list[origin].keys())))
                self.save_db_channels(origin=origin)

                self.fhdhr.db.set_fhdhr_value("channels", "scanned_time", time.time(), origin)
                return_chan_list.extend([self.list[origin][x].dict for x in list(self.list[origin].keys())])

        return return_chan_list

    def get_channel_stream(self, stream_args, origin):
        return self.origins.origins_dict[origin].get_channel_stream(self.get_channel_dict("number", stream_args["channel"]), stream_args)
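A rough usage sketch of the per-origin channel API above; the `fhdhr.device.channels` attribute path and the "nextpvr" origin name are assumptions for illustration, not something shown in this diff.

    # Illustrative only: attribute path and origin name are assumed.
    all_channels = fhdhr.device.channels.get_channels()                                  # cached lists for every origin
    nextpvr_channels = fhdhr.device.channels.get_channels(origin="nextpvr")              # one origin, cached
    rescanned = fhdhr.device.channels.get_channels(origin="nextpvr", forceupdate=True)   # force a fresh scan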
@@ -2,31 +2,32 @@ import uuid


class Channel_IDs():

    def __init__(self, fhdhr, origins):
        self.fhdhr = fhdhr
        self.origins = origins

    def get(self, origin_id, origin):
        existing_ids = self.fhdhr.db.get_fhdhr_value("channels", "list", origin) or []
        existing_channel_info = [self.fhdhr.db.get_fhdhr_value(channel_id, "dict", origin) or {} for channel_id in existing_ids]
        for existing_channel in existing_channel_info:
            if existing_channel["origin_id"] == origin_id:
                return existing_channel["id"]
        return self.assign(origin)

    def assign(self, origin):
        existing_ids = self.fhdhr.db.get_fhdhr_value("channels", "list", origin) or []
        channel_id = None
        while not channel_id:
            unique_id = str(uuid.uuid4())
            if str(unique_id) not in existing_ids:
                channel_id = str(unique_id)
                existing_ids.append(channel_id)
                self.fhdhr.db.set_fhdhr_value("channels", "list", existing_ids, origin)
        return channel_id

    def get_number(self, channel_id, origin):
        existing_ids = self.fhdhr.db.get_fhdhr_value("channels", "list", origin) or []
        existing_channel_info = [self.fhdhr.db.get_fhdhr_value(channel_id, "dict", origin) or {} for channel_id in existing_ids]
        cnumber = [existing_channel["number"] for existing_channel in existing_channel_info if existing_channel["id"] == channel_id] or None
        if cnumber:
            return cnumber
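The lookup-or-assign behaviour above reduces to mapping an origin's channel id onto a persistent fHDHR uuid. A minimal stand-alone sketch, with a plain dict standing in for the per-origin "channels"/"list" database value:

    import uuid

    # Minimal sketch of Channel_IDs.get()/assign(); "known" is only an in-memory stand-in.
    def get_or_assign(known, origin_id):
        if origin_id in known:
            return known[origin_id]
        new_id = str(uuid.uuid4())
        known[origin_id] = new_id
        return new_id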
@@ -3,22 +3,23 @@ import time


class Channel():

    def __init__(self, fhdhr, id_system, origin, origin_id=None, channel_id=None):
        self.fhdhr = fhdhr
        self.origin = origin

        self.id_system = id_system

        if not channel_id:
            if origin_id:
                channel_id = id_system.get(origin_id, origin)
            else:
                channel_id = id_system.assign(origin)
        self.channel_id = channel_id

        self.dict = self.fhdhr.db.get_fhdhr_value(str(channel_id), "dict", self.origin) or self.default_dict
        self.verify_dict()

        self.fhdhr.db.set_fhdhr_value(self.dict["id"], "dict", self.dict, self.origin)

    @property
    def number(self):

@@ -96,9 +97,9 @@ class Channel():

        self.dict["tags"] = self.dict["origin_tags"]

        if "number" not in list(channel_info.keys()):
            channel_info["number"] = self.id_system.get_number(channel_info["id"], self.origin)
        elif not channel_info["number"]:
            channel_info["number"] = self.id_system.get_number(channel_info["id"], self.origin)
        self.dict["origin_number"] = str(channel_info["number"])
        if not self.dict["number"]:
            self.dict["number"] = self.dict["origin_number"].split(".")[0]

@@ -128,7 +129,7 @@ class Channel():

        if "created" not in list(self.dict.keys()):
            self.dict["created"] = time.time()

        self.fhdhr.db.set_fhdhr_value(self.dict["id"], "dict", self.dict, self.origin)

    @property
    def default_dict(self):

@@ -144,64 +145,37 @@ class Channel():

        }

    def destroy(self):
        self.fhdhr.db.delete_fhdhr_value(self.dict["id"], "dict", self.origin)
        channel_ids = self.fhdhr.db.get_fhdhr_value("channels", "list") or []
        if self.dict["id"] in channel_ids:
            channel_ids.remove(self.dict["id"])
        self.fhdhr.db.set_fhdhr_value("channels", "list", channel_ids, self.origin)

    def set_status(self, updatedict):
        for key in list(updatedict.keys()):
            if key == "number":
                updatedict[key] = str(updatedict[key])
            self.dict[key] = updatedict[key]
        self.fhdhr.db.set_fhdhr_value(self.dict["id"], "dict", self.dict, self.origin)

    @property
    def generic_image_url(self):
        return "/api/images?method=generate&type=channel&message=%s" % self.number

    @property
    def api_stream_url(self):
        return '/api/tuners?method=stream&stream_method=%s&channel=%s&origin=%s' % (self.fhdhr.origins.origins_dict[self.origin].stream_method, self.dict["id"], self.origin)

    @property
    def api_m3u_url(self):
        return '/api/m3u?method=get&channel=%s&origin=%s' % (self.dict["id"], self.origin)

    def set_favorite(self, enablement):
        if enablement == "+":
            self.dict["favorite"] = 1
        elif enablement == "-":
            self.dict["favorite"] = 0
        self.fhdhr.db.set_fhdhr_value(self.dict["id"], "info", self.dict, self.origin)

    def set_enablement(self, enablement):
        if enablement == "disable":

@@ -213,7 +187,7 @@ class Channel():

            self.dict["enabled"] = False
        else:
            self.dict["enabled"] = True
        self.fhdhr.db.set_fhdhr_value(self.dict["id"], "info", self.dict, self.origin)

    def __getattr__(self, name):
        ''' will only get called for undefined attributes '''
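For reference, both channel URL properties above now always carry the origin. With an assumed origin of "nextpvr", a stream_method of "direct", and an fHDHR uuid as the channel id (all illustrative values), they resolve to URLs of this shape:

    # api_stream_url -> /api/tuners?method=stream&stream_method=direct&channel=<channel uuid>&origin=nextpvr
    # api_m3u_url    -> /api/m3u?method=get&channel=<channel uuid>&origin=nextpvr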
@@ -1,158 +0,0 @@

from collections import OrderedDict


class fHDHR_Cluster():

    def __init__(self, fhdhr, ssdp):
        self.fhdhr = fhdhr

        self.ssdp = ssdp

        self.friendlyname = self.fhdhr.config.dict["fhdhr"]["friendlyname"]

        if self.fhdhr.config.dict["fhdhr"]["discovery_address"]:
            self.startup_sync()

    def cluster(self):
        return self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()

    def get_cluster_dicts_web(self):
        fhdhr_list = self.cluster()
        locations = []
        for location in list(fhdhr_list.keys()):
            item_dict = {
                "base_url": fhdhr_list[location]["base_url"],
                "name": fhdhr_list[location]["name"]
                }
            if item_dict["base_url"] != self.fhdhr.api.base:
                locations.append(item_dict)
        if len(locations):
            locations = sorted(locations, key=lambda i: i['name'])
            return locations
        else:
            return None

    def get_list(self):
        cluster = self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        return_dict = {}
        for location in list(cluster.keys()):
            if location != self.fhdhr.api.base:
                return_dict[location] = {
                    "Joined": True
                    }

        detected_list = self.ssdp.detect_method.get()
        for location in detected_list:
            if location not in list(cluster.keys()):
                return_dict[location] = {
                    "Joined": False
                    }
        return_dict = OrderedDict(sorted(return_dict.items()))
        return return_dict

    def default_cluster(self):
        defdict = {}
        defdict[self.fhdhr.api.base] = {
            "base_url": self.fhdhr.api.base,
            "name": self.friendlyname
            }
        return defdict

    def startup_sync(self):
        self.fhdhr.logger.info("Syncronizing with Cluster.")
        cluster = self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        if not len(list(cluster.keys())):
            self.fhdhr.logger.info("No Cluster Found.")
        else:
            self.fhdhr.logger.info("Found %s clustered services." % str(len(list(cluster.keys()))))
            for location in list(cluster.keys()):
                if location != self.fhdhr.api.base:
                    self.fhdhr.logger.debug("Checking Cluster Syncronization information from %s." % location)
                    sync_url = "%s/api/cluster?method=get" % location
                    try:
                        sync_open = self.fhdhr.web.session.get(sync_url)
                        retrieved_cluster = sync_open.json()
                        if self.fhdhr.api.base not in list(retrieved_cluster.keys()):
                            return self.leave()
                    except self.fhdhr.web.exceptions.ConnectionError:
                        self.fhdhr.logger.error("Unreachable: %s" % location)

    def leave(self):
        self.fhdhr.logger.info("Leaving cluster.")
        self.fhdhr.db.set_fhdhr_value("cluster", "dict", self.default_cluster())

    def disconnect(self):
        cluster = self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        for location in list(cluster.keys()):
            if location != self.fhdhr.api.base:
                self.fhdhr.logger.info("Informing %s that I am departing the Cluster." % location)
                sync_url = "%s/api/cluster?method=del&location=%s" % (location, self.fhdhr.api.base)
                try:
                    self.fhdhr.web.session.get(sync_url)
                except self.fhdhr.web.exceptions.ConnectionError:
                    self.fhdhr.logger.error("Unreachable: %s" % location)
        self.leave()

    def sync(self, location):
        sync_url = "%s/api/cluster?method=get" % location
        try:
            sync_open = self.fhdhr.web.session.get(sync_url)
            self.fhdhr.db.set_fhdhr_value("cluster", "dict", sync_open.json())
        except self.fhdhr.web.exceptions.ConnectionError:
            self.fhdhr.logger.error("Unreachable: %s" % location)

    def push_sync(self):
        cluster = self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        for location in list(cluster.keys()):
            if location != self.fhdhr.api.base:
                sync_url = "%s/api/cluster?method=sync&location=%s" % (location, self.fhdhr.api.base_quoted)
                try:
                    self.fhdhr.web.session.get(sync_url)
                except self.fhdhr.web.exceptions.ConnectionError:
                    self.fhdhr.logger.error("Unreachable: %s" % location)

    def add(self, location):
        cluster = self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        if location not in list(cluster.keys()):
            self.fhdhr.logger.info("Adding %s to cluster." % location)
            cluster[location] = {"base_url": location}

            location_info_url = "%s/hdhr/discover.json" % location
            try:
                location_info_req = self.fhdhr.web.session.get(location_info_url)
            except self.fhdhr.web.exceptions.ConnectionError:
                self.fhdhr.logger.error("Unreachable: %s" % location)
                del cluster[location]
                self.fhdhr.db.set_fhdhr_value("cluster", "dict", cluster)
                return
            location_info = location_info_req.json()
            cluster[location]["name"] = location_info["FriendlyName"]

            cluster_info_url = "%s/api/cluster?method=get" % location
            try:
                cluster_info_req = self.fhdhr.web.session.get(cluster_info_url)
            except self.fhdhr.web.exceptions.ConnectionError:
                self.fhdhr.logger.error("Unreachable: %s" % location)
                del cluster[location]
                self.fhdhr.db.set_fhdhr_value("cluster", "dict", cluster)
                return
            cluster_info = cluster_info_req.json()
            for cluster_key in list(cluster_info.keys()):
                if cluster_key not in list(cluster.keys()):
                    cluster[cluster_key] = cluster_info[cluster_key]

            self.fhdhr.db.set_fhdhr_value("cluster", "dict", cluster)
            self.push_sync()

    def remove(self, location):
        cluster = self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        if location in list(cluster.keys()):
            self.fhdhr.logger.info("Removing %s from cluster." % location)
            del cluster[location]
            sync_url = "%s/api/cluster?method=leave" % location
            try:
                self.fhdhr.web.session.get(sync_url)
            except self.fhdhr.web.exceptions.ConnectionError:
                self.fhdhr.logger.error("Unreachable: %s" % location)
        self.push_sync()
        self.fhdhr.db.set_fhdhr_value("cluster", "dict", cluster)
@@ -1,4 +1,3 @@

import time
import datetime
import threading

@@ -10,23 +9,19 @@ from .blocks import blocksEPG


class EPG():

    def __init__(self, fhdhr, channels, origins):
        self.fhdhr = fhdhr

        self.origins = origins
        self.channels = channels

        self.epgdict = {}

        self.epg_methods = self.fhdhr.config.dict["epg"]["method"] or []
        self.valid_epg_methods = [x for x in list(self.fhdhr.config.dict["epg"]["valid_methods"].keys()) if x and x not in [None, "None"]]

        self.blocks = blocksEPG(self.fhdhr, self.channels, self.origins, None)
        self.epg_handling = {}
        self.epg_method_selfadd()

        self.def_method = self.fhdhr.config.dict["epg"]["def_method"]

@@ -45,16 +40,14 @@ class EPG():

    def clear_epg_cache(self, method=None):

        if not method:
            if not self.def_method:
                return
        if method not in self.valid_epg_methods:
            if not self.def_method:
                return
            method = self.def_method

        self.fhdhr.logger.info("Clearing %s EPG cache." % method)

        if hasattr(self.epg_handling[method], 'clear_cache'):
            self.epg_handling[method].clear_cache()

@@ -67,11 +60,17 @@ class EPG():

    def whats_on_now(self, channel_number, method=None, chan_obj=None, chan_dict=None):
        nowtime = time.time()
        epgdict = self.get_epg(method)
        if channel_number not in list(epgdict.keys()):
            epgdict[channel_number] = {
                "callsign": "",
                "name": "",
                "number": str(channel_number),
                "id": "",
                "thumbnail": "",
                "listing": []
                }

        for listing in epgdict[channel_number]["listing"]:
            for time_item in ["time_start", "time_end"]:
                time_value = listing[time_item]
                if str(time_value).endswith("+00:00"):

@@ -91,16 +90,19 @@ class EPG():

    def whats_on_allchans(self, method=None):

        if not method:
            if not self.def_method:
                return
            method = self.def_method
        if method not in self.valid_epg_methods:
            if not self.def_method:
                return
            method = self.def_method

        channel_guide_dict = {}
        epgdict = self.get_epg(method)
        epgdict = epgdict.copy()
        for c in list(epgdict.keys()):
            if method in [origin for origin in list(self.origins.origins_dict.keys())]:
                chan_obj = self.channels.get_channel_obj("origin_id", epgdict[c]["id"])
                channel_number = chan_obj.number
                epgdict[channel_number] = epgdict.pop(c)

@@ -120,10 +122,13 @@ class EPG():

    def get_epg(self, method=None):

        if not method:
            if not self.def_method:
                return
            method = self.def_method
        if method not in self.valid_epg_methods:
            if not self.def_method:
                return
            method = self.def_method

        if method in list(self.epgdict.keys()):
            return self.epgdict[method]

@@ -154,37 +159,29 @@ class EPG():

        return next(item for item in event_list if item["id"] == event_id) or None

    def epg_method_selfadd(self):
        for plugin_name in list(self.fhdhr.plugins.plugins.keys()):
            if self.fhdhr.plugins.plugins[plugin_name].type == "alt_epg":
                method = self.fhdhr.plugins.plugins[plugin_name].name.lower()
                self.epg_handling[method] = self.fhdhr.plugins.plugins[plugin_name].Plugin_OBJ(self.channels, self.fhdhr.plugins.plugins[plugin_name].plugin_utils)
        for origin in list(self.origins.origins_dict.keys()):
            if origin.lower() not in list(self.epg_handling.keys()):
                self.epg_handling[origin.lower()] = blocksEPG(self.fhdhr, self.channels, self.origins, origin)
                self.fhdhr.config.register_valid_epg_method(origin, "Blocks")
                self.valid_epg_methods.append(origin.lower())

    def update(self, method=None):

        if not method:
            if not self.def_method:
                return
            method = self.def_method
        if method not in self.valid_epg_methods:
            if not self.def_method:
                return
            method = self.def_method

        self.fhdhr.logger.info("Updating %s EPG cache." % method)
        programguide = self.epg_handling[method].update_epg()

        # sort the channel listings by time stamp
        for cnum in list(programguide.keys()):

@@ -200,7 +197,7 @@ class EPG():

            clean_prog_guide[cnum] = programguide[cnum].copy()
            clean_prog_guide[cnum]["listing"] = []

            if method in [origin for origin in list(self.origins.origins_dict.keys())]:
                chan_obj = self.channels.get_channel_obj("origin_id", programguide[cnum]["id"])
            else:
                chan_obj = None

@@ -261,10 +258,10 @@ class EPG():

        programguide = clean_prog_guide.copy()

        # if a stock method, generate Blocks EPG for missing channels
        if method in [origin for origin in list(self.origins.origins_dict.keys())]:
            timestamps = self.blocks.timestamps
            for fhdhr_id in [x["id"] for x in self.channels.get_channels(method)]:
                chan_obj = self.channels.get_channel_obj("id", fhdhr_id, method)
                if str(chan_obj.number) not in list(programguide.keys()):
                    programguide[str(chan_obj.number)] = chan_obj.epgdict
                    clean_prog_dicts = self.blocks.empty_channel_epg(timestamps, chan_obj=chan_obj)

@@ -295,7 +292,7 @@ class EPG():

        self.epgdict[method] = sorted_chan_guide
        self.fhdhr.db.set_fhdhr_value("epg_dict", method, programguide)
        self.fhdhr.db.set_fhdhr_value("update_time", method, time.time())
        self.fhdhr.logger.info("Wrote %s EPG cache. %s Programs for %s Channels" % (method, total_programs, total_channels))

    def start(self):
        self.fhdhr.logger.info("EPG Update Thread Starting")
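From the dispatch code above, an alternative EPG plugin is expected to expose a Plugin_OBJ with at least update_epg(), and optionally clear_cache(). The outline below is inferred from those call sites only; it is not a plugin shipped in this repo and the expected return layout is an assumption based on how the guide dict is consumed above.

    class Plugin_OBJ():
        """Inferred interface for an alt_epg plugin, based on how EPG() above calls it."""

        def __init__(self, channels, plugin_utils):
            self.channels = channels
            self.plugin_utils = plugin_utils

        def update_epg(self):
            # Expected to return {channel_number: {"callsign", "name", "number", "id", "thumbnail", "listing": [...]}}
            return {}

        def clear_cache(self):
            # Optional: only called when hasattr(plugin, 'clear_cache') is true.
            pass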
@@ -3,18 +3,19 @@ import datetime


class blocksEPG():

    def __init__(self, fhdhr, channels, origins, origin):
        self.fhdhr = fhdhr

        self.channels = channels
        self.origins = origins
        self.origin = origin

    def update_epg(self):
        programguide = {}

        timestamps = self.timestamps

        for fhdhr_id in [x["id"] for x in self.channels.get_channels(self.origin)]:
            chan_obj = self.channels.get_channel_obj("id", fhdhr_id, self.origin)

            if str(chan_obj.number) not in list(programguide.keys()):
                programguide[str(chan_obj.number)] = chan_obj.epgdict
@@ -4,22 +4,20 @@ import struct

import time
import threading


class SSDPServer():

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

        self.ssdp_handling = {}
        self.methods = [x for x in list(self.fhdhr.plugins.plugins.keys()) if self.fhdhr.plugins.plugins[x].type == "ssdp"]

        if (self.fhdhr.config.dict["fhdhr"]["discovery_address"] and
           self.fhdhr.config.dict["ssdp"]["enabled"] and
           len(self.methods)):

            self.fhdhr.threads["ssdp"] = threading.Thread(target=self.run)
            self.setup_ssdp()

            self.sock.bind((self.bind_address, 1900))

@@ -29,12 +27,18 @@ class SSDPServer():

            self.max_age = int(fhdhr.config.dict["ssdp"]["max_age"])
            self.age_time = None

            self.ssdp_method_selfadd()

            self.do_alive()
            self.m_search()

    def ssdp_method_selfadd(self):
        for plugin_name in list(self.fhdhr.plugins.plugins.keys()):
            if self.fhdhr.plugins.plugins[plugin_name].type == "ssdp":
                method = self.fhdhr.plugins.plugins[plugin_name].name.lower()
                plugin_utils = self.fhdhr.plugins.plugins[plugin_name].plugin_utils
                self.ssdp_handling[method] = self.fhdhr.plugins.plugins[plugin_name].Plugin_OBJ(self.fhdhr, plugin_utils, self.broadcast_ip, self.max_age)

    def start(self):
        self.fhdhr.logger.info("SSDP Server Starting")
        self.fhdhr.threads["ssdp"].start()

@@ -62,21 +66,22 @@ class SSDPServer():

        if send_alive:
            self.fhdhr.logger.info("Sending Alive message to network.")
            self.do_notify(self.broadcast_address_tuple)
            self.age_time = time.time()

    def do_notify(self, address):

        notify_list = []
        for ssdp_handler in list(self.ssdp_handling.keys()):
            if self.ssdp_handling[ssdp_handler].enabled and hasattr(self.ssdp_handling[ssdp_handler], 'notify'):
                notify_data = self.ssdp_handling[ssdp_handler].notify
                if isinstance(notify_data, list):
                    notify_list.extend(notify_data)
                else:
                    notify_list.append(notify_data)

        for notifydata in notify_list:
            notifydata = notifydata.encode("utf-8")

            self.fhdhr.logger.debug("Created {}".format(notifydata))
            try:

@@ -103,6 +108,10 @@ class SSDPServer():

        headers = [x.split(':', 1) for x in lines]
        headers = dict(map(lambda x: (x[0].lower(), x[1]), headers))

        for ssdp_handler in list(self.ssdp_handling.keys()):
            if self.ssdp_handling[ssdp_handler].enabled and hasattr(self.ssdp_handling[ssdp_handler], 'on_recv'):
                self.ssdp_handling[ssdp_handler].on_recv(headers, cmd, list(self.ssdp_handling.keys()))

        if cmd[0] == 'M-SEARCH' and cmd[1] == '*':
            # SSDP discovery
            self.fhdhr.logger.debug("Received qualifying M-SEARCH from {}".format(address))

@@ -110,26 +119,14 @@ class SSDPServer():

            self.do_notify(address)

        if cmd[0] == 'NOTIFY' and cmd[1] == '*':
            self.fhdhr.logger.debug("NOTIFY data: {}".format(headers))

        else:
            self.fhdhr.logger.debug('Unknown SSDP command %s %s' % (cmd[0], cmd[1]))

    def m_search(self):
        data = self.msearch_payload
        self.sock.sendto(data, self.broadcast_address_tuple)

    def create_msearch_payload(self):

@@ -176,7 +173,7 @@ class SSDPServer():

        if self.proto == "ipv4":
            self.af_type = socket.AF_INET
            self.broadcast_ip = "239.255.255.250"
            self.broadcast_address_tuple = (self.broadcast_ip, 1900)
            self.bind_address = "0.0.0.0"
        elif self.proto == "ipv6":
            self.af_type = socket.AF_INET6
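Likewise, the SSDP loop above only relies on a handler having an enabled flag, a notify payload (a string or a list of strings), and an optional on_recv() hook. A hedged outline of that contract, inferred from do_notify() and on_recv() above rather than taken from an actual plugin:

    class Plugin_OBJ():
        """Inferred interface for an ssdp plugin used by SSDPServer above."""

        def __init__(self, fhdhr, plugin_utils, broadcast_ip, max_age):
            self.fhdhr = fhdhr
            self.plugin_utils = plugin_utils
            self.broadcast_ip = broadcast_ip
            self.max_age = max_age
            self.enabled = True

        @property
        def notify(self):
            # One NOTIFY payload (or a list of them); do_notify() encodes it to utf-8 before sending.
            return "NOTIFY * HTTP/1.1\r\nNTS:ssdp:alive\r\n\r\n"

        def on_recv(self, headers, cmd, ssdp_handlers):
            # Optional hook for inspecting received SSDP packets.
            pass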
@@ -1,49 +0,0 @@


class RMG_SSDP():

    def __init__(self, fhdhr, broadcast_ip, max_age):
        self.fhdhr = fhdhr

        self.ssdp_content = None

        self.broadcast_ip = broadcast_ip
        self.device_xml_path = '/rmg/device.xml'

        self.cable_schema = "urn:schemas-opencable-com:service:Security:1"
        self.ota_schema = "urn:schemas-upnp-org:device-1-0"

        if self.fhdhr.config.dict["fhdhr"]["reporting_tuner_type"].lower() == "antenna":
            self.schema = self.ota_schema
        elif self.fhdhr.config.dict["fhdhr"]["reporting_tuner_type"].lower() == "cable":
            self.schema = self.cable_schema
        else:
            self.schema = self.ota_schema

        self.max_age = max_age

    def get(self):
        if self.ssdp_content:
            return self.ssdp_content.encode("utf-8")

        data = ''
        data_command = "NOTIFY * HTTP/1.1"

        data_dict = {
            "HOST": "%s:%s" % ("239.255.255.250", 1900),
            "NT": self.schema,
            "NTS": "ssdp:alive",
            "USN": 'uuid:%s::%s' % (self.fhdhr.config.dict["main"]["uuid"], self.schema),
            "SERVER": 'fHDHR/%s UPnP/1.0' % self.fhdhr.version,
            "LOCATION": "%s%s" % (self.fhdhr.api.base, self.device_xml_path),
            "AL": "%s%s" % (self.fhdhr.api.base, self.device_xml_path),
            "Cache-Control:max-age=": self.max_age
            }

        data += "%s\r\n" % data_command
        for data_key in list(data_dict.keys()):
            data += "%s:%s\r\n" % (data_key, data_dict[data_key])
        data += "\r\n"

        self.ssdp_content = data
        return data.encode("utf-8")
@@ -1,16 +0,0 @@


class fHDHR_Detect():

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr
        self.fhdhr.db.delete_fhdhr_value("ssdp_detect", "list")

    def set(self, location):
        detect_list = self.fhdhr.db.get_fhdhr_value("ssdp_detect", "list") or []
        if location not in detect_list:
            detect_list.append(location)
            self.fhdhr.db.set_fhdhr_value("ssdp_detect", "list", detect_list)

    def get(self):
        return self.fhdhr.db.get_fhdhr_value("ssdp_detect", "list") or []
@@ -1,3 +1,4 @@

import m3u8

from fHDHR.exceptions import TunerError

@@ -11,97 +12,125 @@ class Tuners():

        self.channels = channels

        self.epg = epg

        self.tuners = {}
        for origin in list(self.fhdhr.origins.origins_dict.keys()):
            self.tuners[origin] = {}

            max_tuners = int(self.fhdhr.origins.origins_dict[origin].tuners)

            self.fhdhr.logger.info("Creating %s tuners for %s." % (max_tuners, origin))

            for i in range(0, max_tuners):
                self.tuners[origin][str(i)] = Tuner(fhdhr, i, epg, origin)

        self.alt_stream_handlers = {}

    def alt_stream_methods_selfadd(self):
        for plugin_name in list(self.fhdhr.plugins.plugins.keys()):
            if self.fhdhr.plugins.plugins[plugin_name].type == "alt_stream":
                method = self.fhdhr.plugins.plugins[plugin_name].name
                self.alt_stream_handlers[method] = self.fhdhr.plugins.plugins[plugin_name]

    def get_available_tuner(self, origin):
        return next(tunernum for tunernum in list(self.tuners[origin].keys()) if not self.tuners[origin][tunernum].tuner_lock.locked()) or None

    def get_scanning_tuner(self, origin):
        return next(tunernum for tunernum in list(self.tuners[origin].keys()) if self.tuners[origin][tunernum].status["status"] == "Scanning") or None

    def stop_tuner_scan(self, origin):
        tunernum = self.get_scanning_tuner(origin)
        if tunernum:
            self.tuners[origin][str(tunernum)].close()

    def tuner_scan(self, origin="all"):
        """Temporarily use a tuner for a scan"""

        if origin == "all":
            origins = list(self.tuners.keys())
        else:
            origins = [origin]

        for origin in origins:

            if not self.available_tuner_count(origin):
                raise TunerError("805 - All Tuners In Use")

            tunernumber = self.get_available_tuner(origin)
            self.tuners[origin][str(tunernumber)].channel_scan(origin)

            if not tunernumber:
                raise TunerError("805 - All Tuners In Use")

    def tuner_grab(self, tuner_number, origin, channel_number):

        if str(tuner_number) not in list(self.tuners[origin].keys()):
            self.fhdhr.logger.error("Tuner %s does not exist for %s." % (tuner_number, origin))
            raise TunerError("806 - Tune Failed")

        # TunerError will raise if unavailable
        self.tuners[origin][str(tuner_number)].grab(origin, channel_number)

        return tuner_number

    def first_available(self, origin, channel_number, dograb=True):

        if not self.available_tuner_count(origin):
            raise TunerError("805 - All Tuners In Use")

        tunernumber = self.get_available_tuner(origin)

        if not tunernumber:
            raise TunerError("805 - All Tuners In Use")
        else:
            self.tuners[origin][str(tunernumber)].grab(origin, channel_number)
            return tunernumber

    def tuner_close(self, tunernum, origin):
        self.tuners[origin][str(tunernum)].close()

    def status(self, origin=None):
        all_status = {}
        if origin:
            for tunernum in list(self.tuners[origin].keys()):
                all_status[tunernum] = self.tuners[origin][str(tunernum)].get_status()
        else:
            for origin in list(self.tuners.keys()):
                all_status[origin] = {}
                for tunernum in list(self.tuners[origin].keys()):
                    all_status[origin][tunernum] = self.tuners[origin][str(tunernum)].get_status()
        return all_status

    def available_tuner_count(self, origin):
        available_tuners = 0
        for tunernum in list(self.tuners[origin].keys()):
            if not self.tuners[origin][str(tunernum)].tuner_lock.locked():
                available_tuners += 1
        return available_tuners

    def inuse_tuner_count(self, origin):
        inuse_tuners = 0
        for tunernum in list(self.tuners[origin].keys()):
            if self.tuners[origin][str(tunernum)].tuner_lock.locked():
                inuse_tuners += 1
        return inuse_tuners

    def get_stream_info(self, stream_args):

        stream_info = self.channels.get_channel_stream(stream_args, stream_args["origin"])
        if not stream_info:
            raise TunerError("806 - Tune Failed")

        if isinstance(stream_info, str):
            stream_info = {"url": stream_info, "headers": None}
        stream_args["stream_info"] = stream_info

        if not stream_args["stream_info"]["url"]:
            raise TunerError("806 - Tune Failed")

        if "headers" not in list(stream_args["stream_info"].keys()):
            stream_args["stream_info"]["headers"] = None

        if stream_args["stream_info"]["url"].startswith("udp://"):
            stream_args["true_content_type"] = "video/mpeg"
            stream_args["content_type"] = "video/mpeg"

@@ -112,7 +141,89 @@ class Tuners():

        if stream_args["true_content_type"].startswith(tuple(["application/", "text/"])):
            stream_args["content_type"] = "video/mpeg"
            if stream_args["origin_quality"] != -1:
                stream_args["stream_info"]["url"] = self.m3u8_quality(stream_args)
        else:
            stream_args["content_type"] = stream_args["true_content_type"]

        return stream_args

    def m3u8_quality(self, stream_args):

        m3u8_url = stream_args["stream_info"]["url"]
        quality_profile = stream_args["origin_quality"]

        if not quality_profile:
            if stream_args["method"] == "direct":
                quality_profile = "high"
                self.fhdhr.logger.info("Origin Quality not set in config. Direct Method set and will default to Highest Quality")
            else:
                self.fhdhr.logger.info("Origin Quality not set in config. %s Method will select the Quality Automatically" % stream_args["method"])
                return m3u8_url
        else:
            quality_profile = quality_profile.lower()
            self.fhdhr.logger.info("Origin Quality set in config to %s" % (quality_profile))

        while True:
            self.fhdhr.logger.info("Opening m3u8 for reading %s" % m3u8_url)

            try:
                if stream_args["stream_info"]["headers"]:
                    videoUrlM3u = m3u8.load(m3u8_url, headers=stream_args["stream_info"]["headers"])
                else:
                    videoUrlM3u = m3u8.load(m3u8_url)
            except Exception as e:
                self.fhdhr.logger.info("m3u8 load error: %s" % e)
                return m3u8_url

            if len(videoUrlM3u.playlists):
                self.fhdhr.logger.info("%s m3u8 varients found" % len(videoUrlM3u.playlists))

                # Create list of dicts
                playlists, playlist_index = {}, 0
                for playlist_item in videoUrlM3u.playlists:
                    playlist_index += 1
                    playlist_dict = {
                        "url": playlist_item.absolute_uri,
                        "bandwidth": playlist_item.stream_info.bandwidth,
                        }

                    if not playlist_item.stream_info.resolution:
                        playlist_dict["width"] = None
                        playlist_dict["height"] = None
                    else:
                        try:
                            playlist_dict["width"] = playlist_item.stream_info.resolution[0]
                            playlist_dict["height"] = playlist_item.stream_info.resolution[1]
                        except TypeError:
                            playlist_dict["width"] = None
                            playlist_dict["height"] = None

                    playlists[playlist_index] = playlist_dict

                sorted_playlists = sorted(playlists, key=lambda i: (
                    int(playlists[i]['bandwidth']),
                    int(playlists[i]['width'] or 0),
                    int(playlists[i]['height'] or 0)
                    ))
                sorted_playlists = [playlists[x] for x in sorted_playlists]

                if not quality_profile or quality_profile == "high":
                    selected_index = -1
                elif quality_profile == "medium":
                    selected_index = int((len(sorted_playlists) - 1)/2)
                elif quality_profile == "low":
                    selected_index = 0

                m3u8_stats = ",".join(
                    ["%s %s" % (x, sorted_playlists[selected_index][x])
                     for x in list(sorted_playlists[selected_index].keys())
                     if x != "url" and sorted_playlists[selected_index][x]])
                self.fhdhr.logger.info("Selected m3u8 details: %s" % m3u8_stats)
                m3u8_url = sorted_playlists[selected_index]["url"]

            else:
                self.fhdhr.logger.info("No m3u8 varients found")
                break

        return m3u8_url
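The variant selection in m3u8_quality() above amounts to sorting the master playlist's variants by bandwidth and indexing low/medium/high. A stripped-down sketch using the same m3u8 package; the pick_variant function name is new, everything else mirrors the calls used above:

    import m3u8

    def pick_variant(master_url, quality_profile="high", headers=None):
        # low -> lowest bandwidth, medium -> middle of the sorted list, high -> highest bandwidth.
        playlist = m3u8.load(master_url, headers=headers) if headers else m3u8.load(master_url)
        if not len(playlist.playlists):
            return master_url
        variants = sorted(playlist.playlists, key=lambda p: p.stream_info.bandwidth or 0)
        index = {"low": 0, "medium": (len(variants) - 1) // 2, "high": -1}.get(quality_profile, -1)
        return variants[index].absolute_uri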
@@ -2,8 +2,6 @@

from .direct_stream import Direct_Stream
from .direct_m3u8_stream import Direct_M3U8_Stream


class Stream():

@@ -12,16 +10,14 @@ class Stream():

        self.fhdhr = fhdhr
        self.stream_args = stream_args

        if stream_args["method"] == "direct":
            if self.stream_args["true_content_type"].startswith(tuple(["application/", "text/"])):
                self.method = Direct_M3U8_Stream(fhdhr, stream_args, tuner)
            else:
                self.method = Direct_Stream(fhdhr, stream_args, tuner)
        else:
            plugin_name = self.fhdhr.config.dict["streaming"]["valid_methods"][stream_args["method"]]["plugin"]
            self.method = self.fhdhr.plugins.plugins[plugin_name].Plugin_OBJ(fhdhr, self.fhdhr.plugins.plugins[plugin_name].plugin_utils, stream_args, tuner)

    def get(self):
        return self.method.get()
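Non-direct streaming methods are now resolved entirely through the plugin registry rather than hardcoded FFMPEG/VLC classes. The snippet below restates that lookup outside the class; "ffmpeg" is a purely hypothetical method name that would have to be supplied by an alt_stream plugin:

    # Hypothetical example: "ffmpeg" would come from an alt_stream plugin, not from core.
    method = stream_args["method"]
    if method != "direct":
        plugin_name = fhdhr.config.dict["streaming"]["valid_methods"][method]["plugin"]
        plugin = fhdhr.plugins.plugins[plugin_name]
        stream_method = plugin.Plugin_OBJ(fhdhr, plugin.plugin_utils, stream_args, tuner)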
@ -21,18 +21,10 @@ class Direct_M3U8_Stream():
|
|||||||
if not self.stream_args["duration"] == 0:
|
if not self.stream_args["duration"] == 0:
|
||||||
self.stream_args["time_end"] = self.stream_args["duration"] + time.time()
|
self.stream_args["time_end"] = self.stream_args["duration"] + time.time()
|
||||||
|
|
||||||
self.fhdhr.logger.info("Detected stream URL is m3u8: %s" % self.stream_args["true_content_type"])
|
self.fhdhr.logger.info("Detected stream of m3u8 URL: %s" % self.stream_args["stream_info"]["url"])
|
||||||
|
|
||||||
channel_stream_url = self.stream_args["stream_info"]["url"]
|
if self.stream_args["transcode_quality"]:
|
||||||
while True:
|
self.fhdhr.logger.info("Client requested a %s transcode for stream. Direct Method cannot transcode." % self.stream_args["transcode_quality"])
|
||||||
|
|
||||||
self.fhdhr.logger.info("Opening m3u8 for reading %s" % channel_stream_url)
|
|
||||||
videoUrlM3u = m3u8.load(channel_stream_url)
|
|
||||||
if len(videoUrlM3u.playlists):
|
|
||||||
self.fhdhr.logger.info("%s m3u8 varients found" % len(videoUrlM3u.playlists))
|
|
||||||
channel_stream_url = videoUrlM3u.playlists[0].absolute_uri
|
|
||||||
else:
|
|
||||||
break
|
|
||||||
|
|
||||||
def generate():
|
def generate():
|
||||||
|
|
||||||
@ -42,7 +34,16 @@ class Direct_M3U8_Stream():
|
|||||||
|
|
||||||
while self.tuner.tuner_lock.locked():
|
while self.tuner.tuner_lock.locked():
|
||||||
|
|
||||||
playlist = m3u8.load(channel_stream_url)
|
try:
|
||||||
|
if self.stream_args["stream_info"]["headers"]:
|
||||||
|
playlist = m3u8.load(self.stream_args["stream_info"]["url"], headers=self.stream_args["stream_info"]["headers"])
|
||||||
|
else:
|
||||||
|
playlist = m3u8.load(self.stream_args["stream_info"]["url"])
|
||||||
|
except Exception as e:
|
||||||
|
self.fhdhr.logger.info("Connection Closed: %s" % e)
|
||||||
|
self.tuner.close()
|
||||||
|
return None
|
||||||
|
|
||||||
segments = playlist.segments
|
segments = playlist.segments
|
||||||
|
|
||||||
if len(played_chunk_urls):
|
if len(played_chunk_urls):
|
||||||
@ -70,13 +71,19 @@ class Direct_M3U8_Stream():
|
|||||||
self.fhdhr.logger.info("Requested Duration Expired.")
|
self.fhdhr.logger.info("Requested Duration Expired.")
|
||||||
self.tuner.close()
|
self.tuner.close()
|
||||||
|
|
||||||
chunk = self.fhdhr.web.session.get(chunkurl).content
|
if self.stream_args["stream_info"]["headers"]:
|
||||||
|
chunk = self.fhdhr.web.session.get(chunkurl, headers=self.stream_args["stream_info"]["headers"]).content
|
||||||
|
else:
|
||||||
|
chunk = self.fhdhr.web.session.get(chunkurl).content
|
||||||
if not chunk:
|
if not chunk:
|
||||||
break
|
break
|
||||||
# raise TunerError("807 - No Video Data")
|
# raise TunerError("807 - No Video Data")
|
||||||
if key:
|
if key:
|
||||||
if key["url"]:
|
if key["url"]:
|
||||||
keyfile = self.fhdhr.web.session.get(key["url"]).content
|
if self.stream_args["stream_info"]["headers"]:
|
||||||
|
keyfile = self.fhdhr.web.session.get(key["url"], headers=self.stream_args["stream_info"]["headers"]).content
|
||||||
|
else:
|
||||||
|
keyfile = self.fhdhr.web.session.get(key["url"]).content
|
||||||
cryptor = AES.new(keyfile, AES.MODE_CBC, keyfile)
|
cryptor = AES.new(keyfile, AES.MODE_CBC, keyfile)
|
||||||
self.fhdhr.logger.info("Decrypting Chunk #%s with key: %s" % (len(played_chunk_urls), key["url"]))
|
self.fhdhr.logger.info("Decrypting Chunk #%s with key: %s" % (len(played_chunk_urls), key["url"]))
|
||||||
chunk = cryptor.decrypt(chunk)
|
chunk = cryptor.decrypt(chunk)
|
||||||
@ -94,6 +101,8 @@ class Direct_M3U8_Stream():
|
|||||||
self.fhdhr.logger.info("Connection Closed: %s" % e)
|
self.fhdhr.logger.info("Connection Closed: %s" % e)
|
||||||
finally:
|
finally:
|
||||||
self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")
|
self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")
|
||||||
|
if hasattr(self.fhdhr.origins.origins_dict[self.tuner.origin], "close_stream"):
|
||||||
|
self.fhdhr.origins.origins_dict[self.tuner.origin].close_stream(self.tuner.number, self.stream_args)
|
||||||
self.tuner.close()
|
self.tuner.close()
|
||||||
# raise TunerError("806 - Tune Failed")
|
# raise TunerError("806 - Tune Failed")
|
||||||
|
|
||||||
|
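The hunks above teach the m3u8 handler to pass any per-stream headers to the playlist fetch, the segment downloads, and the key downloads, and to notify the owning origin (via close_stream) when the connection ends. Reduced to its core, the segment-polling pattern looks roughly like this sketch (requests and m3u8 assumed installed; names illustrative):

import m3u8
import requests

def iter_segments(playlist_url, headers=None):
    session = requests.Session()
    played = set()
    playlist = m3u8.load(playlist_url)
    for segment in playlist.segments:
        if segment.absolute_uri in played:
            continue
        played.add(segment.absolute_uri)
        # Forward the same headers used for the playlist to each segment.
        yield session.get(segment.absolute_uri, headers=headers).content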
@@ -20,7 +20,13 @@ class Direct_Stream():

         self.fhdhr.logger.info("Direct Stream of %s URL: %s" % (self.stream_args["true_content_type"], self.stream_args["stream_info"]["url"]))

-        req = self.fhdhr.web.session.get(self.stream_args["stream_info"]["url"], stream=True)
+        if self.stream_args["transcode_quality"]:
+            self.fhdhr.logger.info("Client requested a %s transcode for stream. Direct Method cannot transcode." % self.stream_args["transcode_quality"])
+
+        if self.stream_args["stream_info"]["headers"]:
+            req = self.fhdhr.web.session.get(self.stream_args["stream_info"]["url"], stream=True, headers=self.stream_args["stream_info"]["headers"])
+        else:
+            req = self.fhdhr.web.session.get(self.stream_args["stream_info"]["url"], stream=True)

         def generate():

@@ -58,6 +64,8 @@ class Direct_Stream():
             finally:
                 req.close()
                 self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")
+                if hasattr(self.fhdhr.origins.origins_dict[self.tuner.origin], "close_stream"):
+                    self.fhdhr.origins.origins_dict[self.tuner.origin].close_stream(self.tuner.number, self.stream_args)
                 self.tuner.close()
                 # raise TunerError("806 - Tune Failed")
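The direct handler now also warns when a transcode is requested (it cannot transcode) and forwards optional headers on the upstream request. Stripped of the tuner bookkeeping, the relay amounts to a chunked requests passthrough, roughly like this sketch (chunk_size stands in for the configured bytes_per_read value):

import requests

def direct_relay(url, headers=None, chunk_size=1024000):
    # Open the upstream URL in streaming mode and hand back fixed-size chunks.
    req = requests.get(url, stream=True, headers=headers)
    try:
        for chunk in req.iter_content(chunk_size=chunk_size):
            if not chunk:
                break
            yield chunk
    finally:
        req.close()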
|||||||
@ -1,132 +0,0 @@
|
|||||||
import sys
|
|
||||||
import subprocess
|
|
||||||
|
|
||||||
# from fHDHR.exceptions import TunerError
|
|
||||||
|
|
||||||
|
|
||||||
class FFMPEG_Stream():
|
|
||||||
|
|
||||||
def __init__(self, fhdhr, stream_args, tuner):
|
|
||||||
self.fhdhr = fhdhr
|
|
||||||
self.stream_args = stream_args
|
|
||||||
self.tuner = tuner
|
|
||||||
|
|
||||||
self.bytes_per_read = int(self.fhdhr.config.dict["streaming"]["bytes_per_read"])
|
|
||||||
self.ffmpeg_command = self.ffmpeg_command_assemble(stream_args)
|
|
||||||
|
|
||||||
def get(self):
|
|
||||||
|
|
||||||
ffmpeg_proc = subprocess.Popen(self.ffmpeg_command, stdout=subprocess.PIPE)
|
|
||||||
|
|
||||||
def generate():
|
|
||||||
try:
|
|
||||||
while self.tuner.tuner_lock.locked():
|
|
||||||
|
|
||||||
chunk = ffmpeg_proc.stdout.read(self.bytes_per_read)
|
|
||||||
if not chunk:
|
|
||||||
break
|
|
||||||
# raise TunerError("807 - No Video Data")
|
|
||||||
yield chunk
|
|
||||||
chunk_size = int(sys.getsizeof(chunk))
|
|
||||||
self.tuner.add_downloaded_size(chunk_size)
|
|
||||||
self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")
|
|
||||||
|
|
||||||
except GeneratorExit:
|
|
||||||
self.fhdhr.logger.info("Connection Closed.")
|
|
||||||
except Exception as e:
|
|
||||||
self.fhdhr.logger.info("Connection Closed: %s" % e)
|
|
||||||
finally:
|
|
||||||
ffmpeg_proc.terminate()
|
|
||||||
ffmpeg_proc.communicate()
|
|
||||||
ffmpeg_proc.kill()
|
|
||||||
self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")
|
|
||||||
self.tuner.close()
|
|
||||||
# raise TunerError("806 - Tune Failed")
|
|
||||||
|
|
||||||
return generate()
|
|
||||||
|
|
||||||
def ffmpeg_command_assemble(self, stream_args):
|
|
||||||
ffmpeg_command = [
|
|
||||||
self.fhdhr.config.dict["ffmpeg"]["path"],
|
|
||||||
"-i", stream_args["stream_info"]["url"],
|
|
||||||
]
|
|
||||||
ffmpeg_command.extend(self.ffmpeg_duration(stream_args))
|
|
||||||
ffmpeg_command.extend(self.transcode_profiles(stream_args))
|
|
||||||
ffmpeg_command.extend(self.ffmpeg_loglevel())
|
|
||||||
ffmpeg_command.extend(["pipe:stdout"])
|
|
||||||
return ffmpeg_command
|
|
||||||
|
|
||||||
def ffmpeg_duration(self, stream_args):
|
|
||||||
ffmpeg_command = []
|
|
||||||
if stream_args["duration"]:
|
|
||||||
ffmpeg_command.extend(["-t", str(stream_args["duration"])])
|
|
||||||
else:
|
|
||||||
ffmpeg_command.extend(
|
|
||||||
[
|
|
||||||
"-reconnect", "1",
|
|
||||||
"-reconnect_at_eof", "1",
|
|
||||||
"-reconnect_streamed", "1",
|
|
||||||
"-reconnect_delay_max", "2",
|
|
||||||
]
|
|
||||||
)
|
|
||||||
|
|
||||||
return ffmpeg_command
|
|
||||||
|
|
||||||
def ffmpeg_loglevel(self):
|
|
||||||
ffmpeg_command = []
|
|
||||||
log_level = self.fhdhr.config.dict["logging"]["level"].lower()
|
|
||||||
|
|
||||||
loglevel_dict = {
|
|
||||||
"debug": "debug",
|
|
||||||
"info": "info",
|
|
||||||
"error": "error",
|
|
||||||
"warning": "warning",
|
|
||||||
"critical": "fatal",
|
|
||||||
}
|
|
||||||
if log_level not in ["info", "debug"]:
|
|
||||||
ffmpeg_command.extend(["-nostats", "-hide_banner"])
|
|
||||||
ffmpeg_command.extend(["-loglevel", loglevel_dict[log_level]])
|
|
||||||
return ffmpeg_command
|
|
||||||
|
|
||||||
def transcode_profiles(self, stream_args):
|
|
||||||
# TODO implement actual profiles here
|
|
||||||
"""
|
|
||||||
• heavy: transcode to AVC with the same resolution, frame-rate, and interlacing as the
|
|
||||||
original stream. For example 1080i60 AVC 1080i60, 720p60 AVC 720p60. → →
|
|
||||||
• mobile: trancode to AVC progressive not exceeding 1280x720 30fps.
|
|
||||||
• internet720: transcode to low bitrate AVC progressive not exceeding 1280x720 30fps.
|
|
||||||
• internet480: transcode to low bitrate AVC progressive not exceeding 848x480 30fps for
|
|
||||||
16:9 content, not exceeding 640x480 30fps for 4:3 content.
|
|
||||||
• internet360: transcode to low bitrate AVC progressive not exceeding 640x360 30fps for
|
|
||||||
16:9 content, not exceeding 480x360 30fps for 4:3 content.
|
|
||||||
• internet240: transcode to low bitrate AVC progressive not exceeding 432x240 30fps for
|
|
||||||
16:9 content, not exceeding 320x240 30fps for 4:3 content
|
|
||||||
"""
|
|
||||||
|
|
||||||
if stream_args["transcode"]:
|
|
||||||
self.fhdhr.logger.info("Client requested a %s transcode for stream." % stream_args["transcode"])
|
|
||||||
stream_args["transcode"] = None
|
|
||||||
|
|
||||||
ffmpeg_command = []
|
|
||||||
|
|
||||||
if not stream_args["transcode"]:
|
|
||||||
ffmpeg_command.extend(
|
|
||||||
[
|
|
||||||
"-c", "copy",
|
|
||||||
"-f", "mpegts",
|
|
||||||
]
|
|
||||||
)
|
|
||||||
elif stream_args["transcode"] == "heavy":
|
|
||||||
ffmpeg_command.extend([])
|
|
||||||
elif stream_args["transcode"] == "mobile":
|
|
||||||
ffmpeg_command.extend([])
|
|
||||||
elif stream_args["transcode"] == "internet720":
|
|
||||||
ffmpeg_command.extend([])
|
|
||||||
elif stream_args["transcode"] == "internet480":
|
|
||||||
ffmpeg_command.extend([])
|
|
||||||
elif stream_args["transcode"] == "internet360":
|
|
||||||
ffmpeg_command.extend([])
|
|
||||||
elif stream_args["transcode"] == "internet240":
|
|
||||||
ffmpeg_command.extend([])
|
|
||||||
|
|
||||||
return ffmpeg_command
|
|
||||||
@ -1,122 +0,0 @@
|
|||||||
import sys
|
|
||||||
import subprocess
|
|
||||||
|
|
||||||
# from fHDHR.exceptions import TunerError
|
|
||||||
|
|
||||||
|
|
||||||
class VLC_Stream():
|
|
||||||
|
|
||||||
def __init__(self, fhdhr, stream_args, tuner):
|
|
||||||
self.fhdhr = fhdhr
|
|
||||||
self.stream_args = stream_args
|
|
||||||
self.tuner = tuner
|
|
||||||
|
|
||||||
self.bytes_per_read = int(self.fhdhr.config.dict["streaming"]["bytes_per_read"])
|
|
||||||
self.vlc_command = self.vlc_command_assemble(stream_args)
|
|
||||||
|
|
||||||
def get(self):
|
|
||||||
|
|
||||||
vlc_proc = subprocess.Popen(self.vlc_command, stdout=subprocess.PIPE)
|
|
||||||
|
|
||||||
def generate():
|
|
||||||
try:
|
|
||||||
|
|
||||||
while self.tuner.tuner_lock.locked():
|
|
||||||
|
|
||||||
chunk = vlc_proc.stdout.read(self.bytes_per_read)
|
|
||||||
if not chunk:
|
|
||||||
break
|
|
||||||
# raise TunerError("807 - No Video Data")
|
|
||||||
yield chunk
|
|
||||||
chunk_size = int(sys.getsizeof(chunk))
|
|
||||||
self.tuner.add_downloaded_size(chunk_size)
|
|
||||||
self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")
|
|
||||||
|
|
||||||
except GeneratorExit:
|
|
||||||
self.fhdhr.logger.info("Connection Closed.")
|
|
||||||
except Exception as e:
|
|
||||||
self.fhdhr.logger.info("Connection Closed: %s" % e)
|
|
||||||
finally:
|
|
||||||
vlc_proc.terminate()
|
|
||||||
vlc_proc.communicate()
|
|
||||||
vlc_proc.kill()
|
|
||||||
self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")
|
|
||||||
self.tuner.close()
|
|
||||||
# raise TunerError("806 - Tune Failed")
|
|
||||||
|
|
||||||
return generate()
|
|
||||||
|
|
||||||
def vlc_command_assemble(self, stream_args):
|
|
||||||
vlc_command = [
|
|
||||||
self.fhdhr.config.dict["vlc"]["path"],
|
|
||||||
"-I", "dummy", stream_args["stream_info"]["url"],
|
|
||||||
]
|
|
||||||
vlc_command.extend(self.vlc_duration(stream_args))
|
|
||||||
vlc_command.extend(self.vlc_loglevel())
|
|
||||||
vlc_command.extend(["--sout"])
|
|
||||||
vlc_command.extend(self.transcode_profiles(stream_args))
|
|
||||||
return vlc_command
|
|
||||||
|
|
||||||
def vlc_duration(self, stream_args):
|
|
||||||
vlc_command = []
|
|
||||||
if stream_args["duration"]:
|
|
||||||
vlc_command.extend(["--run-time=%s" % str(stream_args["duration"])])
|
|
||||||
return vlc_command
|
|
||||||
|
|
||||||
def vlc_loglevel(self):
|
|
||||||
vlc_command = []
|
|
||||||
log_level = self.fhdhr.config.dict["logging"]["level"].lower()
|
|
||||||
|
|
||||||
loglevel_dict = {
|
|
||||||
"debug": "3",
|
|
||||||
"info": "0",
|
|
||||||
"error": "1",
|
|
||||||
"warning": "2",
|
|
||||||
"critical": "1",
|
|
||||||
}
|
|
||||||
vlc_command.extend(["--log-verbose=", loglevel_dict[log_level]])
|
|
||||||
if log_level not in ["info", "debug"]:
|
|
||||||
vlc_command.extend(["--quiet"])
|
|
||||||
return vlc_command
|
|
||||||
|
|
||||||
def transcode_profiles(self, stream_args):
|
|
||||||
# TODO implement actual profiles here
|
|
||||||
"""
|
|
||||||
• heavy: transcode to AVC with the same resolution, frame-rate, and interlacing as the
|
|
||||||
original stream. For example 1080i60 AVC 1080i60, 720p60 AVC 720p60. → →
|
|
||||||
• mobile: trancode to AVC progressive not exceeding 1280x720 30fps.
|
|
||||||
• internet720: transcode to low bitrate AVC progressive not exceeding 1280x720 30fps.
|
|
||||||
• internet480: transcode to low bitrate AVC progressive not exceeding 848x480 30fps for
|
|
||||||
16:9 content, not exceeding 640x480 30fps for 4:3 content.
|
|
||||||
• internet360: transcode to low bitrate AVC progressive not exceeding 640x360 30fps for
|
|
||||||
16:9 content, not exceeding 480x360 30fps for 4:3 content.
|
|
||||||
• internet240: transcode to low bitrate AVC progressive not exceeding 432x240 30fps for
|
|
||||||
16:9 content, not exceeding 320x240 30fps for 4:3 content
|
|
||||||
"""
|
|
||||||
vlc_command = []
|
|
||||||
|
|
||||||
if stream_args["transcode"]:
|
|
||||||
self.fhdhr.logger.info("Client requested a %s transcode for stream." % stream_args["transcode"])
|
|
||||||
stream_args["transcode"] = None
|
|
||||||
|
|
||||||
vlc_transcode_string = "#std{mux=ts,access=file,dst=-}"
|
|
||||||
return [vlc_transcode_string]
|
|
||||||
|
|
||||||
'#transcode{vcodec=mp2v,vb=4096,acodec=mp2a,ab=192,scale=1,channels=2,deinterlace}:std{access=file,mux=ts,dst=-"}'
|
|
||||||
|
|
||||||
if not stream_args["transcode"]:
|
|
||||||
vlc_command.extend([])
|
|
||||||
elif stream_args["transcode"] == "heavy":
|
|
||||||
vlc_command.extend([])
|
|
||||||
elif stream_args["transcode"] == "mobile":
|
|
||||||
vlc_command.extend([])
|
|
||||||
elif stream_args["transcode"] == "internet720":
|
|
||||||
vlc_command.extend([])
|
|
||||||
elif stream_args["transcode"] == "internet480":
|
|
||||||
vlc_command.extend([])
|
|
||||||
elif stream_args["transcode"] == "internet360":
|
|
||||||
vlc_command.extend([])
|
|
||||||
elif stream_args["transcode"] == "internet240":
|
|
||||||
vlc_command.extend([])
|
|
||||||
|
|
||||||
return vlc_command
|
|
||||||
@@ -8,21 +8,22 @@ from .stream import Stream


 class Tuner():
-    def __init__(self, fhdhr, inum, epg):
+    def __init__(self, fhdhr, inum, epg, origin):
         self.fhdhr = fhdhr

         self.number = inum
+        self.origin = origin
         self.epg = epg

         self.tuner_lock = threading.Lock()
         self.set_off_status()

         self.chanscan_url = "/api/channels?method=scan"
-        self.close_url = "/api/tuners?method=close&tuner=%s" % str(self.number)
+        self.close_url = "/api/tuners?method=close&tuner=%s&origin=%s" % (self.number, self.origin)

-    def channel_scan(self, grabbed=False):
+    def channel_scan(self, origin, grabbed=False):
         if self.tuner_lock.locked() and not grabbed:
-            self.fhdhr.logger.error("Tuner #%s is not available." % str(self.number))
+            self.fhdhr.logger.error("%s Tuner #%s is not available." % (self.origin, self.number))
             raise TunerError("804 - Tuner In Use")

         if self.status["status"] == "Scanning":

@@ -32,14 +33,16 @@ class Tuner():
         if not grabbed:
             self.tuner_lock.acquire()
         self.status["status"] = "Scanning"
-        self.fhdhr.logger.info("Tuner #%s Performing Channel Scan." % str(self.number))
+        self.status["origin"] = origin
+        self.status["time_start"] = datetime.datetime.utcnow()
+        self.fhdhr.logger.info("Tuner #%s Performing Channel Scan for %s origin." % (self.number, origin))

-        chanscan = threading.Thread(target=self.runscan)
+        chanscan = threading.Thread(target=self.runscan, args=(origin,))
         chanscan.start()

-    def runscan(self):
-        self.fhdhr.api.get(self.chanscan_url)
-        self.fhdhr.logger.info("Requested Channel Scan Complete.")
+    def runscan(self, origin):
+        self.fhdhr.api.get("%s&origin=%s" % (self.chanscan_url, origin))
+        self.fhdhr.logger.info("Requested Channel Scan for %s origin Complete." % origin)
         self.close()
         self.fhdhr.api.get(self.close_url)

@@ -47,13 +50,15 @@ class Tuner():
         if "downloaded" in list(self.status.keys()):
             self.status["downloaded"] += bytes_count

-    def grab(self, channel_number):
+    def grab(self, origin, channel_number):
         if self.tuner_lock.locked():
             self.fhdhr.logger.error("Tuner #%s is not available." % self.number)
             raise TunerError("804 - Tuner In Use")
         self.tuner_lock.acquire()
         self.status["status"] = "Acquired"
+        self.status["origin"] = origin
         self.status["channel"] = channel_number
+        self.status["time_start"] = datetime.datetime.utcnow()
         self.fhdhr.logger.info("Tuner #%s Acquired." % str(self.number))

     def close(self):

@@ -64,12 +69,15 @@ class Tuner():

     def get_status(self):
         current_status = self.status.copy()
-        if current_status["status"] == "Active":
-            current_status["Play Time"] = str(
+        current_status["epg"] = {}
+        if current_status["status"] in ["Acquired", "Active", "Scanning"]:
+            current_status["running_time"] = str(
                 humanized_time(
                     int((datetime.datetime.utcnow() - current_status["time_start"]).total_seconds())))
             current_status["time_start"] = str(current_status["time_start"])
-            current_status["epg"] = self.epg.whats_on_now(current_status["channel"])
+        if current_status["status"] in ["Active"]:
+            if current_status["origin"] in self.epg.epg_methods:
+                current_status["epg"] = self.epg.whats_on_now(current_status["channel"], method=current_status["origin"])
         return current_status

     def set_off_status(self):

@@ -77,7 +85,7 @@ class Tuner():

     def get_stream(self, stream_args, tuner):
         stream = Stream(self.fhdhr, stream_args, tuner)
-        return stream.get()
+        return stream

     def set_status(self, stream_args):
         if self.status["status"] != "Active":

@@ -87,6 +95,7 @@ class Tuner():
             "clients_id": [],
             "method": stream_args["method"],
             "accessed": [stream_args["accessed"]],
+            "origin": stream_args["origin"],
             "channel": stream_args["channel"],
             "proxied_url": stream_args["stream_info"]["url"],
             "time_start": datetime.datetime.utcnow(),
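The tuner changes above make each tuner origin-aware: the owning origin is recorded in the status dict, channel scans and close calls carry an origin query parameter, and running time is reported for any busy state. A stripped-down illustration of that lifecycle (not the actual class; field names follow the diff, error type substituted for the project's TunerError):

import datetime
import threading

class TunerSketch():

    def __init__(self, number, origin):
        self.number = number
        self.origin = origin
        self.tuner_lock = threading.Lock()
        self.status = {"status": "Inactive"}

    def grab(self, origin, channel_number):
        if self.tuner_lock.locked():
            # fHDHR raises TunerError("804 - Tuner In Use") here.
            raise RuntimeError("804 - Tuner In Use")
        self.tuner_lock.acquire()
        self.status = {
            "status": "Acquired",
            "origin": origin,
            "channel": channel_number,
            "time_start": datetime.datetime.utcnow(),
        }

    def close(self):
        self.status = {"status": "Inactive"}
        if self.tuner_lock.locked():
            self.tuner_lock.release()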
fHDHR/origins/__init__.py (new file)
@@ -0,0 +1,48 @@

import fHDHR.exceptions


class Origin_StandIN():
    def __init__(self):
        self.setup_success = False

    def get_channels(self):
        return []

    def get_channel_stream(self, chandict, stream_args):
        return None


class Origins():

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

        self.origins_dict = {}
        self.origin_selfadd()
        for plugin_name in list(self.fhdhr.plugins.plugins.keys()):
            if self.fhdhr.plugins.plugins[plugin_name].manifest["tagged_mod"] and self.fhdhr.plugins.plugins[plugin_name].manifest["tagged_mod_type"] == "origin":
                self.fhdhr.plugins.plugins[plugin_name].plugin_utils.origin = self.origins_dict[self.fhdhr.plugins.plugins[plugin_name].manifest["tagged_mod"].lower()]

    @property
    def valid_origins(self):
        return [origin for origin in list(self.origins_dict.keys())]

    def origin_selfadd(self):
        for plugin_name in list(self.fhdhr.plugins.plugins.keys()):
            if self.fhdhr.plugins.plugins[plugin_name].type == "origin":
                method = self.fhdhr.plugins.plugins[plugin_name].name.lower()
                try:
                    plugin_utils = self.fhdhr.plugins.plugins[plugin_name].plugin_utils
                    self.origins_dict[method] = self.fhdhr.plugins.plugins[plugin_name].Plugin_OBJ(plugin_utils)
                    self.fhdhr.logger.info("%s Setup Success" % method)
                    self.origins_dict[method].setup_success = True
                except fHDHR.exceptions.OriginSetupError as e:
                    self.fhdhr.logger.error(e)
                    self.origins_dict[method] = Origin_StandIN()

                if not hasattr(self.origins_dict[method], 'tuners'):
                    self.origins_dict[method].tuners = 4

                if not hasattr(self.origins_dict[method], 'stream_method'):
                    self.origins_dict[method].stream_method = self.fhdhr.config.dict["streaming"]["method"]
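Origins() above builds one registry entry per plugin of type "origin", falls back to Origin_StandIN when setup fails, and fills in default tuners and stream_method attributes when the plugin does not provide them. A hypothetical origin plugin therefore only needs a small surface, along these lines (channel data, URL, and attribute values are illustrative placeholders):

class Plugin_OBJ():

    def __init__(self, plugin_utils):
        self.plugin_utils = plugin_utils
        self.tuners = 2                 # optional; Origins defaults to 4
        self.stream_method = "direct"   # optional; falls back to config

    def get_channels(self):
        # Minimal channel dicts; a real origin would query its backend here.
        return [
            {"name": "Example One", "callsign": "EX1", "number": "1", "id": "ex1"},
        ]

    def get_channel_stream(self, chandict, stream_args):
        # Return enough for the streaming layer: a URL and optional headers.
        return {"url": "http://example.local/ex1.m3u8", "headers": None}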
@@ -1,56 +0,0 @@ (file removed)

from .origin_channels_standin import OriginChannels_StandIN
from .origin_epg_standin import OriginEPG_StandIN

import fHDHR.exceptions


class OriginServiceWrapper():

    def __init__(self, fhdhr, origin):
        self.fhdhr = fhdhr
        self.origin = origin

        self.servicename = fhdhr.config.dict["main"]["servicename"]

        self.setup_success = None
        self.setup()

    def setup(self):

        try:
            self.originservice = self.origin.OriginService(self.fhdhr)
            self.setup_success = True
            self.fhdhr.logger.info("%s Setup Success" % self.servicename)
        except fHDHR.exceptions.OriginSetupError as e:
            self.originservice = None
            self.fhdhr.logger.error(e)
            self.setup_success = False

        if self.setup_success:
            self.channels = self.origin.OriginChannels(self.fhdhr, self.originservice)
            self.epg = self.origin.OriginEPG(self.fhdhr)
        else:
            self.channels = OriginChannels_StandIN()
            self.epg = OriginEPG_StandIN()

    def get_channels(self):
        return self.channels.get_channels()

    def get_channel_stream(self, chandict, stream_args):
        return self.channels.get_channel_stream(chandict, stream_args)

    def update_epg(self, channels):
        return self.epg.update_epg(channels)

    def __getattr__(self, name):
        ''' will only get called for undefined attributes '''
        if hasattr(self.fhdhr, name):
            return eval("self.fhdhr.%s" % name)
        if hasattr(self.originservice, name):
            return eval("self.originservice.%s" % name)
        elif hasattr(self.channels, name):
            return eval("self.channels.%s" % name)
        elif hasattr(self.epg, name):
            return eval("self.epg.%s" % name)
        else:
            raise AttributeError(name)
@@ -1,11 +0,0 @@ (file removed)

class OriginChannels_StandIN():
    def __init__(self):
        pass

    def get_channels(self):
        return []

    def get_channel_stream(self, chandict, stream_args):
        return None
@@ -1,8 +0,0 @@ (file removed)

class OriginEPG_StandIN():
    def __init__(self):
        pass

    def update_epg(self, channels):
        return {}
250
fHDHR/plugins/__init__.py
Normal file
250
fHDHR/plugins/__init__.py
Normal file
@ -0,0 +1,250 @@
|
|||||||
|
import os
|
||||||
|
import imp
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
|
class Plugin_DB():
|
||||||
|
def __init__(self, db, name):
|
||||||
|
self._db = db
|
||||||
|
self.name = name
|
||||||
|
self.namespace = name.lower()
|
||||||
|
|
||||||
|
# fhdhr
|
||||||
|
def set_fhdhr_value(self, pluginitem, key, value, namespace="default"):
|
||||||
|
print("%s plugin is not allowed write access to fhdhr db namespaces." % self.name)
|
||||||
|
return
|
||||||
|
|
||||||
|
def get_fhdhr_value(self, pluginitem, key, namespace="default"):
|
||||||
|
return self._db.get_fhdhr_value(pluginitem, key, namespace=namespace.lower())
|
||||||
|
|
||||||
|
def delete_fhdhr_value(self, pluginitem, key, namespace="default"):
|
||||||
|
print("%s plugin is not allowed write access to fhdhr db namespaces." % self.name)
|
||||||
|
return
|
||||||
|
|
||||||
|
# Plugin
|
||||||
|
def set_plugin_value(self, pluginitem, key, value, namespace=None):
|
||||||
|
if not namespace:
|
||||||
|
namespace = self.namespace
|
||||||
|
elif namespace.lower() != self.namespace:
|
||||||
|
print("%s plugin is not allowed write access to %s db namespace." % (self.name, namespace))
|
||||||
|
return
|
||||||
|
return self._db.set_plugin_value(pluginitem, key, value, namespace=self.namespace)
|
||||||
|
|
||||||
|
def get_plugin_value(self, pluginitem, key, namespace=None):
|
||||||
|
if not namespace:
|
||||||
|
namespace = self.namespace
|
||||||
|
return self._db.get_plugin_value(pluginitem, key, namespace=namespace.lower())
|
||||||
|
|
||||||
|
def delete_plugin_value(self, pluginitem, key, namespace=None):
|
||||||
|
if not namespace:
|
||||||
|
namespace = self.namespace
|
||||||
|
elif namespace.lower() != self.namespace:
|
||||||
|
print("%s plugin is not allowed write access to %s db namespace." % (self.name, namespace))
|
||||||
|
return
|
||||||
|
return self._db.delete_plugin_value(pluginitem, key, namespace=self.namespace)
|
||||||
|
|
||||||
|
|
||||||
|
class Plugin_Config():
|
||||||
|
def __init__(self, config, name):
|
||||||
|
self._config = config
|
||||||
|
self.name = name
|
||||||
|
self.namespace = name.lower()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def dict(self):
|
||||||
|
return self._config.dict.copy()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def internal(self):
|
||||||
|
return self._config.internal.copy()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def conf_default(self):
|
||||||
|
return self._config.conf_default.copy()
|
||||||
|
|
||||||
|
def write(self, key, value, namespace=None):
|
||||||
|
if not namespace:
|
||||||
|
namespace = self.namespace
|
||||||
|
elif str(namespace).lower() != self.namespace:
|
||||||
|
print("%s plugin is not allowed write access to fhdhr config namespaces." % self.name)
|
||||||
|
return
|
||||||
|
return self._config.write(key, value, self.namespace)
|
||||||
|
|
||||||
|
|
||||||
|
class Plugin_Utils():
|
||||||
|
|
||||||
|
def __init__(self, config, logger, db, plugin_name, plugin_manifest, modname):
|
||||||
|
self.config = Plugin_Config(config, plugin_manifest["name"])
|
||||||
|
self.db = Plugin_DB(db, plugin_manifest["name"])
|
||||||
|
self.logger = logger
|
||||||
|
self.namespace = plugin_manifest["name"].lower()
|
||||||
|
self.plugin_name = plugin_name
|
||||||
|
self.plugin_manifest = plugin_manifest
|
||||||
|
self.origin = None
|
||||||
|
|
||||||
|
|
||||||
|
class Plugin():
|
||||||
|
|
||||||
|
def __init__(self, config, logger, db, plugin_name, plugin_path, plugin_conf, plugin_manifest):
|
||||||
|
self.config = config
|
||||||
|
self.db = db
|
||||||
|
self.logger = logger
|
||||||
|
|
||||||
|
# Gather Info about Plugin
|
||||||
|
self.plugin_name = plugin_name
|
||||||
|
self.modname = os.path.basename(plugin_path)
|
||||||
|
self.path = plugin_path
|
||||||
|
self.module_type = imp.PKG_DIRECTORY
|
||||||
|
self.multi_plugin = (self.plugin_name != self.modname)
|
||||||
|
self.default_conf = plugin_conf
|
||||||
|
self.manifest = plugin_manifest
|
||||||
|
|
||||||
|
if self.multi_plugin:
|
||||||
|
self.plugin_dict_name = "%s_%s" % (plugin_name, self.modname)
|
||||||
|
else:
|
||||||
|
self.plugin_dict_name = plugin_name
|
||||||
|
|
||||||
|
self.plugin_utils = Plugin_Utils(config, logger, db, plugin_name, plugin_manifest, self.modname)
|
||||||
|
|
||||||
|
# Load the module
|
||||||
|
self._module = self._load()
|
||||||
|
|
||||||
|
def setup(self):
|
||||||
|
|
||||||
|
if self.type == "alt_epg":
|
||||||
|
self.config.register_valid_epg_method(self.name, self.plugin_dict_name)
|
||||||
|
elif self.type == "alt_stream":
|
||||||
|
self.config.register_valid_streaming_method(self.name, self.plugin_dict_name)
|
||||||
|
elif self.type == "web":
|
||||||
|
self.config.register_web_path(self.manifest["name"], self.path, self.plugin_dict_name)
|
||||||
|
|
||||||
|
if self.has_setup():
|
||||||
|
self._module.setup(self)
|
||||||
|
|
||||||
|
def has_setup(self):
|
||||||
|
return hasattr(self._module, 'setup')
|
||||||
|
|
||||||
|
def _load(self):
|
||||||
|
description = ('', '', self.module_type)
|
||||||
|
mod = imp.load_module(self.plugin_dict_name, None, self.path, description)
|
||||||
|
return mod
|
||||||
|
|
||||||
|
@property
|
||||||
|
def name(self):
|
||||||
|
return self.manifest["name"]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def version(self):
|
||||||
|
return self.manifest["version"]
|
||||||
|
|
||||||
|
@property
|
||||||
|
def type(self):
|
||||||
|
return self.manifest["type"]
|
||||||
|
|
||||||
|
def __getattr__(self, name):
|
||||||
|
''' will only get called for undefined attributes '''
|
||||||
|
if name == "Plugin_OBJ":
|
||||||
|
return self._module.Plugin_OBJ
|
||||||
|
|
||||||
|
|
||||||
|
class PluginsHandler():
|
||||||
|
|
||||||
|
def __init__(self, settings):
|
||||||
|
self.config = settings
|
||||||
|
|
||||||
|
self.plugins = {}
|
||||||
|
|
||||||
|
self.found_plugins = []
|
||||||
|
self.found_plugins_conf = []
|
||||||
|
self.list_plugins()
|
||||||
|
|
||||||
|
def setup(self):
|
||||||
|
for plugin_name in list(self.plugins.keys()):
|
||||||
|
self.plugins[plugin_name].setup()
|
||||||
|
|
||||||
|
def load_plugin_configs(self):
|
||||||
|
for file_item_path in self.found_plugins_conf:
|
||||||
|
self.config.import_conf_json(file_item_path)
|
||||||
|
|
||||||
|
def list_plugins(self):
|
||||||
|
for directory in self.config.internal["paths"]["plugins_dir"]:
|
||||||
|
|
||||||
|
base = os.path.abspath(directory)
|
||||||
|
for filename in os.listdir(base):
|
||||||
|
abspath = os.path.join(base, filename)
|
||||||
|
|
||||||
|
if os.path.isdir(abspath):
|
||||||
|
|
||||||
|
plugin_conf = []
|
||||||
|
for subfilename in os.listdir(abspath):
|
||||||
|
subabspath = os.path.join(abspath, subfilename)
|
||||||
|
if subfilename.endswith("_conf.json"):
|
||||||
|
plugin_conf.append(subabspath)
|
||||||
|
self.found_plugins_conf.append(subabspath)
|
||||||
|
|
||||||
|
# Plugin/multi-plugin must have a basic manifest json
|
||||||
|
conffilepath = os.path.join(abspath, 'plugin.json')
|
||||||
|
if os.path.isfile(conffilepath):
|
||||||
|
plugin_manifest = json.load(open(conffilepath, 'r'))
|
||||||
|
|
||||||
|
for plugin_man_item in ["name", "version", "type"]:
|
||||||
|
if plugin_man_item not in list(plugin_manifest.keys()):
|
||||||
|
plugin_manifest[plugin_man_item] = None
|
||||||
|
|
||||||
|
self.config.register_version(os.path.basename(filename), plugin_manifest["version"], "plugin")
|
||||||
|
|
||||||
|
if plugin_manifest["type"] == "origin":
|
||||||
|
self.config.register_valid_origin_method(plugin_manifest["name"])
|
||||||
|
|
||||||
|
plugin_import_print_string = "Found %s type plugin: %s %s. " % (plugin_manifest["type"], plugin_manifest["name"], plugin_manifest["version"])
|
||||||
|
|
||||||
|
# Warn for multiple origins
|
||||||
|
if plugin_manifest["type"] == "origin" and len([plugin_name for plugin_name, plugin_path, plugin_conf, plugin_manifest in self.found_plugins if plugin_manifest["type"] == "origin"]):
|
||||||
|
plugin_import_print_string += " ImportWarning: Only one Origin Allowed."
|
||||||
|
|
||||||
|
if not any(plugin_manifest[plugin_item] for plugin_item in ["name", "version", "type"]):
|
||||||
|
plugin_import_print_string += " ImportWarning: Missing PLUGIN_* Value."
|
||||||
|
else:
|
||||||
|
|
||||||
|
# Single Plugin
|
||||||
|
if os.path.isfile(os.path.join(abspath, '__init__.py')):
|
||||||
|
plugin_manifest["tagged_mod"] = None
|
||||||
|
plugin_manifest["tagged_mod_type"] = None
|
||||||
|
self.found_plugins.append((os.path.basename(filename), abspath, plugin_conf, plugin_manifest))
|
||||||
|
|
||||||
|
else:
|
||||||
|
|
||||||
|
# Multi-Plugin
|
||||||
|
for subfilename in os.listdir(abspath):
|
||||||
|
subabspath = os.path.join(abspath, subfilename)
|
||||||
|
|
||||||
|
if os.path.isdir(subabspath):
|
||||||
|
|
||||||
|
subconffilepath = os.path.join(subabspath, 'plugin.json')
|
||||||
|
if os.path.isfile(subconffilepath):
|
||||||
|
subplugin_manifest = json.load(open(subconffilepath, 'r'))
|
||||||
|
|
||||||
|
for subplugin_man_item in ["name", "version", "type"]:
|
||||||
|
if subplugin_man_item not in list(subplugin_manifest.keys()):
|
||||||
|
subplugin_manifest[subplugin_man_item] = plugin_manifest[subplugin_man_item]
|
||||||
|
else:
|
||||||
|
subplugin_manifest = plugin_manifest
|
||||||
|
|
||||||
|
subplugin_manifest["tagged_mod"] = None
|
||||||
|
subplugin_manifest["tagged_mod_type"] = None
|
||||||
|
if plugin_manifest["type"] != subplugin_manifest["type"]:
|
||||||
|
subplugin_manifest["tagged_mod"] = plugin_manifest["name"]
|
||||||
|
subplugin_manifest["tagged_mod_type"] = plugin_manifest["type"]
|
||||||
|
|
||||||
|
if os.path.isfile(os.path.join(subabspath, '__init__.py')):
|
||||||
|
self.found_plugins.append((os.path.basename(filename), subabspath, plugin_conf, subplugin_manifest))
|
||||||
|
|
||||||
|
print(plugin_import_print_string)
|
||||||
|
self.load_plugin_configs()
|
||||||
|
|
||||||
|
def load_plugins(self, logger, db):
|
||||||
|
self.logger = logger
|
||||||
|
self.db = db
|
||||||
|
for plugin_name, plugin_path, plugin_conf, plugin_manifest in self.found_plugins:
|
||||||
|
plugin_item = Plugin(self.config, self.logger, self.db, plugin_name, plugin_path, plugin_conf, plugin_manifest)
|
||||||
|
self.plugins[plugin_item.plugin_dict_name] = plugin_item
|
||||||
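The plugin loader above scans each configured plugins directory for folders carrying a plugin.json manifest (plus an __init__.py and optional *_conf.json files) and requires at least the name, version, and type keys, filling any missing one with None. A hypothetical manifest for a single origin plugin might therefore look like this (values are placeholders):

{
    "name": "examplepvr",
    "version": "0.1.0",
    "type": "origin"
}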
@ -6,8 +6,6 @@ import uuid
|
|||||||
from .pages import fHDHR_Pages
|
from .pages import fHDHR_Pages
|
||||||
from .files import fHDHR_Files
|
from .files import fHDHR_Files
|
||||||
from .brython import fHDHR_Brython
|
from .brython import fHDHR_Brython
|
||||||
from .hdhr import fHDHR_HDHR
|
|
||||||
from .rmg import fHDHR_RMG
|
|
||||||
from .api import fHDHR_API
|
from .api import fHDHR_API
|
||||||
|
|
||||||
|
|
||||||
@ -36,33 +34,16 @@ class fHDHR_HTTP_Server():
|
|||||||
|
|
||||||
self.route_list = {}
|
self.route_list = {}
|
||||||
|
|
||||||
self.fhdhr.logger.info("Loading HTTP Pages Endpoints.")
|
self.endpoints_obj = {}
|
||||||
self.pages = fHDHR_Pages(fhdhr)
|
self.endpoints_obj["pages"] = fHDHR_Pages(fhdhr)
|
||||||
self.add_endpoints(self.pages, "pages")
|
self.endpoints_obj["files"] = fHDHR_Files(fhdhr)
|
||||||
|
self.endpoints_obj["brython"] = fHDHR_Brython(fhdhr)
|
||||||
|
self.endpoints_obj["api"] = fHDHR_API(fhdhr)
|
||||||
|
|
||||||
self.fhdhr.logger.info("Loading HTTP Files Endpoints.")
|
self.selfadd_web_plugins()
|
||||||
self.files = fHDHR_Files(fhdhr)
|
for endpoint_type in list(self.endpoints_obj.keys()):
|
||||||
self.add_endpoints(self.files, "files")
|
self.fhdhr.logger.info("Loading HTTP %s Endpoints." % endpoint_type)
|
||||||
|
self.add_endpoints(endpoint_type)
|
||||||
self.fhdhr.logger.info("Loading HTTP Brython Endpoints.")
|
|
||||||
self.brython = fHDHR_Brython(fhdhr)
|
|
||||||
self.add_endpoints(self.brython, "brython")
|
|
||||||
|
|
||||||
self.fhdhr.logger.info("Loading HTTP HDHR Endpoints.")
|
|
||||||
self.hdhr = fHDHR_HDHR(fhdhr)
|
|
||||||
self.add_endpoints(self.hdhr, "hdhr")
|
|
||||||
|
|
||||||
self.fhdhr.logger.info("Loading HTTP RMG Endpoints.")
|
|
||||||
self.rmg = fHDHR_RMG(fhdhr)
|
|
||||||
self.add_endpoints(self.rmg, "rmg")
|
|
||||||
|
|
||||||
self.fhdhr.logger.info("Loading HTTP API Endpoints.")
|
|
||||||
self.api = fHDHR_API(fhdhr)
|
|
||||||
self.add_endpoints(self.api, "api")
|
|
||||||
|
|
||||||
self.fhdhr.logger.info("Loading HTTP Origin Endpoints.")
|
|
||||||
self.origin_endpoints = self.fhdhr.originwrapper.origin.origin_web.fHDHR_Origin_Web(fhdhr)
|
|
||||||
self.add_endpoints(self.origin_endpoints, "origin_endpoints")
|
|
||||||
|
|
||||||
self.fhdhr.app.before_request(self.before_request)
|
self.fhdhr.app.before_request(self.before_request)
|
||||||
self.fhdhr.app.after_request(self.after_request)
|
self.fhdhr.app.after_request(self.after_request)
|
||||||
@ -70,6 +51,16 @@ class fHDHR_HTTP_Server():
|
|||||||
|
|
||||||
self.fhdhr.threads["flask"] = threading.Thread(target=self.run)
|
self.fhdhr.threads["flask"] = threading.Thread(target=self.run)
|
||||||
|
|
||||||
|
def selfadd_web_plugins(self):
|
||||||
|
for plugin_name in list(self.fhdhr.plugins.plugins.keys()):
|
||||||
|
if self.fhdhr.plugins.plugins[plugin_name].type == "web":
|
||||||
|
method = self.fhdhr.plugins.plugins[plugin_name].name.lower()
|
||||||
|
plugin_utils = self.fhdhr.plugins.plugins[plugin_name].plugin_utils
|
||||||
|
try:
|
||||||
|
self.endpoints_obj[method] = self.fhdhr.plugins.plugins[plugin_name].Plugin_OBJ(self.fhdhr, plugin_utils)
|
||||||
|
except Exception as e:
|
||||||
|
print(e)
|
||||||
|
|
||||||
def start(self):
|
def start(self):
|
||||||
self.fhdhr.logger.info("Flask HTTP Thread Starting")
|
self.fhdhr.logger.info("Flask HTTP Thread Starting")
|
||||||
self.fhdhr.threads["flask"].start()
|
self.fhdhr.threads["flask"].start()
|
||||||
@ -87,6 +78,8 @@ class fHDHR_HTTP_Server():
|
|||||||
session["instance_id"] = self.instance_id
|
session["instance_id"] = self.instance_id
|
||||||
session["route_list"] = self.route_list
|
session["route_list"] = self.route_list
|
||||||
|
|
||||||
|
session["user_agent"] = request.headers.get('User-Agent')
|
||||||
|
|
||||||
session["is_internal_api"] = self.detect_internal_api(request)
|
session["is_internal_api"] = self.detect_internal_api(request)
|
||||||
if session["is_internal_api"]:
|
if session["is_internal_api"]:
|
||||||
self.fhdhr.logger.debug("Client is using internal API call.")
|
self.fhdhr.logger.debug("Client is using internal API call.")
|
||||||
@ -153,49 +146,57 @@ class fHDHR_HTTP_Server():
|
|||||||
else:
|
else:
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def add_endpoints(self, index_list, index_name):
|
def add_endpoints(self, index_name):
|
||||||
|
|
||||||
if index_name not in list(self.route_list.keys()):
|
item_list = [x for x in dir(self.endpoints_obj[index_name]) if self.isapath(x)]
|
||||||
self.route_list[index_name] = {}
|
endpoint_main = self.endpoints_obj[index_name]
|
||||||
|
endpoint_main.fhdhr.version # dummy line
|
||||||
item_list = [x for x in dir(index_list) if self.isapath(x)]
|
|
||||||
for item in item_list:
|
for item in item_list:
|
||||||
endpoints = eval("self.%s.%s.%s" % (index_name, item, "endpoints"))
|
endpoints = eval("endpoint_main.%s.%s" % (item, "endpoints"))
|
||||||
if isinstance(endpoints, str):
|
if isinstance(endpoints, str):
|
||||||
endpoints = [endpoints]
|
endpoints = [endpoints]
|
||||||
handler = eval("self.%s.%s" % (index_name, item))
|
handler = eval("endpoint_main.%s" % item)
|
||||||
endpoint_name = eval("self.%s.%s.%s" % (index_name, item, "endpoint_name"))
|
endpoint_name = eval("endpoint_main.%s.%s" % (item, "endpoint_name"))
|
||||||
|
|
||||||
try:
|
try:
|
||||||
endpoint_methods = eval("self.%s.%s.%s" % (index_name, item, "endpoint_methods"))
|
endpoint_methods = eval("endpoint_main.%s.%s" % (item, "endpoint_methods"))
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
endpoint_methods = ['GET']
|
endpoint_methods = ['GET']
|
||||||
|
|
||||||
try:
|
try:
|
||||||
endpoint_access_level = eval("self.%s.%s.%s" % (index_name, item, "endpoint_access_level"))
|
endpoint_access_level = eval("endpoint_main.%s.%s" % (item, "endpoint_access_level"))
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
endpoint_access_level = 0
|
endpoint_access_level = 0
|
||||||
|
|
||||||
try:
|
try:
|
||||||
pretty_name = eval("self.%s.%s.%s" % (index_name, item, "pretty_name"))
|
pretty_name = eval("endpoint_main.%s.%s" % (item, "pretty_name"))
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
pretty_name = endpoint_name
|
pretty_name = endpoint_name
|
||||||
|
|
||||||
try:
|
try:
|
||||||
endpoint_default_parameters = eval("self.%s.%s.%s" % (index_name, item, "endpoint_default_parameters"))
|
endpoint_category = eval("endpoint_main.%s.%s" % (item, "endpoint_category"))
|
||||||
|
except AttributeError:
|
||||||
|
endpoint_category = index_name
|
||||||
|
|
||||||
|
try:
|
||||||
|
endpoint_default_parameters = eval("endpoint_main.%s.%s" % (item, "endpoint_default_parameters"))
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
endpoint_default_parameters = {}
|
endpoint_default_parameters = {}
|
||||||
|
|
||||||
self.fhdhr.logger.debug("Adding endpoint %s available at %s with %s methods." % (endpoint_name, ",".join(endpoints), ",".join(endpoint_methods)))
|
self.fhdhr.logger.debug("Adding endpoint %s available at %s with %s methods." % (endpoint_name, ",".join(endpoints), ",".join(endpoint_methods)))
|
||||||
|
|
||||||
if endpoint_name not in list(self.route_list[index_name].keys()):
|
if endpoint_category not in list(self.route_list.keys()):
|
||||||
self.route_list[index_name][endpoint_name] = {}
|
self.route_list[endpoint_category] = {}
|
||||||
self.route_list[index_name][endpoint_name]["name"] = endpoint_name
|
|
||||||
self.route_list[index_name][endpoint_name]["endpoints"] = endpoints
|
if endpoint_name not in list(self.route_list[endpoint_category].keys()):
|
||||||
self.route_list[index_name][endpoint_name]["endpoint_methods"] = endpoint_methods
|
self.route_list[endpoint_category][endpoint_name] = {}
|
||||||
self.route_list[index_name][endpoint_name]["endpoint_access_level"] = endpoint_access_level
|
self.route_list[endpoint_category][endpoint_name]["name"] = endpoint_name
|
||||||
self.route_list[index_name][endpoint_name]["endpoint_default_parameters"] = endpoint_default_parameters
|
self.route_list[endpoint_category][endpoint_name]["endpoints"] = endpoints
|
||||||
self.route_list[index_name][endpoint_name]["pretty_name"] = pretty_name
|
self.route_list[endpoint_category][endpoint_name]["endpoint_methods"] = endpoint_methods
|
||||||
|
self.route_list[endpoint_category][endpoint_name]["endpoint_access_level"] = endpoint_access_level
|
||||||
|
self.route_list[endpoint_category][endpoint_name]["endpoint_default_parameters"] = endpoint_default_parameters
|
||||||
|
self.route_list[endpoint_category][endpoint_name]["pretty_name"] = pretty_name
|
||||||
|
self.route_list[endpoint_category][endpoint_name]["endpoint_category"] = endpoint_category
|
||||||
|
|
||||||
for endpoint in endpoints:
|
for endpoint in endpoints:
|
||||||
self.add_endpoint(endpoint=endpoint,
|
self.add_endpoint(endpoint=endpoint,
|
||||||
@ -204,7 +205,7 @@ class fHDHR_HTTP_Server():
|
|||||||
methods=endpoint_methods)
|
methods=endpoint_methods)
|
||||||
|
|
||||||
def isapath(self, item):
|
def isapath(self, item):
|
||||||
not_a_page_list = ["fhdhr"]
|
not_a_page_list = ["fhdhr", "plugin_utils"]
|
||||||
if item in not_a_page_list:
|
if item in not_a_page_list:
|
||||||
return False
|
return False
|
||||||
elif item.startswith("__") and item.endswith("__"):
|
elif item.startswith("__") and item.endswith("__"):
|
||||||
|
@@ -2,7 +2,6 @@
 from .root_url import Root_URL
 from .startup_tasks import Startup_Tasks
-from .cluster import Cluster
 from .settings import Settings
 from .channels import Channels
 from .xmltv import xmlTV

@@ -11,7 +10,7 @@ from .w3u import W3U
 from .epg import EPG
 from .tuners import Tuners
 from .debug import Debug_JSON
-from .tools import API_Tools
+from .plugins import Plugins_JSON
 from .route_list import Route_List

@@ -26,7 +25,6 @@ class fHDHR_API():
         self.root_url = Root_URL(fhdhr)
         self.startup_tasks = Startup_Tasks(fhdhr)
-        self.cluster = Cluster(fhdhr)
         self.settings = Settings(fhdhr)
         self.channels = Channels(fhdhr)
         self.xmltv = xmlTV(fhdhr)

@@ -35,7 +33,7 @@ class fHDHR_API():
         self.epg = EPG(fhdhr)
         self.tuners = Tuners(fhdhr)
         self.debug = Debug_JSON(fhdhr)
-        self.tools = API_Tools(fhdhr)
+        self.plugins = Plugins_JSON(fhdhr)
         self.route_list = Route_List(fhdhr)
|||||||
@ -24,22 +24,38 @@ class Channels():
|
|||||||
method = request.args.get('method', default=None, type=str)
|
method = request.args.get('method', default=None, type=str)
|
||||||
redirect_url = request.args.get('redirect', default=None, type=str)
|
redirect_url = request.args.get('redirect', default=None, type=str)
|
||||||
|
|
||||||
|
origin_methods = self.fhdhr.origins.valid_origins
|
||||||
|
origin = request.args.get('origin', default=None, type=str)
|
||||||
|
if origin and origin not in origin_methods:
|
||||||
|
return "%s Invalid channels origin" % origin
|
||||||
|
|
||||||
if method == "get":
|
if method == "get":
|
||||||
channels_info = {}
|
channels_info = {}
|
||||||
for fhdhr_id in [x["id"] for x in self.fhdhr.device.channels.get_channels()]:
|
if not origin:
|
||||||
channel_obj = self.fhdhr.device.channels.list[fhdhr_id]
|
origin_list = origin_methods
|
||||||
channel_dict = channel_obj.dict.copy()
|
else:
|
||||||
channel_dict["m3u_url"] = channel_obj.m3u_url
|
origin_list = [origin]
|
||||||
channel_dict["stream_url"] = channel_obj.api_stream_url
|
|
||||||
channels_info[channel_obj.number] = channel_dict
|
|
||||||
|
|
||||||
# Sort the channels
|
for origin_item in origin_list:
|
||||||
sorted_channel_list = channel_sort(list(channels_info.keys()))
|
|
||||||
sorted_chan_guide = []
|
|
||||||
for channel in sorted_channel_list:
|
|
||||||
sorted_chan_guide.append(channels_info[channel])
|
|
||||||
|
|
||||||
channels_info_json = json.dumps(sorted_chan_guide, indent=4)
|
channels_info[origin_item] = {}
|
||||||
|
|
||||||
|
for fhdhr_id in [x["id"] for x in self.fhdhr.device.channels.get_channels(origin=origin_item)]:
|
||||||
|
channel_obj = self.fhdhr.device.channels.list[origin_item][fhdhr_id]
|
||||||
|
channel_dict = channel_obj.dict.copy()
|
||||||
|
channel_dict["m3u_url"] = channel_obj.api_m3u_url
|
||||||
|
channel_dict["stream_url"] = channel_obj.api_stream_url
|
||||||
|
channels_info[origin_item][channel_obj.number] = channel_dict
|
||||||
|
|
||||||
|
# Sort the channels
|
||||||
|
sorted_channel_list = channel_sort(list(channels_info[origin_item].keys()))
|
||||||
|
sorted_chan_guide = []
|
||||||
|
for channel in sorted_channel_list:
|
||||||
|
sorted_chan_guide.append(channels_info[origin_item][channel])
|
||||||
|
|
||||||
|
channels_info[origin_item] = sorted_chan_guide
|
||||||
|
|
||||||
|
channels_info_json = json.dumps(channels_info, indent=4)
|
||||||
|
|
||||||
return Response(status=200,
|
return Response(status=200,
|
||||||
response=channels_info_json,
|
response=channels_info_json,
|
||||||
@ -59,18 +75,18 @@ class Channels():
|
|||||||
channel_method = channel[0]
|
channel_method = channel[0]
|
||||||
channel_number = channel[1:]
|
channel_number = channel[1:]
|
||||||
|
|
||||||
if str(channel_number) not in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number")]:
|
if str(channel_number) not in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number", origin)]:
|
||||||
response = Response("Not Found", status=404)
|
response = Response("Not Found", status=404)
|
||||||
response.headers["X-fHDHR-Error"] = "801 - Unknown Channel"
|
response.headers["X-fHDHR-Error"] = "801 - Unknown Channel"
|
||||||
self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
|
self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
|
||||||
abort(response)
|
abort(response)
|
||||||
|
|
||||||
if channel_method == "+":
|
if channel_method == "+":
|
||||||
self.fhdhr.device.channels.set_channel_enablement("number", channel_number, channel_method)
|
self.fhdhr.device.channels.set_channel_enablement("number", channel_number, channel_method, origin)
|
||||||
elif channel_method == "-":
|
elif channel_method == "-":
|
||||||
self.fhdhr.device.channels.set_channel_enablement("number", channel_number, channel_method)
|
self.fhdhr.device.channels.set_channel_enablement("number", channel_number, channel_method, origin)
|
||||||
elif channel_method == "x":
|
elif channel_method == "x":
|
||||||
self.fhdhr.device.channels.set_channel_enablement("number", channel_number, "toggle")
|
self.fhdhr.device.channels.set_channel_enablement("number", channel_number, "toggle", origin)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
self.fhdhr.logger.warning("Unknown favorite command %s" % request.args['favorite'])
|
self.fhdhr.logger.warning("Unknown favorite command %s" % request.args['favorite'])
|
||||||
@ -79,14 +95,14 @@ class Channels():
|
|||||||
elif method in ["enable", "disable"]:
|
elif method in ["enable", "disable"]:
|
||||||
channel = request.args.get('channel', default=None, type=str)
|
channel = request.args.get('channel', default=None, type=str)
|
||||||
if channel == "all":
|
if channel == "all":
|
||||||
self.fhdhr.device.channels.set_channel_enablement_all(method)
|
self.fhdhr.device.channels.set_channel_enablement_all(method, origin)
|
||||||
elif not channel or str(channel) not in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number")]:
|
elif not channel or str(channel) not in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number", origin)]:
|
||||||
if redirect_url:
|
if redirect_url:
|
||||||
return redirect("%s?retmessage=%s" % (redirect_url, urllib.parse.quote("%s Failed" % method)))
|
return redirect("%s?retmessage=%s" % (redirect_url, urllib.parse.quote("%s Failed" % method)))
|
||||||
else:
|
else:
|
||||||
return "%s Falied" % method
|
return "%s Falied" % method
|
||||||
else:
|
else:
|
||||||
self.fhdhr.device.channels.set_channel_enablement("number", channel, method)
|
self.fhdhr.device.channels.set_channel_enablement("number", channel, method, origin)
|
||||||
|
|
||||||
elif method == "update":
|
elif method == "update":
|
||||||
channel_id = request.form.get('id', None)
|
channel_id = request.form.get('id', None)
|
||||||
@ -111,7 +127,7 @@ class Channels():
|
|||||||
updatedict[key] = confvalue
|
updatedict[key] = confvalue
|
||||||
elif key in ["favorite", "HD"]:
|
elif key in ["favorite", "HD"]:
|
||||||
updatedict[key] = int(request.form.get(key))
|
updatedict[key] = int(request.form.get(key))
|
||||||
self.fhdhr.device.channels.set_channel_status("id", channel_id, updatedict)
|
self.fhdhr.device.channels.set_channel_status("id", channel_id, updatedict, origin)
|
||||||
|
|
||||||
elif method == "modify":
|
elif method == "modify":
|
||||||
channels_list = json.loads(request.form.get('channels', []))
|
channels_list = json.loads(request.form.get('channels', []))
|
||||||
@ -139,10 +155,10 @@ class Channels():
|
|||||||
updatedict[key] = int(channel[key])
|
updatedict[key] = int(channel[key])
|
||||||
else:
|
else:
|
||||||
channel_id = str(channel[key])
|
channel_id = str(channel[key])
|
||||||
self.fhdhr.device.channels.set_channel_status("id", channel_id, updatedict)
|
self.fhdhr.device.channels.set_channel_status("id", channel_id, updatedict, origin)
|
||||||
|
|
||||||
elif method == "scan":
|
elif method == "scan":
|
||||||
self.fhdhr.device.channels.get_channels(forceupdate=True)
|
self.fhdhr.device.channels.get_channels(forceupdate=True, origin=origin)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
return "Invalid Method"
|
return "Invalid Method"
|
||||||
|
@@ -19,11 +19,16 @@ class Debug_JSON():

         debugjson = {
             "base_url": base_url,
-            "total channels": len(self.fhdhr.device.channels.list),
-            "tuner status": self.fhdhr.device.tuners.status(),
             }
-        cluster_json = json.dumps(debugjson, indent=4)
+
+        for origin in list(self.fhdhr.origins.origins_dict.keys()):
+            debugjson[origin] = {
+                "tuner status": self.fhdhr.device.tuners.status(origin),
+                "total channels": len(list(self.fhdhr.device.channels.list[origin].keys()))
+                }
+
+        debug_json = json.dumps(debugjson, indent=4)

         return Response(status=200,
-                        response=cluster_json,
+                        response=debug_json,
                         mimetype='application/json')
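With that change the debug endpoint nests tuner status and channel counts under each origin instead of reporting flat totals. An illustrative, made-up response shape (host, origin name, and values are placeholders):

{
    "base_url": "http://127.0.0.1:5004",
    "examplepvr": {
        "tuner status": {"Tuner 0": {"status": "Inactive"}},
        "total channels": 42
    }
}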
@@ -23,18 +23,18 @@ class EPG():
         method = request.args.get('method', default="get", type=str)

         source = request.args.get('source', default=self.fhdhr.config.dict["epg"]["def_method"], type=str)
-        if source not in self.fhdhr.config.dict["epg"]["valid_epg_methods"]:
-            return "%s Invalid xmltv method" % source
+        if source not in list(self.fhdhr.config.dict["epg"]["valid_methods"].keys()):
+            return "%s Invalid epg method" % source

         redirect_url = request.args.get('redirect', default=None, type=str)

         if method == "get":

             epgdict = self.fhdhr.device.epg.get_epg(source)
-            if source in ["blocks", "origin", self.fhdhr.config.dict["main"]["dictpopname"]]:
+            if source in self.fhdhr.origins.valid_origins:
                 epgdict = epgdict.copy()
                 for c in list(epgdict.keys()):
-                    chan_obj = self.fhdhr.device.channels.get_channel_obj("origin_id", epgdict[c]["id"])
+                    chan_obj = self.fhdhr.device.channels.get_channel_obj("origin_id", epgdict[c]["id"], source)
                     epgdict[chan_obj.number] = epgdict.pop(c)
                     epgdict[chan_obj.number]["name"] = chan_obj.dict["name"]
                     epgdict[chan_obj.number]["callsign"] = chan_obj.dict["callsign"]

@@ -93,14 +93,14 @@ class EPG():
                 else:
                     chan_dict["listing_%s" % time_item] = str(datetime.datetime.fromtimestamp(sorted_chan_guide[channel]["listing"][0][time_item]))

-            if source in ["blocks", "origin", self.fhdhr.config.dict["main"]["dictpopname"]]:
-                chan_obj = self.fhdhr.device.channels.get_channel_obj("origin_id", sorted_chan_guide[channel]["id"])
+            if source in self.fhdhr.origins.valid_origins:
+                chan_obj = self.fhdhr.device.channels.get_channel_obj("origin_id", sorted_chan_guide[channel]["id"], source)

                 chan_dict["name"] = chan_obj.dict["name"]
                 chan_dict["number"] = chan_obj.number
                 chan_dict["chan_thumbnail"] = chan_obj.thumbnail
                 chan_dict["enabled"] = chan_obj.dict["enabled"]
-                chan_dict["m3u_url"] = chan_obj.m3u_url
+                chan_dict["m3u_url"] = chan_obj.api_m3u_url

                 chan_dict["listing_thumbnail"] = chan_dict["listing_thumbnail"] or chan_obj.thumbnail
             else:
|||||||
@@ -31,7 +31,7 @@ class Images():

elif method == "get":
source = request.args.get('source', default=self.fhdhr.config.dict["epg"]["method"], type=str)
-if source in self.fhdhr.config.dict["epg"]["valid_epg_methods"]:
+if source in list(self.fhdhr.config.dict["epg"]["valid_methods"].keys()):
image_type = request.args.get('type', default="content", type=str)
if image_type in ["content", "channel"]:
image_id = request.args.get('id', default=None, type=str)
@@ -26,6 +26,11 @@ class M3U():

if method == "get":

+origin_methods = self.fhdhr.origins.valid_origins
+origin = request.args.get('origin', default=None, type=str)
+if origin and origin not in origin_methods:
+return "%s Invalid channels origin" % origin
+
FORMAT_DESCRIPTOR = "#EXTM3U"
RECORD_MARKER = "#EXTINF"

@@ -37,14 +42,36 @@ class M3U():

channel_items = []

-if channel == "all":
+if origin:
-fileName = "channels.m3u"
+if channel == "all":
-for fhdhr_id in [x["id"] for x in self.fhdhr.device.channels.get_channels()]:
+fileName = "channels.m3u"
-channel_obj = self.fhdhr.device.channels.list[fhdhr_id]
+for fhdhr_id in [x["id"] for x in self.fhdhr.device.channels.get_channels(origin)]:
+channel_obj = self.fhdhr.device.channels.get_channel_obj("id", fhdhr_id, origin)
+if channel_obj.enabled:
+channel_items.append(channel_obj)
+elif str(channel) in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number", origin)]:
+channel_obj = self.fhdhr.device.channels.get_channel_obj("number", channel, origin)
+fileName = "%s.m3u" % channel_obj.number
if channel_obj.enabled:
channel_items.append(channel_obj)
-elif str(channel) in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number")]:
+else:
-channel_obj = self.fhdhr.device.channels.get_channel_obj("number", channel)
+return "Channel Disabled"
+elif channel != "all" and str(channel) in [str(x) for x in self.fhdhr.device.channels.get_channel_list("id", origin)]:
+channel_obj = self.fhdhr.device.channels.get_channel_obj("id", channel, origin)
+fileName = "%s.m3u" % channel_obj.number
+if channel_obj.enabled:
+channel_items.append(channel_obj)
+else:
+return "Channel Disabled"
+elif not origin and channel == "all":
+fileName = "channels.m3u"
+for origin in list(self.fhdhr.origins.origins_dict.keys()):
+for fhdhr_id in [x["id"] for x in self.fhdhr.device.channels.get_channels(origin)]:
+channel_obj = self.fhdhr.device.channels.get_channel_obj("id", fhdhr_id, origin)
+if channel_obj.enabled:
+channel_items.append(channel_obj)
+elif not origin and channel != "all" and str(channel) in [str(x) for x in self.fhdhr.device.channels.get_channel_list("id")]:
+channel_obj = self.fhdhr.device.channels.get_channel_obj("id", channel)
fileName = "%s.m3u" % channel_obj.number
if channel_obj.enabled:
channel_items.append(channel_obj)

@@ -68,7 +95,7 @@ class M3U():
"tvg-name": str(channel_obj.dict['name']),
"tvg-id": str(channel_obj.number),
"tvg-logo": logourl,
-"group-title": self.fhdhr.config.dict["fhdhr"]["friendlyname"],
+"group-title": channel_obj.origin,
"group-titleb": str(channel_obj.dict['name']),
"stream_url": "%s%s" % (base_url, channel_obj.api_stream_url)
}
fHDHR_web/api/plugins.py (new file, 30 lines)
@@ -0,0 +1,30 @@
+from flask import Response
+import json
+
+
+class Plugins_JSON():
+endpoints = ["/api/plugins"]
+endpoint_name = "api_plugins"
+endpoint_methods = ["GET", "POST"]
+
+def __init__(self, fhdhr):
+self.fhdhr = fhdhr
+
+def __call__(self, *args):
+return self.get(*args)
+
+def get(self, *args):
+
+pluginsjson = {}
+
+for plugin in list(self.fhdhr.plugins.plugins.keys()):
+pluginsjson[plugin] = {
+"name": plugin,
+"manifest": self.fhdhr.plugins.plugins[plugin].manifest
+}
+
+plugins_json = json.dumps(pluginsjson, indent=4)
+
+return Response(status=200,
+response=plugins_json,
+mimetype='application/json')
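For reference, the new /api/plugins endpoint added above returns a JSON object keyed by plugin name, each entry carrying that plugin's manifest. A minimal sketch of querying it from Python (the host and port below are assumptions; substitute your own fHDHR address):

import json
import urllib.request

# Assumed fHDHR address; adjust host/port for your install.
FHDHR_URL = "http://127.0.0.1:5004"

# Fetch the plugin listing served by the Plugins_JSON endpoint added above.
with urllib.request.urlopen("%s/api/plugins" % FHDHR_URL) as resp:
    plugins = json.loads(resp.read().decode("utf-8"))

# Each key is a plugin name; each value includes that plugin's manifest.
for name, info in plugins.items():
    print(name, info["manifest"])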
@@ -1,4 +1,4 @@
-from flask import redirect, request, session
+from flask import redirect


class Root_URL():

@@ -13,20 +13,4 @@ class Root_URL():
return self.get(*args)

def get(self, *args):
+return redirect("/index")
-user_agent = request.headers.get('User-Agent')
-
-# Client Devices Discovering Device Information
-if not user_agent or session["is_plexmediaserver"]:
-
-# Plex Remote Media Grabber redirect
-if self.fhdhr.config.dict["rmg"]["enabled"] and session["is_plexmediaserver"]:
-return redirect("/rmg")
-
-# Client Device is looking for HDHR type device
-else:
-return redirect("/hdhr/device.xml")
-
-# Anything Else is likely a Web Browser
-else:
-return redirect("/index")
@@ -30,11 +30,8 @@ class Settings():
web_settings_dict[config_section] = {}

for config_item in list(self.fhdhr.config.conf_default[config_section].keys()):
-real_config_section = config_section
-if config_section == self.fhdhr.config.dict["main"]["dictpopname"]:
-real_config_section = "origin"
web_settings_dict[config_section][config_item] = {
-"value": self.fhdhr.config.dict[real_config_section][config_item],
+"value": self.fhdhr.config.dict[config_section][config_item],
}
if self.fhdhr.config.conf_default[config_section][config_item]["config_web_hidden"]:
web_settings_dict[config_section][config_item]["value"] = "***********"

@@ -56,10 +53,7 @@ class Settings():
else:
return "%s Falied" % method

-if config_section == "origin":
+self.fhdhr.config.write(config_name, config_value, config_section)
-config_section = self.fhdhr.config.dict["main"]["dictpopname"]
-
-self.fhdhr.config.write(config_section, config_name, config_value)

elif method == "restart":
restart_thread = threading.Thread(target=self.restart_thread)
@@ -16,6 +16,8 @@ class Startup_Tasks():

def get(self, *args):

+self.fhdhr.logger.info("Running Startup Tasks.")
+
# Hit Channel Update API
haseverscanned = self.fhdhr.db.get_fhdhr_value("channels", "scanned_time")
updatechannels = False

@@ -25,10 +27,13 @@ class Startup_Tasks():
updatechannels = True

if updatechannels:
-self.fhdhr.api.get(self.channel_update_url)
+for origin in list(self.fhdhr.origins.origins_dict.keys()):
+self.fhdhr.api.get("%s&origin=%s" % (self.channel_update_url, origin))

# Hit EPG Update API
for epg_method in self.fhdhr.device.epg.epg_methods:
self.fhdhr.api.get("%s&source=%s" % (self.epg_update_url, epg_method))

+self.fhdhr.logger.info("Startup Tasks Complete.")
+
return "Success"
@@ -16,10 +16,6 @@ class Tuners():
def __init__(self, fhdhr):
self.fhdhr = fhdhr

-self.quality = self.fhdhr.config.dict["streaming"]["quality"]
-if self.quality:
-self.quality = str(self.quality).lower()

def __call__(self, *args):
return self.get(*args)

@@ -35,33 +31,60 @@ class Tuners():

redirect_url = request.args.get('redirect', default=None, type=str)

-if method in ["direct", "ffmpeg", "vlc"]:
+origin_methods = self.fhdhr.origins.valid_origins
+origin = request.args.get('origin', default=None, type=str)
+if origin and origin not in origin_methods:
+return "%s Invalid channels origin" % origin
+
+if method == "stream":
+
channel_number = request.args.get('channel', None, type=str)
if not channel_number:
return "Missing Channel"

-if str(channel_number) not in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number")]:
+if origin:
-response = Response("Not Found", status=404)
-response.headers["X-fHDHR-Error"] = "801 - Unknown Channel"
-self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
-abort(response)

-channel_dict = self.fhdhr.device.channels.get_channel_dict("number", channel_number)
+if str(channel_number) in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number", origin)]:
-if not channel_dict["enabled"]:
+chan_obj = self.fhdhr.device.channels.get_channel_obj("number", channel_number, origin)
+elif str(channel_number) in [str(x) for x in self.fhdhr.device.channels.get_channel_list("id", origin)]:
+chan_obj = self.fhdhr.device.channels.get_channel_obj("id", channel_number, origin)
+else:
+response = Response("Not Found", status=404)
+response.headers["X-fHDHR-Error"] = "801 - Unknown Channel"
+self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
+abort(response)
+
+else:
+
+if str(channel_number) in [str(x) for x in self.fhdhr.device.channels.get_channel_list("id")]:
+chan_obj = self.fhdhr.device.channels.get_channel_obj("id", channel_number)
+else:
+response = Response("Not Found", status=404)
+response.headers["X-fHDHR-Error"] = "801 - Unknown Channel"
+self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
+abort(response)
+
+if not chan_obj.dict["enabled"]:
response = Response("Service Unavailable", status=503)
response.headers["X-fHDHR-Error"] = str("806 - Tune Failed")
self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
abort(response)

+origin = chan_obj.origin
+channel_number = chan_obj.number
+
+stream_method = request.args.get('stream_method', default=self.fhdhr.origins.origins_dict[origin].stream_method, type=str)
+if stream_method not in list(self.fhdhr.config.dict["streaming"]["valid_methods"].keys()):
+response = Response("Service Unavailable", status=503)
+response = Response("Service Unavailable", status=503)
+response.headers["X-fHDHR-Error"] = str("806 - Tune Failed")
+abort(response)
+
duration = request.args.get('duration', default=0, type=int)

-transcode = request.args.get('transcode', default=self.quality, type=str)
+transcode_quality = request.args.get('transcode', default=None, type=str)
-valid_transcode_types = [
+valid_transcode_types = [None, "heavy", "mobile", "internet720", "internet480", "internet360", "internet240"]
-None, "high", "medium", "low"
+if transcode_quality not in valid_transcode_types:
-"heavy", "mobile", "internet720", "internet480", "internet360", "internet240"
-]
-if transcode not in valid_transcode_types:
response = Response("Service Unavailable", status=503)
response.headers["X-fHDHR-Error"] = "802 - Unknown Transcode Profile"
self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])

@@ -69,9 +92,11 @@ class Tuners():

stream_args = {
"channel": channel_number,
-"method": method,
+"origin": origin,
+"method": stream_method,
"duration": duration,
-"transcode": transcode,
+"origin_quality": self.fhdhr.config.dict["streaming"]["origin_quality"],
+"transcode_quality": transcode_quality or self.fhdhr.config.dict["streaming"]["transcode_quality"],
"accessed": accessed_url,
"client": client_address,
"client_id": session["session_id"]

@@ -79,9 +104,9 @@ class Tuners():

try:
if not tuner_number:
-tunernum = self.fhdhr.device.tuners.first_available(channel_number)
+tunernum = self.fhdhr.device.tuners.first_available(origin, channel_number)
else:
-tunernum = self.fhdhr.device.tuners.tuner_grab(tuner_number, channel_number)
+tunernum = self.fhdhr.device.tuners.tuner_grab(tuner_number, origin, channel_number)
except TunerError as e:
self.fhdhr.logger.info("A %s stream request for channel %s was rejected due to %s"
% (stream_args["method"], str(stream_args["channel"]), str(e)))

@@ -90,52 +115,79 @@ class Tuners():
self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
abort(response)

-tuner = self.fhdhr.device.tuners.tuners[str(tunernum)]
+tuner = self.fhdhr.device.tuners.tuners[origin][str(tunernum)]

try:
stream_args = self.fhdhr.device.tuners.get_stream_info(stream_args)
except TunerError as e:
-self.fhdhr.logger.info("A %s stream request for channel %s was rejected due to %s"
+self.fhdhr.logger.info("A %s stream request for %s channel %s was rejected due to %s"
-% (stream_args["method"], str(stream_args["channel"]), str(e)))
+% (origin, stream_args["method"], str(stream_args["channel"]), str(e)))
response = Response("Service Unavailable", status=503)
response.headers["X-fHDHR-Error"] = str(e)
self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
tuner.close()
abort(response)

-self.fhdhr.logger.info("Tuner #%s to be used for stream." % tunernum)
+self.fhdhr.logger.info("%s Tuner #%s to be used for stream." % (origin, tunernum))
tuner.set_status(stream_args)
session["tuner_used"] = tunernum

-return Response(stream_with_context(tuner.get_stream(stream_args, tuner)), mimetype=stream_args["content_type"])
+try:
+stream = tuner.get_stream(stream_args, tuner)
+except TunerError as e:
+response.headers["X-fHDHR-Error"] = str(e)
+self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
+tuner.close()
+abort(response)
+
+return Response(stream_with_context(stream.get()), mimetype=stream_args["content_type"])

elif method == "close":

-if not tuner_number or str(tuner_number) not in list(self.fhdhr.device.tuners.tuners.keys()):
+if not origin:
+return "Missing Origin"
+
+if not tuner_number or str(tuner_number) not in list(self.fhdhr.device.tuners.tuners[origin].keys()):
return "%s Invalid tuner" % str(tuner_number)

session["tuner_used"] = tuner_number

-tuner = self.fhdhr.device.tuners.tuners[str(tuner_number)]
+tuner = self.fhdhr.device.tuners.tuners[origin][str(tuner_number)]
tuner.close()

elif method == "scan":

-if not tuner_number:
+if not origin:
-tunernum = self.fhdhr.device.tuners.first_available(None)
+for origin in list(self.fhdhr.device.tuners.tuners.keys()):
+if not tuner_number:
+tunernum = self.fhdhr.device.tuners.first_available(origin, None)
+else:
+tunernum = self.fhdhr.device.tuners.tuner_grab(tuner_number, origin, None)
+tuner = self.fhdhr.device.tuners.tuners[origin][str(tunernum)]
+tuner.channel_scan(origin=origin, grabbed=False)
else:
-tunernum = self.fhdhr.device.tuners.tuner_grab(tuner_number, None)
+if not tuner_number:
-tuner = self.fhdhr.device.tuners.tuners[str(tunernum)]
+tunernum = self.fhdhr.device.tuners.first_available(origin, None)
-tuner.channel_scan(grabbed=True)
+else:
+tunernum = self.fhdhr.device.tuners.tuner_grab(tuner_number, origin, None)
+tuner = self.fhdhr.device.tuners.tuners[origin][str(tunernum)]
+tuner.channel_scan(origin=origin, grabbed=True)

elif method == "status":

-if not tuner_number:
+if not origin:
-tuner_status = self.fhdhr.device.tuners.status()
+if not tuner_number:
-elif str(tuner_number) in list(self.fhdhr.device.tuners.tuners.keys()):
+tuner_status = self.fhdhr.device.tuners.status()
-tuner_status = self.fhdhr.device.tuners.tuners[str(tuner_number)].get_status()
+else:
+tuner_status = ["Invalid Tuner %s" % tuner_number]
else:
-tuner_status = ["Invalid Tuner %s" % tuner_number]
+if not tuner_number:
+tuner_status = self.fhdhr.device.tuners.status(origin)
+elif str(tuner_number) in list(self.fhdhr.device.tuners.tuners[origin].keys()):
+tuner_status = self.fhdhr.device.tuners.tuners[origin][str(tuner_number)].get_status()
+else:
+tuner_status = ["Invalid Tuner %s" % tuner_number]

tuner_status_json = json.dumps(tuner_status, indent=4)
@@ -26,7 +26,12 @@ class W3U():

if method == "get":

-channel_info_m3u = {
+origin_methods = self.fhdhr.origins.valid_origins
+origin = request.args.get('origin', default=None, type=str)
+if origin and origin not in origin_methods:
+return "%s Invalid channels origin" % origin
+
+channel_info_w3u = {
"name": self.fhdhr.config.dict["fhdhr"]["friendlyname"],
"image": '%s/favicon.ico' % base_url,
"epg": '%s/api/xmltv' % base_url,

@@ -35,14 +40,36 @@ class W3U():

channel_items = []

-if channel == "all":
+if origin:
-fileName = "channels.w3u"
+if channel == "all":
-for fhdhr_id in [x["id"] for x in self.fhdhr.device.channels.get_channels()]:
+fileName = "channels.w3u"
-channel_obj = self.fhdhr.device.channels.list[fhdhr_id]
+for fhdhr_id in [x["id"] for x in self.fhdhr.device.channels.get_channels(origin)]:
+channel_obj = self.fhdhr.device.channels.get_channel_obj("id", fhdhr_id, origin)
+if channel_obj.enabled:
+channel_items.append(channel_obj)
+elif str(channel) in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number", origin)]:
+channel_obj = self.fhdhr.device.channels.get_channel_obj("number", channel, origin)
+fileName = "%s.w3u" % channel_obj.number
if channel_obj.enabled:
channel_items.append(channel_obj)
-elif str(channel) in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number")]:
+else:
-channel_obj = self.fhdhr.device.channels.get_channel_obj("number", channel)
+return "Channel Disabled"
+elif channel != "all" and str(channel) in [str(x) for x in self.fhdhr.device.channels.get_channel_list("id", origin)]:
+channel_obj = self.fhdhr.device.channels.get_channel_obj("id", channel, origin)
+fileName = "%s.w3u" % channel_obj.number
+if channel_obj.enabled:
+channel_items.append(channel_obj)
+else:
+return "Channel Disabled"
+elif not origin and channel == "all":
+fileName = "channels.w3u"
+for origin in list(self.fhdhr.origins.origins_dict.keys()):
+for fhdhr_id in [x["id"] for x in self.fhdhr.device.channels.get_channels(origin)]:
+channel_obj = self.fhdhr.device.channels.get_channel_obj("id", fhdhr_id, origin)
+if channel_obj.enabled:
+channel_items.append(channel_obj)
+elif not origin and channel != "all" and str(channel) in [str(x) for x in self.fhdhr.device.channels.get_channel_list("id")]:
+channel_obj = self.fhdhr.device.channels.get_channel_obj("id", channel)
fileName = "%s.w3u" % channel_obj.number
if channel_obj.enabled:
channel_items.append(channel_obj)

@@ -71,9 +98,9 @@ class W3U():
# Sort the channels
sorted_channel_list = channel_sort(list(channels_info.keys()))
for channel in sorted_channel_list:
-channel_info_m3u["stations"].append(channels_info[channel])
+channel_info_w3u["stations"].append(channels_info[channel])

-channels_info_json = json.dumps(channel_info_m3u, indent=4)
+channels_info_json = json.dumps(channel_info_w3u, indent=4)

resp = Response(status=200, response=channels_info_json, mimetype='application/json')
resp.headers["content-disposition"] = "attachment; filename=%s" % fileName
@@ -38,7 +38,7 @@ class xmlTV():
method = request.args.get('method', default="get", type=str)

source = request.args.get('source', default=self.fhdhr.config.dict["epg"]["def_method"], type=str)
-if source not in self.fhdhr.config.dict["epg"]["valid_epg_methods"]:
+if source not in list(self.fhdhr.config.dict["epg"]["valid_methods"].keys()):
return "%s Invalid xmltv method" % source

redirect_url = request.args.get('redirect', default=None, type=str)

@@ -47,10 +47,10 @@ class xmlTV():

epgdict = self.fhdhr.device.epg.get_epg(source)

-if source in ["blocks", "origin", self.fhdhr.config.dict["main"]["dictpopname"]]:
+if source in self.fhdhr.origins.valid_origins:
epgdict = epgdict.copy()
for c in list(epgdict.keys()):
-chan_obj = self.fhdhr.device.channels.get_channel_obj("origin_id", epgdict[c]["id"])
+chan_obj = self.fhdhr.device.channels.get_channel_obj("origin_id", epgdict[c]["id"], source)
epgdict[chan_obj.number] = epgdict.pop(c)
epgdict[chan_obj.number]["name"] = chan_obj.dict["name"]
epgdict[chan_obj.number]["callsign"] = chan_obj.dict["callsign"]

@@ -113,9 +113,9 @@ class xmlTV():

out = self.xmltv_headers()

-if source in ["origin", "blocks", self.fhdhr.config.dict["main"]["dictpopname"]]:
+if source in self.fhdhr.origins.valid_origins:
for c in list(epgdict.keys()):
-chan_obj = self.fhdhr.device.channels.get_channel_obj("origin_id", epgdict[c]["id"])
+chan_obj = self.fhdhr.device.channels.get_channel_obj("origin_id", epgdict[c]["id"], source)
epgdict[chan_obj.number] = epgdict.pop(c)
epgdict[chan_obj.number]["name"] = chan_obj.dict["name"]
epgdict[chan_obj.number]["callsign"] = chan_obj.dict["callsign"]
@@ -36,9 +36,10 @@ def chan_edit_data(items, channel_id):


def chan_edit_postform(chanlist):
+origin = document["origin"].value
postForm = document.createElement('form')
postForm.method = "POST"
-postForm.action = "/api/channels?method=modify&redirect=/channels_editor"
+postForm.action = "/api/channels?method=modify&origin=%s&redirect=/channels_editor&origin=%s" % (origin, origin)
postForm.setRequestHeader = "('Content-Type', 'application/json')"

postData = document.createElement('input')
@@ -2,7 +2,6 @@

from .favicon_ico import Favicon_ICO
from .style_css import Style_CSS
-from .device_xml import Device_XML


class fHDHR_Files():

@@ -12,4 +11,3 @@ class fHDHR_Files():

self.favicon = Favicon_ICO(fhdhr)
self.style = Style_CSS(fhdhr)
-self.device_xml = Device_XML(fhdhr)
@@ -1,19 +0,0 @@ (deleted file)
-from flask import redirect, session
-
-
-class Device_XML():
-endpoints = ["/device.xml"]
-endpoint_name = "file_device_xml"
-
-def __init__(self, fhdhr):
-self.fhdhr = fhdhr
-
-def __call__(self, *args):
-return self.get(*args)
-
-def get(self, *args):
-
-if self.fhdhr.config.dict["rmg"]["enabled"] and session["is_plexmediaserver"]:
-return redirect("/rmg/device.xml")
-else:
-return redirect("/hdhr/device.xml")
@@ -1,46 +0,0 @@ (deleted file)
-from flask import Response, request
-import json
-
-from fHDHR.tools import channel_sort
-
-
-class Lineup_JSON():
-endpoints = ["/lineup.json", "/hdhr/lineup.json"]
-endpoint_name = "hdhr_lineup_json"
-
-def __init__(self, fhdhr):
-self.fhdhr = fhdhr
-
-def __call__(self, *args):
-return self.get(*args)
-
-def get(self, *args):
-
-base_url = request.url_root[:-1]
-
-show = request.args.get('show', default="all", type=str)
-
-channelslist = {}
-for fhdhr_id in [x["id"] for x in self.fhdhr.device.channels.get_channels()]:
-channel_obj = self.fhdhr.device.channels.list[fhdhr_id]
-if channel_obj.enabled or show == "found":
-lineup_dict = channel_obj.lineup_dict
-lineup_dict["URL"] = "%s%s" % (base_url, lineup_dict["URL"])
-if show == "found" and channel_obj.enabled:
-lineup_dict["Enabled"] = 1
-elif show == "found" and not channel_obj.enabled:
-lineup_dict["Enabled"] = 0
-
-channelslist[channel_obj.number] = lineup_dict
-
-# Sort the channels
-sorted_channel_list = channel_sort(list(channelslist.keys()))
-sorted_chan_guide = []
-for channel in sorted_channel_list:
-sorted_chan_guide.append(channelslist[channel])
-
-lineup_json = json.dumps(sorted_chan_guide, indent=4)
-
-return Response(status=200,
-response=lineup_json,
-mimetype='application/json')
@@ -3,14 +3,12 @@
from .index_html import Index_HTML
from .channels_html import Channels_HTML
from .guide_html import Guide_HTML
-from .cluster_html import Cluster_HTML
from .tuners_html import Tuners_HTML
from .xmltv_html import xmlTV_HTML
from .version_html import Version_HTML
from .diagnostics_html import Diagnostics_HTML
from .settings_html import Settings_HTML
from .channels_editor_html import Channels_Editor_HTML
-from .tools_html import Tools_HTML


class fHDHR_Pages():

@@ -22,10 +20,8 @@ class fHDHR_Pages():
self.channels_html = Channels_HTML(fhdhr)
self.channels_editor_html = Channels_Editor_HTML(fhdhr)
self.guide_html = Guide_HTML(fhdhr)
-self.cluster_html = Cluster_HTML(fhdhr)
self.tuners_html = Tuners_HTML(fhdhr)
self.xmltv_html = xmlTV_HTML(fhdhr)
self.version_html = Version_HTML(fhdhr)
self.diagnostics_html = Diagnostics_HTML(fhdhr)
self.settings_html = Settings_HTML(fhdhr)
-self.tools_html = Tools_HTML(fhdhr)
@@ -7,6 +7,7 @@ class Channels_Editor_HTML():
endpoints = ["/channels_editor", "/channels_editor.html"]
endpoint_name = "page_channels_editor_html"
endpoint_access_level = 2
+endpoint_category = "tool_pages"
pretty_name = "Channels Editor"

def __init__(self, fhdhr):

@@ -17,14 +18,19 @@ class Channels_Editor_HTML():

def get(self, *args):

+origin = request.args.get('origin', default=self.fhdhr.device.epg.def_method, type=str)
+origin_methods = self.fhdhr.origins.valid_origins
+if origin not in origin_methods:
+origin = origin_methods[0]
+
channelslist = {}
-for fhdhr_id in [x["id"] for x in self.fhdhr.device.channels.get_channels()]:
+for fhdhr_id in [x["id"] for x in self.fhdhr.device.channels.get_channels(origin)]:
-channel_obj = self.fhdhr.device.channels.list[fhdhr_id]
+channel_obj = self.fhdhr.device.channels.get_channel_obj("id", fhdhr_id, origin)
channel_dict = channel_obj.dict.copy()

channel_dict["number"] = channel_obj.number
channel_dict["chan_thumbnail"] = channel_obj.thumbnail
-channel_dict["m3u_url"] = channel_obj.m3u_url
+channel_dict["m3u_url"] = channel_obj.api_m3u_url

channelslist[channel_dict["number"]] = channel_dict

@@ -34,4 +40,4 @@ class Channels_Editor_HTML():
for channel in sorted_channel_list:
sorted_chan_guide.append(channelslist[channel])

-return render_template('channels_editor.html', session=session, request=request, fhdhr=self.fhdhr, channelslist=sorted_chan_guide, list=list)
+return render_template('channels_editor.html', request=request, session=session, fhdhr=self.fhdhr, channelslist=sorted_chan_guide, origin=origin, origin_methods=origin_methods, list=list)
@@ -17,19 +17,24 @@ class Channels_HTML():

def get(self, *args):

+origin = request.args.get('origin', default=self.fhdhr.device.epg.def_method, type=str)
+origin_methods = self.fhdhr.origins.valid_origins
+if origin not in origin_methods:
+origin = origin_methods[0]
+
channels_dict = {
-"Total Channels": len(self.fhdhr.device.channels.get_channels()),
+"Total Channels": len(self.fhdhr.device.channels.get_channels(origin)),
"Enabled": 0
}

channelslist = {}
-for fhdhr_id in [x["id"] for x in self.fhdhr.device.channels.get_channels()]:
+for fhdhr_id in [x["id"] for x in self.fhdhr.device.channels.get_channels(origin)]:
-channel_obj = self.fhdhr.device.channels.list[fhdhr_id]
+channel_obj = self.fhdhr.device.channels.get_channel_obj("id", fhdhr_id, origin)
channel_dict = channel_obj.dict.copy()

channel_dict["number"] = channel_obj.number
channel_dict["chan_thumbnail"] = channel_obj.thumbnail
-channel_dict["m3u_url"] = channel_obj.m3u_url
+channel_dict["m3u_url"] = channel_obj.api_m3u_url

channelslist[channel_dict["number"]] = channel_dict
if channel_dict["enabled"]:

@@ -41,4 +46,4 @@ class Channels_HTML():
for channel in sorted_channel_list:
sorted_chan_guide.append(channelslist[channel])

-return render_template('channels.html', session=session, request=request, fhdhr=self.fhdhr, channelslist=sorted_chan_guide, channels_dict=channels_dict, list=list)
+return render_template('channels.html', request=request, session=session, fhdhr=self.fhdhr, channelslist=sorted_chan_guide, channels_dict=channels_dict, origin=origin, origin_methods=origin_methods, list=list)
@@ -1,52 +0,0 @@ (deleted file)
-from flask import request, render_template, session
-import urllib.parse
-
-
-class Cluster_HTML():
-endpoints = ["/cluster", "/cluster.html"]
-endpoint_name = "page_cluster_html"
-endpoint_access_level = 1
-pretty_name = "Cluster/SSDP"
-
-def __init__(self, fhdhr):
-self.fhdhr = fhdhr
-self.location_dict = {
-"name": self.fhdhr.config.dict["fhdhr"]["friendlyname"],
-"location": self.fhdhr.api.base,
-"joined": "N/A",
-"url_query": self.fhdhr.api.base_quoted
-}
-
-def __call__(self, *args):
-return self.get(*args)
-
-def get(self, *args):
-
-locations_list = []
-
-if self.fhdhr.config.dict["fhdhr"]["discovery_address"]:
-
-locations_list.append(self.location_dict)
-
-fhdhr_list = self.fhdhr.device.cluster.get_list()
-for location in list(fhdhr_list.keys()):
-
-if location in list(self.fhdhr.device.cluster.cluster().keys()):
-location_name = self.fhdhr.device.cluster.cluster()[location]["name"]
-else:
-try:
-location_info_url = "%s/discover.json" % location
-location_info_req = self.fhdhr.web.session.get(location_info_url)
-location_info = location_info_req.json()
-location_name = location_info["FriendlyName"]
-except self.fhdhr.web.exceptions.ConnectionError:
-self.fhdhr.logger.error("Unreachable: %s" % location)
-location_dict = {
-"name": location_name,
-"location": location,
-"joined": str(fhdhr_list[location]["Joined"]),
-"url_query": urllib.parse.quote(location)
-}
-locations_list.append(location_dict)
-
-return render_template('cluster.html', session=session, request=request, fhdhr=self.fhdhr, locations_list=locations_list)
@@ -5,6 +5,7 @@ class Diagnostics_HTML():
endpoints = ["/diagnostics", "/diagnostics.html"]
endpoint_name = "page_diagnostics_html"
endpoint_access_level = 2
+endpoint_category = "tool_pages"
pretty_name = "Diagnostics"

def __init__(self, fhdhr):

@@ -20,7 +21,7 @@ class Diagnostics_HTML():
button_dict = {}

for route_group in list(session["route_list"].keys()):
-if route_group not in ["pages", "brython", "files"]:
+if route_group not in ["pages", "brython", "files", "tool_pages"]:
button_dict[route_group] = []
for route_item in list(session["route_list"][route_group].keys()):
if not session["route_list"][route_group][route_item]["name"].startswith("page_"):

@@ -46,4 +47,4 @@ class Diagnostics_HTML():
curr_button_dict["button"] = False
button_dict[route_group].append(curr_button_dict)

-return render_template('diagnostics.html', session=session, request=request, fhdhr=self.fhdhr, button_dict=button_dict, list=list)
+return render_template('diagnostics.html', request=request, session=session, fhdhr=self.fhdhr, button_dict=button_dict, list=list)
@@ -27,6 +27,9 @@ class Guide_HTML():
if source not in epg_methods:
source = self.fhdhr.device.epg.def_method

+if not source:
+return render_template('guide.html', request=request, session=session, fhdhr=self.fhdhr, chan_guide_list=chan_guide_list, epg_methods=epg_methods, source=source, list=list)
+
whatson = self.fhdhr.device.epg.whats_on_allchans(source)

# Sort the channels

@@ -60,14 +63,14 @@ class Guide_HTML():
else:
chan_dict["listing_%s" % time_item] = str(datetime.datetime.fromtimestamp(sorted_chan_guide[channel]["listing"][0][time_item]))

-if source in ["blocks", "origin", self.fhdhr.config.dict["main"]["dictpopname"]]:
+if source in self.fhdhr.origins.valid_origins:
-chan_obj = self.fhdhr.device.channels.get_channel_obj("origin_id", sorted_chan_guide[channel]["id"])
+chan_obj = self.fhdhr.device.channels.get_channel_obj("origin_id", sorted_chan_guide[channel]["id"], source)

chan_dict["name"] = chan_obj.dict["name"]
chan_dict["number"] = chan_obj.number
chan_dict["chan_thumbnail"] = chan_obj.thumbnail
chan_dict["enabled"] = chan_obj.dict["enabled"]
-chan_dict["m3u_url"] = chan_obj.m3u_url
+chan_dict["m3u_url"] = chan_obj.api_m3u_url

chan_dict["listing_thumbnail"] = chan_dict["listing_thumbnail"] or chan_obj.thumbnail
else:

@@ -78,4 +81,4 @@ class Guide_HTML():

chan_guide_list.append(chan_dict)

-return render_template('guide.html', session=session, request=request, fhdhr=self.fhdhr, chan_guide_list=chan_guide_list, epg_methods=epg_methods, source=source)
+return render_template('guide.html', request=request, session=session, fhdhr=self.fhdhr, chan_guide_list=chan_guide_list, epg_methods=epg_methods, source=source, list=list)
@@ -15,15 +15,13 @@ class Index_HTML():

def get(self, *args):

-tuners_in_use = self.fhdhr.device.tuners.inuse_tuner_count()
+origin = self.fhdhr.origins.valid_origins[0]
-max_tuners = self.fhdhr.device.tuners.max_tuners

fhdhr_status_dict = {
"Script Directory": str(self.fhdhr.config.internal["paths"]["script_dir"]),
"Config File": str(self.fhdhr.config.config_file),
"Cache Path": str(self.fhdhr.config.internal["paths"]["cache_dir"]),
-"Total Channels": len(self.fhdhr.device.channels.list),
+"Total Channels": len(list(self.fhdhr.device.channels.list[origin].keys())),
-"Tuner Usage": ("%s/%s" % (str(tuners_in_use), str(max_tuners))),
}

-return render_template('index.html', session=session, request=request, fhdhr=self.fhdhr, fhdhr_status_dict=fhdhr_status_dict, list=list)
+return render_template('index.html', request=request, session=session, fhdhr=self.fhdhr, fhdhr_status_dict=fhdhr_status_dict, list=list)
@@ -5,6 +5,7 @@ class Settings_HTML():
endpoints = ["/settings", "/settings.html"]
endpoint_name = "page_settings_html"
endpoint_access_level = 1
+endpoint_category = "tool_pages"
pretty_name = "Settings"

def __init__(self, fhdhr):

@@ -21,15 +22,12 @@ class Settings_HTML():

for config_item in list(self.fhdhr.config.conf_default[config_section].keys()):
if self.fhdhr.config.conf_default[config_section][config_item]["config_web"]:
-real_config_section = config_section
-if config_section == self.fhdhr.config.dict["main"]["dictpopname"]:
-real_config_section = "origin"
web_settings_dict[config_section][config_item] = {
-"value": self.fhdhr.config.dict[real_config_section][config_item],
+"value": self.fhdhr.config.dict[config_section][config_item],
"value_default": self.fhdhr.config.conf_default[config_section][config_item]["value"],
"hide": self.fhdhr.config.conf_default[config_section][config_item]["config_web_hidden"]
}
if not len(web_settings_dict[config_section].keys()):
del web_settings_dict[config_section]

-return render_template('settings.html', session=session, request=request, fhdhr=self.fhdhr, web_settings_dict=web_settings_dict, list=list)
+return render_template('settings.html', request=request, session=session, fhdhr=self.fhdhr, web_settings_dict=web_settings_dict, list=list)
@@ -1,18 +0,0 @@ (deleted file)
-from flask import request, render_template, session
-
-
-class Tools_HTML():
-endpoints = ["/tools", "/tools.html"]
-endpoint_name = "tools_html"
-endpoint_access_level = 2
-pretty_name = "Tools"
-
-def __init__(self, fhdhr):
-self.fhdhr = fhdhr
-
-def __call__(self, *args):
-return self.get(*args)
-
-def get(self, *args):
-
-return render_template('tools.html', session=session, request=request, fhdhr=self.fhdhr)
@@ -7,6 +7,7 @@ class Tuners_HTML():
endpoints = ["/tuners", "/tuners.html"]
endpoint_name = "page_streams_html"
endpoint_access_level = 0
+endpoint_category = "tool_pages"
pretty_name = "Tuners"

def __init__(self, fhdhr):

@@ -17,22 +18,36 @@ class Tuners_HTML():

def get(self, *args):

-tuner_list = []
+tuner_status_dict = {}

tuner_status = self.fhdhr.device.tuners.status()
-tuner_scanning = 0
+for origin in list(tuner_status.keys()):
-for tuner in list(tuner_status.keys()):
+tuner_status_dict[origin] = {}
-tuner_dict = {
+tuner_status_dict[origin]["scan_count"] = 0
-"number": str(tuner),
+tuner_status_dict[origin]["status_list"] = []
-"status": str(tuner_status[tuner]["status"]),
+for tuner in list(tuner_status[origin].keys()):
-}
+if tuner_status[origin][tuner]["status"] == "Scanning":
-if tuner_status[tuner]["status"] == "Active":
+tuner_status_dict[origin]["scan_count"] += 1
-tuner_dict["channel_number"] = tuner_status[tuner]["channel"]
-tuner_dict["method"] = tuner_status[tuner]["method"]
-tuner_dict["play_duration"] = str(tuner_status[tuner]["Play Time"])
-tuner_dict["downloaded"] = humanized_filesize(tuner_status[tuner]["downloaded"])
-elif tuner_status[tuner]["status"] == "Scanning":
-tuner_scanning += 1

-tuner_list.append(tuner_dict)
+tuner_dict = {
+"number": str(tuner),
+"status": str(tuner_status[origin][tuner]["status"]),
+"origin": "N/A",
+"channel_number": "N/A",
+"method": "N/A",
+"running_time": "N/A",
+"downloaded": "N/A",
+}
+
-return render_template('tuners.html', session=session, request=request, fhdhr=self.fhdhr, tuner_list=tuner_list, tuner_scanning=tuner_scanning)
+if tuner_status[origin][tuner]["status"] in ["Active", "Acquired", "Scanning"]:
+tuner_dict["origin"] = tuner_status[origin][tuner]["origin"]
+tuner_dict["channel_number"] = tuner_status[origin][tuner]["channel"] or "N/A"
+tuner_dict["running_time"] = str(tuner_status[origin][tuner]["running_time"])
+
+if tuner_status[origin][tuner]["status"] in "Active":
+tuner_dict["method"] = tuner_status[origin][tuner]["method"]
+tuner_dict["downloaded"] = humanized_filesize(tuner_status[origin][tuner]["downloaded"])
+
+tuner_status_dict[origin]["status_list"].append(tuner_dict)
+
+return render_template('tuners.html', request=request, session=session, fhdhr=self.fhdhr, tuner_status_dict=tuner_status_dict, list=list)
@@ -5,6 +5,7 @@ class Version_HTML():
endpoints = ["/version", "/version.html"]
endpoint_name = "page_version_html"
endpoint_access_level = 1
+endpoint_category = "tool_pages"
pretty_name = "Version"

def __init__(self, fhdhr):

@@ -14,7 +15,19 @@ class Version_HTML():
return self.get(*args)

def get(self, *args):

version_dict = {}
for key in list(self.fhdhr.config.internal["versions"].keys()):
version_dict[key] = self.fhdhr.config.internal["versions"][key]
-return render_template('version.html', session=session, request=request, fhdhr=self.fhdhr, version_dict=version_dict, list=list)
+# Sort the Version Info
+sorted_version_list = sorted(version_dict, key=lambda i: (version_dict[i]['type'], version_dict[i]['name']))
+sorted_version_dict = {
+"fHDHR": version_dict["fHDHR"],
+"fHDHR_web": version_dict["fHDHR_web"]
+}
+for version_item in sorted_version_list:
+if version_item not in ["fHDHR", "fHDHR_web"]:
+sorted_version_dict[version_item] = version_dict[version_item]
+
+return render_template('version.html', request=request, session=session, fhdhr=self.fhdhr, version_dict=sorted_version_dict, list=list)
@@ -5,6 +5,7 @@ class xmlTV_HTML():
endpoints = ["/xmltv", "/xmltv.html"]
endpoint_name = "page_xmltv_html"
endpoint_access_level = 1
+endpoint_category = "tool_pages"
pretty_name = "xmltv"

def __init__(self, fhdhr):

@@ -15,4 +16,4 @@ class xmlTV_HTML():

def get(self, *args):

-return render_template('xmltv.html', session=session, request=request, fhdhr=self.fhdhr)
+return render_template('xmltv.html', request=request, session=session, fhdhr=self.fhdhr, list=list)
@@ -1,58 +0,0 @@ (deleted file)
-from flask import Response, request
-from io import BytesIO
-import xml.etree.ElementTree
-
-from fHDHR.tools import sub_el
-
-
-class RMG_Device_XML():
-endpoints = ["/rmg/device.xml"]
-endpoint_name = "rmg_device_xml"
-
-def __init__(self, fhdhr):
-self.fhdhr = fhdhr
-
-def __call__(self, *args):
-return self.get(*args)
-
-def get(self, *args):
-"""Device.xml referenced from SSDP"""
-
-base_url = request.url_root[:-1]
-
-out = xml.etree.ElementTree.Element('root')
-out.set('xmlns', "urn:schemas-upnp-org:device-1-0")
-
-specVersion_out = sub_el(out, 'specVersion')
-sub_el(specVersion_out, 'major', "1")
-sub_el(specVersion_out, 'minor', "0")
-
-device_out = sub_el(out, 'device')
-
-sub_el(device_out, 'deviceType', "urn:plex-tv:device:Media:1")
-
-sub_el(device_out, 'friendlyName', self.fhdhr.config.dict["fhdhr"]["friendlyname"])
-sub_el(device_out, 'manufacturer', self.fhdhr.config.dict["fhdhr"]["reporting_manufacturer"])
-sub_el(device_out, 'manufacturerURL', "https://github.com/fHDHR/%s" % self.fhdhr.config.dict["main"]["reponame"])
-sub_el(device_out, 'modelName', self.fhdhr.config.dict["fhdhr"]["reporting_model"])
-sub_el(device_out, 'modelNumber', self.fhdhr.config.internal["versions"]["fHDHR"])
-
-sub_el(device_out, 'modelDescription', self.fhdhr.config.dict["fhdhr"]["friendlyname"])
-sub_el(device_out, 'modelURL', "https://github.com/fHDHR/%s" % self.fhdhr.config.dict["main"]["reponame"])
-
-serviceList_out = sub_el(device_out, 'serviceList')
-service_out = sub_el(serviceList_out, 'service')
-sub_el(out, 'URLBase', "%s" % base_url)
-sub_el(service_out, 'serviceType', "urn:plex-tv:service:MediaGrabber:1")
-sub_el(service_out, 'serviceId', "urn:plex-tv:serviceId:MediaGrabber")
-
-sub_el(device_out, 'UDN', "uuid:%s" % self.fhdhr.config.dict["main"]["uuid"])
-
-fakefile = BytesIO()
-fakefile.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
-fakefile.write(xml.etree.ElementTree.tostring(out, encoding='UTF-8'))
-device_xml = fakefile.getvalue()
-
-return Response(status=200,
-response=device_xml,
-mimetype='application/xml')
@ -1,47 +0,0 @@
|
|||||||
from flask import Response
|
|
||||||
from io import BytesIO
|
|
||||||
import xml.etree.ElementTree
|
|
||||||
|
|
||||||
from fHDHR.tools import sub_el
|
|
||||||
|
|
||||||
|
|
||||||
class RMG_Devices_DeviceKey_Channels():
|
|
||||||
endpoints = ["/devices/<devicekey>/channels", "/rmg/devices/<devicekey>/channels"]
|
|
||||||
endpoint_name = "rmg_devices_devicekey_channels"
|
|
||||||
endpoint_methods = ["GET"]
|
|
||||||
|
|
||||||
def __init__(self, fhdhr):
|
|
||||||
self.fhdhr = fhdhr
|
|
||||||
|
|
||||||
def __call__(self, devicekey, *args):
|
|
||||||
return self.get(devicekey, *args)
|
|
||||||
|
|
||||||
def get(self, devicekey, *args):
|
|
||||||
"""Returns the current channels."""
|
|
||||||
|
|
||||||
out = xml.etree.ElementTree.Element('MediaContainer')
|
|
||||||
if devicekey == self.fhdhr.config.dict["main"]["uuid"]:
|
|
||||||
out.set('size', str(len(self.fhdhr.device.channels.list)))
|
|
||||||
for fhdhr_id in [x["id"] for x in self.fhdhr.device.channels.get_channels()]:
|
|
||||||
channel_obj = self.fhdhr.device.channels.list[fhdhr_id]
|
|
||||||
if channel_obj.enabled:
|
|
||||||
sub_el(out, 'Channel',
|
|
||||||
drm="0",
|
|
||||||
channelIdentifier=channel_obj.rmg_stream_ident,
|
|
||||||
name=channel_obj.dict["name"],
|
|
||||||
origin=channel_obj.dict["callsign"],
|
|
||||||
number=str(channel_obj.number),
|
|
||||||
type="tv",
|
|
||||||
# TODO param
|
|
||||||
signalStrength="100",
|
|
||||||
signalQuality="100",
|
|
||||||
)
|
|
||||||
|
|
||||||
fakefile = BytesIO()
|
|
||||||
fakefile.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
|
|
||||||
fakefile.write(xml.etree.ElementTree.tostring(out, encoding='UTF-8'))
|
|
||||||
device_xml = fakefile.getvalue()
|
|
||||||
|
|
||||||
return Response(status=200,
|
|
||||||
response=device_xml,
|
|
||||||
mimetype='application/xml')
|
|
||||||
@ -1,31 +0,0 @@
|
|||||||
from flask import request, redirect
|
|
||||||
import urllib.parse
|
|
||||||
|
|
||||||
|
|
||||||
class RMG_Devices_DeviceKey_Media():
|
|
||||||
endpoints = ["/devices/<devicekey>/media/<channel>", "/rmg/devices/<devicekey>/media/<channel>"]
|
|
||||||
endpoint_name = "rmg_devices_devicekey_media"
|
|
||||||
endpoint_methods = ["GET"]
|
|
||||||
|
|
||||||
def __init__(self, fhdhr):
|
|
||||||
self.fhdhr = fhdhr
|
|
||||||
|
|
||||||
def __call__(self, devicekey, channel, *args):
|
|
||||||
return self.get(devicekey, channel, *args)
|
|
||||||
|
|
||||||
def get(self, devicekey, channel, *args):
|
|
||||||
|
|
||||||
param = request.args.get('method', default=None, type=str)
|
|
||||||
self.fhdhr.logger.debug("param:%s" % param)
|
|
||||||
|
|
||||||
method = self.fhdhr.config.dict["streaming"]["method"]
|
|
||||||
|
|
||||||
redirect_url = "/api/tuners?method=%s" % (method)
|
|
||||||
|
|
||||||
if str(channel).startswith('id://'):
|
|
||||||
channel = str(channel).replace('id://', '')
|
|
||||||
redirect_url += "&channel=%s" % str(channel)
|
|
||||||
|
|
||||||
redirect_url += "&accessed=%s" % urllib.parse.quote(request.url)
|
|
||||||
|
|
||||||
return redirect(redirect_url)
|
|
||||||
@ -1,49 +0,0 @@
|
|||||||
from flask import Response, request
|
|
||||||
from io import BytesIO
|
|
||||||
import xml.etree.ElementTree
|
|
||||||
|
|
||||||
from fHDHR.tools import sub_el
|
|
||||||
|
|
||||||
|
|
||||||
class RMG_Devices_Discover():
|
|
||||||
endpoints = ["/devices/discover", "/rmg/devices/discover"]
|
|
||||||
endpoint_name = "rmg_devices_discover"
|
|
||||||
endpoint_methods = ["GET", "POST"]
|
|
||||||
|
|
||||||
def __init__(self, fhdhr):
|
|
||||||
self.fhdhr = fhdhr
|
|
||||||
|
|
||||||
def __call__(self, *args):
|
|
||||||
return self.get(*args)
|
|
||||||
|
|
||||||
def get(self, *args):
|
|
||||||
"""This endpoint requests the grabber attempt to discover any devices it can, and it returns zero or more devices."""
|
|
||||||
|
|
||||||
base_url = request.url_root[:-1]
|
|
||||||
|
|
||||||
out = xml.etree.ElementTree.Element('MediaContainer')
|
|
||||||
out.set('size', "1")
|
|
||||||
sub_el(out, 'Device',
|
|
||||||
key=self.fhdhr.config.dict["main"]["uuid"],
|
|
||||||
make=self.fhdhr.config.dict["fhdhr"]["reporting_manufacturer"],
|
|
||||||
model=self.fhdhr.config.dict["fhdhr"]["reporting_model"],
|
|
||||||
modelNumber=self.fhdhr.config.internal["versions"]["fHDHR"],
|
|
||||||
protocol="livetv",
|
|
||||||
status="alive",
|
|
||||||
title=self.fhdhr.config.dict["fhdhr"]["friendlyname"],
|
|
||||||
tuners=str(self.fhdhr.config.dict["fhdhr"]["tuner_count"]),
|
|
||||||
uri=base_url,
|
|
||||||
uuid="device://tv.plex.grabbers.fHDHR/%s" % self.fhdhr.config.dict["main"]["uuid"],
|
|
||||||
thumb="favicon.ico",
|
|
||||||
interface='network'
|
|
||||||
# TODO add preferences
|
|
||||||
)
|
|
||||||
|
|
||||||
fakefile = BytesIO()
|
|
||||||
fakefile.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
|
|
||||||
fakefile.write(xml.etree.ElementTree.tostring(out, encoding='UTF-8'))
|
|
||||||
device_xml = fakefile.getvalue()
|
|
||||||
|
|
||||||
return Response(status=200,
|
|
||||||
response=device_xml,
|
|
||||||
mimetype='application/xml')
|
|
||||||
@ -1,54 +0,0 @@
|
|||||||
from flask import Response, request
|
|
||||||
from io import BytesIO
|
|
||||||
import xml.etree.ElementTree
|
|
||||||
|
|
||||||
from fHDHR.tools import sub_el
|
|
||||||
|
|
||||||
|
|
||||||
class RMG_Devices_Probe():
|
|
||||||
endpoints = ["/devices/probe", "/rmg/devices/probe"]
|
|
||||||
endpoint_name = "rmg_devices_probe"
|
|
||||||
endpoint_methods = ["GET", "POST"]
|
|
||||||
endpoint_default_parameters = {
|
|
||||||
"uri": "<base_url>"
|
|
||||||
}
|
|
||||||
|
|
||||||
def __init__(self, fhdhr):
|
|
||||||
self.fhdhr = fhdhr
|
|
||||||
|
|
||||||
def __call__(self, *args):
|
|
||||||
return self.get(*args)
|
|
||||||
|
|
||||||
def get(self, *args):
|
|
||||||
"""Probes a specific URI for a network device, and returns a device, if it exists at the given URI."""
|
|
||||||
|
|
||||||
base_url = request.url_root[:-1]
|
|
||||||
|
|
||||||
uri = request.args.get('uri', default=None, type=str)
|
|
||||||
|
|
||||||
out = xml.etree.ElementTree.Element('MediaContainer')
|
|
||||||
out.set('size', "1")
|
|
||||||
if uri == base_url:
|
|
||||||
sub_el(out, 'Device',
|
|
||||||
key=self.fhdhr.config.dict["main"]["uuid"],
|
|
||||||
make=self.fhdhr.config.dict["fhdhr"]["reporting_manufacturer"],
|
|
||||||
model=self.fhdhr.config.dict["fhdhr"]["reporting_model"],
|
|
||||||
modelNumber=self.fhdhr.config.internal["versions"]["fHDHR"],
|
|
||||||
protocol="livetv",
|
|
||||||
status="alive",
|
|
||||||
title=self.fhdhr.config.dict["fhdhr"]["friendlyname"],
|
|
||||||
tuners=str(self.fhdhr.config.dict["fhdhr"]["tuner_count"]),
|
|
||||||
uri=base_url,
|
|
||||||
uuid="device://tv.plex.grabbers.fHDHR/%s" % self.fhdhr.config.dict["main"]["uuid"],
|
|
||||||
thumb="favicon.ico",
|
|
||||||
interface='network'
|
|
||||||
)
|
|
||||||
|
|
||||||
fakefile = BytesIO()
|
|
||||||
fakefile.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
|
|
||||||
fakefile.write(xml.etree.ElementTree.tostring(out, encoding='UTF-8'))
|
|
||||||
device_xml = fakefile.getvalue()
|
|
||||||
|
|
||||||
return Response(status=200,
|
|
||||||
response=device_xml,
|
|
||||||
mimetype='application/xml')
|
|
||||||
@@ -28,7 +28,6 @@

 <div>
 <button onclick="location.href='/index'" type="button">fHDHR</button>
-<button onclick="location.href='/origin'" type="button">{{ fhdhr.config.dict["main"]["servicename"] }}</button>

 {% for page_dict in session["route_list"]["pages"] %}
 {% if session["route_list"]["pages"][page_dict]["name"] != "page_index_html" and fhdhr.config.dict["web_ui"]["access_level"] >= session["route_list"]["pages"][page_dict]["endpoint_access_level"] %}
@@ -64,17 +63,11 @@
 <hr align="center" width="100%">
 </div>

-{% if fhdhr.config.dict["web_ui"]["cluster_bar"] %}
+{% for page_dict in session["route_list"]["tool_pages"] %}
-{% set locations = fhdhr.device.cluster.get_cluster_dicts_web() %}
+{% if session["route_list"]["tool_pages"][page_dict]["name"] != "page_index_html" and fhdhr.config.dict["web_ui"]["access_level"] >= session["route_list"]["tool_pages"][page_dict]["endpoint_access_level"] %}
-{% if locations %}
+<button onclick="location.href='{{ session["route_list"]["tool_pages"][page_dict]["endpoints"][0] }}'" type="button">{{ session["route_list"]["tool_pages"][page_dict]["pretty_name"] }}</button>
-<div>
-{% for location in locations %}
-<button onclick="location.href='{{ location["base_url"] }}'" type="button">{{ location["name"] }}</button>
-{% endfor %}
-<hr align="center" width="100%">
-</div>
 {% endif %}
-{% endif %}
+{% endfor %}

 {% block content %}{% endblock %}
 </body>
@@ -4,8 +4,14 @@

 <h4 style="text-align: center;">{{ fhdhr.config.dict["fhdhr"]["friendlyname"] }} Channels</h4>

+<p>
+{% for origin in origin_methods %}
+<button onclick="location.href='/channels?origin={{ origin }}'" type="button">{{ origin }}</button>
+{% endfor %}
+</p>
+
 <div style="text-align: center;">
-<button onclick="location.href='/api/tuners?method=scan&redirect=/channels'" type="button">Force Channel Update</button>
+<button onclick="location.href='/api/tuners?method=scan&origin={{ origin }}&redirect=/channels?origin={{ origin }}'" type="button">Force Channel Update</button>
 <p> Note: This may take some time.</p>
 </div>
 <br>
@@ -25,7 +31,7 @@

 <br>
 <div style="text-align: center;">
-<button onclick="location.href='/channels_editor'" type="button">Edit Channels</button>
+<button onclick="location.href='/channels_editor?origin={{ origin }}'" type="button">Edit Channels</button>
 </div>
 <br>

@@ -3,6 +3,7 @@
 {% block content %}

 <h4 style="text-align: center;">{{ fhdhr.config.dict["fhdhr"]["friendlyname"] }} Channels Editor</h4>
+<input type="hidden" id="origin" value="{{ origin }}">

 <div class="container">
 <table class="table-medium center">
@@ -14,7 +14,7 @@
 <table class="table-scroll">
 <thead>
 <tr>
-{% if source in ["blocks", "origin", fhdhr.config.dict["main"]["dictpopname"]] %}
+{% if source in fhdhr.origins.valid_origins %}
 <th>Play</th>
 {% endif %}
 <th>Channel Name</th>
@@ -32,7 +32,7 @@
 <tbody class="body-half-screen">
 {% for chan_dict in chan_guide_list %}
 <tr>
-{% if source in ["blocks", "origin", fhdhr.config.dict["main"]["dictpopname"]] %}
+{% if source in fhdhr.origins.valid_origins %}
 <td>
 {% if chan_dict["enabled"] %}
 <a href="{{ chan_dict["m3u_url"] }}">Play</a>
@@ -14,11 +14,8 @@

 {% for config_section in list(web_settings_dict.keys()) %}

-{% if config_section == "origin" %}
+<h4 style="text-align: center;">{{ config_section }}</h4>
-<h4 style="text-align: center;">{{ fhdhr.config.dict["main"]["dictpopname"] }}</h4>
-{% else %}
-<h4 style="text-align: center;">{{ config_section }}</h4>
-{% endif %}

 <div class="container">
 <table class="table-settings center action-col text-edit-cols">
@@ -4,49 +4,48 @@

 <h4 style="text-align: center;">fHDHR Streams</h4>

-<div class="container">
+{% for origin in list(tuner_status_dict.keys()) %}
-<table class="table-medium center action-col">
-<tbody>
-<tr>
-<th>Tuner</th>
-<th>Status</th>
-<th>Channel</th>
-<th>Method</th>
-<th>Time Active</th>
-<th>Total Downloaded</th>
-<th>Actions</th>
-</tr>
-
-{% for tuner_dict in tuner_list %}
+<h4 style="text-align: center;">{{ origin }}</h4>
+
+<div class="container">
+<table class="table-medium center action-col">
+<tbody>
 <tr>
-<td>{{ tuner_dict["number"] }}</td>
+<th>Tuner</th>
-<td>{{ tuner_dict["status"] }}</td>
+<th>Status</th>
-{% if tuner_dict["status"] in ["Active", "Acquired"] %}
+<th>Origin</th>
-<td>{{ tuner_dict["channel_number"] }}</td>
+<th>Channel</th>
-{% else %}
+<th>Method</th>
-<td>N/A</td>
+<th>Time Active</th>
-{% endif %}
+<th>Total Downloaded</th>
-{% if tuner_dict["status"] == "Active" %}
+<th>Actions</th>
-<td>{{ tuner_dict["method"] }}</td>
-<td>{{ tuner_dict["play_duration"] }}</td>
-<td>{{ tuner_dict["downloaded"] }}</td>
-{% else %}
-<td>N/A</td>
-<td>N/A</td>
-<td>N/A</td>
-{% endif %}
-<td>
-{% if tuner_dict["status"] != "Inactive" %}
-<button onclick="location.href='/api/tuners?method=close&tuner={{ tuner_dict["number"] }}&redirect=/tuners'" type="button">Close</button>
-{% endif %}
-{% if not tuner_scanning and tuner_dict["status"] == "Inactive" %}
-<button onclick="location.href='/api/tuners?method=scan&tuner={{ tuner_dict["number"] }}&redirect=/tuners'" type="button">Channel Scan</button>
-{% endif %}
-</td>
 </tr>
-{% endfor %}
-</tbody>
+{% for tuner_dict in tuner_status_dict[origin]["status_list"] %}
-</table>
+<tr>
-</div>
+<td>{{ tuner_dict["number"] }}</td>
+<td>{{ tuner_dict["status"] }}</td>
+<td>{{ tuner_dict["origin"] }}</td>
+<td>{{ tuner_dict["channel_number"] }}</td>
+<td>{{ tuner_dict["method"] }}</td>
+<td>{{ tuner_dict["running_time"] }}</td>
+<td>{{ tuner_dict["downloaded"] }}</td>
+
+<td>
+{% if tuner_dict["status"] != "Inactive" %}
+<button onclick="location.href='/api/tuners?method=close&tuner={{ tuner_dict["number"] }}&origin={{ origin }}&redirect=/tuners'" type="button">Close</button>
+{% endif %}
+{% if not tuner_status_dict[origin]["scan_count"] and tuner_dict["status"] == "Inactive" %}
+<button onclick="location.href='/api/tuners?method=scan&tuner={{ tuner_dict["number"] }}&origin={{ origin }}&redirect=/tuners'" type="button">Channel Scan</button>
+{% endif %}
+</td>
+</tr>
+{% endfor %}
+</tbody>
+</table>
+</div>
+
+{% endfor %}

 {% endblock %}
@@ -10,8 +10,9 @@
 <tbody>
 {% for key in list(version_dict.keys()) %}
 <tr>
+<td>{{ version_dict[key]["type"] }}</td>
 <td>{{ key }}</td>
-<td>{{ version_dict[key] }}</td>
+<td>{{ version_dict[key]["version"] }}</td>
 </tr>
 {% endfor %}
 </tbody>
@@ -14,16 +14,12 @@
 <th>Actions</th>
 </tr>

-{% for epg_method in fhdhr.config.dict["epg"]["valid_epg_methods"] %}
+{% for epg_method in list(fhdhr.config.dict["epg"]["valid_methods"].keys()) %}
 {% if epg_method not in [None, "None"] %}
-{% set epg_method_name = epg_method %}
-{% if epg_method == "origin" %}
-{% set epg_method_name = fhdhr.config.dict["main"]["dictpopname"] %}
-{% endif %}
 <tr>
-<td> {{ epg_method_name }}</td>
+<td> {{ epg_method }}</td>
-<td><a href="/api/xmltv?method=get&source={{ epg_method }}">{{ epg_method_name }}</a></td>
+<td><a href="/api/xmltv?method=get&source={{ epg_method }}">{{ epg_method }}</a></td>
-<td><a href="/api/epg?method=get&source={{ epg_method }}">{{ epg_method_name }}</a></td>
+<td><a href="/api/epg?method=get&source={{ epg_method }}">{{ epg_method }}</a></td>
 <td>
 <button onclick="location.href='/api/xmltv?method=update&source={{ epg_method }}&redirect=/xmltv'" type="button">Update</button>
 <button onclick="location.href='/api/xmltv?method=clearcache&source={{ epg_method }}&redirect=/xmltv'" type="button">Clear Cache</button>
@@ -6,13 +6,7 @@
 "config_web": true
 },
 "access_level":{
-"value": 0,
+"value": "0",
-"config_file": true,
-"config_web": true
-}
-,
-"cluster_bar":{
-"value": true,
 "config_file": true,
 "config_web": true
 }
@@ -136,13 +136,13 @@ td {border: 1px solid black;}

 /* Other options */

-.table-scroll.text-edit-cols td:nth-child(-n+4),
+.table-scroll.text-edit-cols td:nth-of-type(-n+4),
-.table-scroll.text-edit-cols th:nth-child(-n+4){
+.table-scroll.text-edit-cols th:nth-of-type(-n+4){
 flex: 2
 }

-.table-settings.text-edit-cols td:nth-child(-n+2),
+.table-settings.text-edit-cols td:nth-of-type(-n+2),
-.table-settings.text-edit-cols th:nth-child(-n+2){
+.table-settings.text-edit-cols th:nth-of-type(-n+2){
 flex: 1;
 }

main.py
@@ -9,10 +9,8 @@ import pathlib

 from fHDHR.cli import run
 import fHDHR_web
-import alternative_epg
-import origin

 SCRIPT_DIR = pathlib.Path(os.path.dirname(os.path.abspath(__file__)))

 if __name__ == '__main__':
-sys.exit(run.main(SCRIPT_DIR, fHDHR_web, origin, alternative_epg))
+sys.exit(run.main(SCRIPT_DIR, fHDHR_web))
@@ -1,8 +0,0 @@
-# pylama:ignore=W0401,W0611
-from .origin_service import *
-from .origin_channels import *
-from .origin_epg import *
-from .origin_web import *
-
-ORIGIN_NAME = "fHDHR_Locast"
-ORIGIN_VERSION = "v0.5.0-beta"
@ -1,64 +0,0 @@
|
|||||||
import xmltodict
|
|
||||||
import json
|
|
||||||
|
|
||||||
|
|
||||||
class OriginChannels():
|
|
||||||
|
|
||||||
def __init__(self, fhdhr, origin):
|
|
||||||
self.fhdhr = fhdhr
|
|
||||||
self.origin = origin
|
|
||||||
|
|
||||||
def get_channel_thumbnail(self, channel_id):
|
|
||||||
channel_thumb_url = ("%s%s:%s/service?method=channel.icon&channel_id=%s" %
|
|
||||||
("https://" if self.fhdhr.config.dict["origin"]["ssl"] else "http://",
|
|
||||||
self.fhdhr.config.dict["origin"]["address"],
|
|
||||||
str(self.fhdhr.config.dict["origin"]["port"]),
|
|
||||||
str(channel_id)
|
|
||||||
))
|
|
||||||
return channel_thumb_url
|
|
||||||
|
|
||||||
def get_channels(self):
|
|
||||||
|
|
||||||
data_url = ('%s%s:%s/service?method=channel.list&sid=%s' %
|
|
||||||
("https://" if self.fhdhr.config.dict["origin"]["ssl"] else "http://",
|
|
||||||
self.fhdhr.config.dict["origin"]["address"],
|
|
||||||
str(self.fhdhr.config.dict["origin"]["port"]),
|
|
||||||
self.origin.sid
|
|
||||||
))
|
|
||||||
|
|
||||||
data_req = self.fhdhr.web.session.get(data_url)
|
|
||||||
data_dict = xmltodict.parse(data_req.content)
|
|
||||||
|
|
||||||
if 'channels' not in list(data_dict['rsp'].keys()):
|
|
||||||
self.fhdhr.logger.error("Could not retrieve channel list")
|
|
||||||
return []
|
|
||||||
|
|
||||||
channel_o_list = data_dict['rsp']['channels']['channel']
|
|
||||||
|
|
||||||
channel_list = []
|
|
||||||
for c in channel_o_list:
|
|
||||||
dString = json.dumps(c)
|
|
||||||
channel_dict = eval(dString)
|
|
||||||
|
|
||||||
clean_station_item = {
|
|
||||||
"name": channel_dict["name"],
|
|
||||||
"callsign": channel_dict["name"],
|
|
||||||
"number": channel_dict["formatted-number"],
|
|
||||||
"id": channel_dict["id"],
|
|
||||||
"thumbnail": self.get_channel_thumbnail(channel_dict["id"])
|
|
||||||
}
|
|
||||||
channel_list.append(clean_station_item)
|
|
||||||
return channel_list
|
|
||||||
|
|
||||||
def get_channel_stream(self, chandict, stream_args):
|
|
||||||
streamurl = ('%s%s:%s/live?channel_id=%s&client=%s' %
|
|
||||||
("https://" if self.fhdhr.config.dict["origin"]["ssl"] else "http://",
|
|
||||||
self.fhdhr.config.dict["origin"]["address"],
|
|
||||||
str(self.fhdhr.config.dict["origin"]["port"]),
|
|
||||||
str(chandict["origin_id"]),
|
|
||||||
"fhdhr_%s" % chandict["origin_number"],
|
|
||||||
))
|
|
||||||
|
|
||||||
stream_info = {"url": streamurl}
|
|
||||||
|
|
||||||
return stream_info
|
|
||||||
@ -1,76 +0,0 @@
|
|||||||
{
|
|
||||||
"main":{
|
|
||||||
"servicename":{
|
|
||||||
"value": "NextPVR",
|
|
||||||
"config_file": false,
|
|
||||||
"config_web": false
|
|
||||||
},
|
|
||||||
"dictpopname":{
|
|
||||||
"value": "nextpvr",
|
|
||||||
"config_file": false,
|
|
||||||
"config_web": false
|
|
||||||
},
|
|
||||||
"reponame":{
|
|
||||||
"value": "fHDHR_NextPVR",
|
|
||||||
"config_file": false,
|
|
||||||
"config_web": false
|
|
||||||
},
|
|
||||||
"required":{
|
|
||||||
"value": "nextpvr/pin",
|
|
||||||
"config_file": false,
|
|
||||||
"config_web": false
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"fhdhr":{
|
|
||||||
"friendlyname":{
|
|
||||||
"value": "fHDHR-NextPVR",
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": true
|
|
||||||
},
|
|
||||||
"reporting_firmware_name":{
|
|
||||||
"value": "fHDHR_NextPVR",
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": true
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"epg":{
|
|
||||||
"method":{
|
|
||||||
"value": "origin",
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": true
|
|
||||||
},
|
|
||||||
"valid_epg_methods":{
|
|
||||||
"value": "None,blocks,origin,zap2it,tvtv",
|
|
||||||
"config_file": false,
|
|
||||||
"config_web": false
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"nextpvr":{
|
|
||||||
"address":{
|
|
||||||
"value": "localhost",
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": true
|
|
||||||
},
|
|
||||||
"port":{
|
|
||||||
"value": 8866,
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": true
|
|
||||||
},
|
|
||||||
"ssl":{
|
|
||||||
"value": false,
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": true
|
|
||||||
},
|
|
||||||
"pin":{
|
|
||||||
"value": "none",
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": true,
|
|
||||||
"config_web_hidden": true
|
|
||||||
},
|
|
||||||
"sid":{
|
|
||||||
"value": "none",
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,55 +0,0 @@
|
|||||||
import xmltodict
|
|
||||||
import hashlib
|
|
||||||
|
|
||||||
import fHDHR.tools
|
|
||||||
import fHDHR.exceptions
|
|
||||||
|
|
||||||
|
|
||||||
class OriginService():
|
|
||||||
|
|
||||||
def __init__(self, fhdhr):
|
|
||||||
self.fhdhr = fhdhr
|
|
||||||
|
|
||||||
self.nextpvr_address = ('%s%s:%s' %
|
|
||||||
("https://" if self.fhdhr.config.dict["origin"]["ssl"] else "http://",
|
|
||||||
self.fhdhr.config.dict["origin"]["address"],
|
|
||||||
str(self.fhdhr.config.dict["origin"]["port"]),
|
|
||||||
))
|
|
||||||
|
|
||||||
self.login()
|
|
||||||
|
|
||||||
def login(self):
|
|
||||||
self.fhdhr.logger.info("Logging into NextPVR")
|
|
||||||
self.sid = self.get_sid()
|
|
||||||
if not self.sid:
|
|
||||||
raise fHDHR.exceptions.OriginSetupError("NextPVR Login Failed")
|
|
||||||
else:
|
|
||||||
self.fhdhr.logger.info("NextPVR Login Success")
|
|
||||||
self.fhdhr.config.write(self.fhdhr.config.dict["main"]["dictpopname"], 'sid', self.sid)
|
|
||||||
|
|
||||||
def get_sid(self):
|
|
||||||
if self.fhdhr.config.dict["origin"]["sid"]:
|
|
||||||
return self.fhdhr.config.dict["origin"]["sid"]
|
|
||||||
|
|
||||||
initiate_url = '%s/service?method=session.initiate&ver=1.0&device=fhdhr' % self.nextpvr_address
|
|
||||||
|
|
||||||
initiate_req = self.fhdhr.web.session.get(initiate_url)
|
|
||||||
initiate_dict = xmltodict.parse(initiate_req.content)
|
|
||||||
|
|
||||||
sid = initiate_dict['rsp']['sid']
|
|
||||||
salt = initiate_dict['rsp']['salt']
|
|
||||||
md5PIN = hashlib.md5(str(self.fhdhr.config.dict["origin"]['pin']).encode('utf-8')).hexdigest()
|
|
||||||
string = ':%s:%s' % (md5PIN, salt)
|
|
||||||
clientKey = hashlib.md5(string.encode('utf-8')).hexdigest()
|
|
||||||
|
|
||||||
login_url = ('%s/service?method=session.login&sid=%s&md5=%s' %
|
|
||||||
(self.nextpvr_address, sid, clientKey))
|
|
||||||
login_req = self.fhdhr.web.session.get(login_url)
|
|
||||||
login_dict = xmltodict.parse(login_req.content)
|
|
||||||
|
|
||||||
loginsuccess = None
|
|
||||||
if login_dict['rsp']['@stat'] == "ok":
|
|
||||||
if login_dict['rsp']['allow_watch'] == "true":
|
|
||||||
loginsuccess = sid
|
|
||||||
|
|
||||||
return loginsuccess
|
|
||||||
@ -1,12 +0,0 @@
|
|||||||
|
|
||||||
from .origin_api import Origin_API
|
|
||||||
from .origin_html import Origin_HTML
|
|
||||||
|
|
||||||
|
|
||||||
class fHDHR_Origin_Web():
|
|
||||||
|
|
||||||
def __init__(self, fhdhr):
|
|
||||||
self.fhdhr = fhdhr
|
|
||||||
|
|
||||||
self.origin_api = Origin_API(fhdhr)
|
|
||||||
self.origin_html = Origin_HTML(fhdhr)
|
|
||||||
@ -1,16 +0,0 @@
|
|||||||
|
|
||||||
|
|
||||||
class Origin_API():
|
|
||||||
endpoints = ["/api/origin"]
|
|
||||||
endpoint_name = "api_origin"
|
|
||||||
endpoint_methods = ["GET", "POST"]
|
|
||||||
|
|
||||||
def __init__(self, fhdhr):
|
|
||||||
self.fhdhr = fhdhr
|
|
||||||
|
|
||||||
def __call__(self, *args):
|
|
||||||
return self.get(*args)
|
|
||||||
|
|
||||||
def get(self, *args):
|
|
||||||
|
|
||||||
return "Success"
|
|
||||||
@ -1,30 +0,0 @@
|
|||||||
from flask import request, render_template_string
|
|
||||||
import pathlib
|
|
||||||
from io import StringIO
|
|
||||||
|
|
||||||
|
|
||||||
class Origin_HTML():
|
|
||||||
endpoints = ["/origin", "/origin.html"]
|
|
||||||
endpoint_name = "page_origin_html"
|
|
||||||
|
|
||||||
def __init__(self, fhdhr):
|
|
||||||
self.fhdhr = fhdhr
|
|
||||||
|
|
||||||
self.template_file = pathlib.Path(self.fhdhr.config.internal["paths"]["origin_web"]).joinpath('origin.html')
|
|
||||||
self.template = StringIO()
|
|
||||||
self.template.write(open(self.template_file).read())
|
|
||||||
|
|
||||||
def __call__(self, *args):
|
|
||||||
return self.get(*args)
|
|
||||||
|
|
||||||
def get(self, *args):
|
|
||||||
|
|
||||||
if self.fhdhr.originwrapper.setup_success:
|
|
||||||
origin_status_dict = {
|
|
||||||
"Setup": "Success",
|
|
||||||
"Address": self.fhdhr.originwrapper.originservice.nextpvr_address,
|
|
||||||
"Total Channels": len(self.fhdhr.device.channels.list)
|
|
||||||
}
|
|
||||||
else:
|
|
||||||
origin_status_dict = {"Setup": "Failed"}
|
|
||||||
return render_template_string(self.template.getvalue(), request=request, fhdhr=self.fhdhr, origin_status_dict=origin_status_dict, list=list)
|
|
||||||
@ -3,20 +3,20 @@ import datetime
|
|||||||
from fHDHR.exceptions import EPGSetupError
|
from fHDHR.exceptions import EPGSetupError
|
||||||
|
|
||||||
|
|
||||||
class tvtvEPG():
|
class Plugin_OBJ():
|
||||||
|
|
||||||
def __init__(self, fhdhr, channels):
|
def __init__(self, channels, plugin_utils):
|
||||||
self.fhdhr = fhdhr
|
self.plugin_utils = plugin_utils
|
||||||
|
|
||||||
self.channels = channels
|
self.channels = channels
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def postalcode(self):
|
def postalcode(self):
|
||||||
if self.fhdhr.config.dict["tvtv"]["postalcode"]:
|
if self.plugin_utils.config.dict["tvtv"]["postalcode"]:
|
||||||
return self.fhdhr.config.dict["tvtv"]["postalcode"]
|
return self.plugin_utils.config.dict["tvtv"]["postalcode"]
|
||||||
try:
|
try:
|
||||||
postalcode_url = 'http://ipinfo.io/json'
|
postalcode_url = 'http://ipinfo.io/json'
|
||||||
postalcode_req = self.fhdhr.web.session.get(postalcode_url)
|
postalcode_req = self.plugin_utils.web.session.get(postalcode_url)
|
||||||
data = postalcode_req.json()
|
data = postalcode_req.json()
|
||||||
postalcode = data["postal"]
|
postalcode = data["postal"]
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
@ -27,9 +27,9 @@ class tvtvEPG():
|
|||||||
@property
|
@property
|
||||||
def lineup_id(self):
|
def lineup_id(self):
|
||||||
lineup_id_url = "https://www.tvtv.us/tvm/t/tv/v4/lineups?postalCode=%s" % self.postalcode
|
lineup_id_url = "https://www.tvtv.us/tvm/t/tv/v4/lineups?postalCode=%s" % self.postalcode
|
||||||
if self.fhdhr.config.dict["tvtv"]["lineuptype"]:
|
if self.plugin_utils.config.dict["tvtv"]["lineuptype"]:
|
||||||
lineup_id_url += "&lineupType=%s" % self.fhdhr.config.dict["tvtv"]["lineuptype"]
|
lineup_id_url += "&lineupType=%s" % self.plugin_utils.config.dict["tvtv"]["lineuptype"]
|
||||||
lineup_id_req = self.fhdhr.web.session.get(lineup_id_url)
|
lineup_id_req = self.plugin_utils.web.session.get(lineup_id_url)
|
||||||
data = lineup_id_req.json()
|
data = lineup_id_req.json()
|
||||||
lineup_id = data[0]["lineupID"]
|
lineup_id = data[0]["lineupID"]
|
||||||
return lineup_id
|
return lineup_id
|
||||||
@ -113,43 +113,43 @@ class tvtvEPG():
|
|||||||
stoptime = "%s%s" % (datesdict["stop"], "T00%3A00%3A00.000Z")
|
stoptime = "%s%s" % (datesdict["stop"], "T00%3A00%3A00.000Z")
|
||||||
url = "https://www.tvtv.us/tvm/t/tv/v4/lineups/%s/listings/grid?start=%s&end=%s" % (self.lineup_id, starttime, stoptime)
|
url = "https://www.tvtv.us/tvm/t/tv/v4/lineups/%s/listings/grid?start=%s&end=%s" % (self.lineup_id, starttime, stoptime)
|
||||||
self.get_cached_item(str(datesdict["start"]), url)
|
self.get_cached_item(str(datesdict["start"]), url)
|
||||||
cache_list = self.fhdhr.db.get_cacheitem_value("cache_list", "epg_cache", "tvtv") or []
|
cache_list = self.plugin_utils.db.get_plugin_value("cache_list", "epg_cache", "tvtv") or []
|
||||||
return [self.fhdhr.db.get_cacheitem_value(x, "epg_cache", "tvtv") for x in cache_list]
|
return [self.plugin_utils.db.get_plugin_value(x, "epg_cache", "tvtv") for x in cache_list]
|
||||||
|
|
||||||
def get_cached_item(self, cache_key, url):
|
def get_cached_item(self, cache_key, url):
|
||||||
cacheitem = self.fhdhr.db.get_cacheitem_value(cache_key, "epg_cache", "tvtv")
|
cacheitem = self.plugin_utils.db.get_plugin_value(cache_key, "epg_cache", "tvtv")
|
||||||
if cacheitem:
|
if cacheitem:
|
||||||
self.fhdhr.logger.info("FROM CACHE: %s" % cache_key)
|
self.plugin_utils.logger.info("FROM CACHE: %s" % cache_key)
|
||||||
return cacheitem
|
return cacheitem
|
||||||
else:
|
else:
|
||||||
self.fhdhr.logger.info("Fetching: %s" % url)
|
self.plugin_utils.logger.info("Fetching: %s" % url)
|
||||||
try:
|
try:
|
||||||
resp = self.fhdhr.web.session.get(url)
|
resp = self.plugin_utils.web.session.get(url)
|
||||||
except self.fhdhr.web.exceptions.HTTPError:
|
except self.plugin_utils.web.exceptions.HTTPError:
|
||||||
self.fhdhr.logger.info('Got an error! Ignoring it.')
|
self.plugin_utils.logger.info('Got an error! Ignoring it.')
|
||||||
return
|
return
|
||||||
result = resp.json()
|
result = resp.json()
|
||||||
|
|
||||||
self.fhdhr.db.set_cacheitem_value(cache_key, "epg_cache", result, "tvtv")
|
self.plugin_utils.db.set_plugin_value(cache_key, "epg_cache", result, "tvtv")
|
||||||
cache_list = self.fhdhr.db.get_cacheitem_value("cache_list", "epg_cache", "tvtv") or []
|
cache_list = self.plugin_utils.db.get_plugin_value("cache_list", "epg_cache", "tvtv") or []
|
||||||
cache_list.append(cache_key)
|
cache_list.append(cache_key)
|
||||||
self.fhdhr.db.set_cacheitem_value("cache_list", "epg_cache", cache_list, "tvtv")
|
self.plugin_utils.db.set_plugin_value("cache_list", "epg_cache", cache_list, "tvtv")
|
||||||
|
|
||||||
def remove_stale_cache(self, todaydate):
|
def remove_stale_cache(self, todaydate):
|
||||||
cache_list = self.fhdhr.db.get_cacheitem_value("cache_list", "epg_cache", "tvtv") or []
|
cache_list = self.plugin_utils.db.get_plugin_value("cache_list", "epg_cache", "tvtv") or []
|
||||||
cache_to_kill = []
|
cache_to_kill = []
|
||||||
for cacheitem in cache_list:
|
for cacheitem in cache_list:
|
||||||
cachedate = datetime.datetime.strptime(str(cacheitem), "%Y-%m-%d")
|
cachedate = datetime.datetime.strptime(str(cacheitem), "%Y-%m-%d")
|
||||||
todaysdate = datetime.datetime.strptime(str(todaydate), "%Y-%m-%d")
|
todaysdate = datetime.datetime.strptime(str(todaydate), "%Y-%m-%d")
|
||||||
if cachedate < todaysdate:
|
if cachedate < todaysdate:
|
||||||
cache_to_kill.append(cacheitem)
|
cache_to_kill.append(cacheitem)
|
||||||
self.fhdhr.db.delete_cacheitem_value(cacheitem, "epg_cache", "tvtv")
|
self.plugin_utils.db.delete_plugin_value(cacheitem, "epg_cache", "tvtv")
|
||||||
self.fhdhr.logger.info("Removing stale cache: %s" % cacheitem)
|
self.plugin_utils.logger.info("Removing stale cache: %s" % cacheitem)
|
||||||
self.fhdhr.db.set_cacheitem_value("cache_list", "epg_cache", [x for x in cache_list if x not in cache_to_kill], "tvtv")
|
self.plugin_utils.db.set_plugin_value("cache_list", "epg_cache", [x for x in cache_list if x not in cache_to_kill], "tvtv")
|
||||||
|
|
||||||
def clear_cache(self):
|
def clear_cache(self):
|
||||||
cache_list = self.fhdhr.db.get_cacheitem_value("cache_list", "epg_cache", "tvtv") or []
|
cache_list = self.plugin_utils.db.get_plugin_value("cache_list", "epg_cache", "tvtv") or []
|
||||||
for cacheitem in cache_list:
|
for cacheitem in cache_list:
|
||||||
self.fhdhr.db.delete_cacheitem_value(cacheitem, "epg_cache", "tvtv")
|
self.plugin_utils.db.delete_plugin_value(cacheitem, "epg_cache", "tvtv")
|
||||||
self.fhdhr.logger.info("Removing cache: %s" % str(cacheitem))
|
self.plugin_utils.logger.info("Removing cache: %s" % str(cacheitem))
|
||||||
self.fhdhr.db.delete_cacheitem_value("cache_list", "epg_cache", "tvtv")
|
self.plugin_utils.db.delete_plugin_value("cache_list", "epg_cache", "tvtv")
|
||||||
plugins/fHDHR_plugin_epg_tvtv/plugin.json (new file)
@@ -0,0 +1,5 @@
+{
+"name":"tvtv",
+"version":"v0.6.0-beta",
+"type":"alt_epg"
+}
@ -5,20 +5,20 @@ from fHDHR.tools import xmldictmaker
|
|||||||
from fHDHR.exceptions import EPGSetupError
|
from fHDHR.exceptions import EPGSetupError
|
||||||
|
|
||||||
|
|
||||||
class zap2itEPG():
|
class Plugin_OBJ():
|
||||||
|
|
||||||
def __init__(self, fhdhr, channels):
|
def __init__(self, channels, plugin_utils):
|
||||||
self.fhdhr = fhdhr
|
self.plugin_utils = plugin_utils
|
||||||
|
|
||||||
self.channels = channels
|
self.channels = channels
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def postalcode(self):
|
def postalcode(self):
|
||||||
if self.fhdhr.config.dict["zap2it"]["postalcode"]:
|
if self.plugin_utils.config.dict["zap2it"]["postalcode"]:
|
||||||
return self.fhdhr.config.dict["zap2it"]["postalcode"]
|
return self.plugin_utils.config.dict["zap2it"]["postalcode"]
|
||||||
try:
|
try:
|
||||||
postalcode_url = 'http://ipinfo.io/json'
|
postalcode_url = 'http://ipinfo.io/json'
|
||||||
postalcode_req = self.fhdhr.web.session.get(postalcode_url)
|
postalcode_req = self.plugin_utils.web.session.get(postalcode_url)
|
||||||
data = postalcode_req.json()
|
data = postalcode_req.json()
|
||||||
postalcode = data["postal"]
|
postalcode = data["postal"]
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
@ -32,12 +32,12 @@ class zap2itEPG():
|
|||||||
# Start time parameter is now rounded down to nearest `zap_timespan`, in s.
|
# Start time parameter is now rounded down to nearest `zap_timespan`, in s.
|
||||||
zap_time = datetime.datetime.utcnow().timestamp()
|
zap_time = datetime.datetime.utcnow().timestamp()
|
||||||
self.remove_stale_cache(zap_time)
|
self.remove_stale_cache(zap_time)
|
||||||
zap_time_window = int(self.fhdhr.config.dict["zap2it"]["timespan"]) * 3600
|
zap_time_window = int(self.plugin_utils.config.dict["zap2it"]["timespan"]) * 3600
|
||||||
zap_time = int(zap_time - (zap_time % zap_time_window))
|
zap_time = int(zap_time - (zap_time % zap_time_window))
|
||||||
|
|
||||||
# Fetch data in `zap_timespan` chunks.
|
# Fetch data in `zap_timespan` chunks.
|
||||||
i_times = []
|
i_times = []
|
||||||
for i in range(int(7 * 24 / int(self.fhdhr.config.dict["zap2it"]["timespan"]))):
|
for i in range(int(7 * 24 / int(self.plugin_utils.config.dict["zap2it"]["timespan"]))):
|
||||||
i_times.append(zap_time + (i * zap_time_window))
|
i_times.append(zap_time + (i * zap_time_window))
|
||||||
|
|
||||||
cached_items = self.get_cached(i_times)
|
cached_items = self.get_cached(i_times)
|
||||||
@ -111,18 +111,18 @@ class zap2itEPG():
|
|||||||
for i_time in i_times:
|
for i_time in i_times:
|
||||||
|
|
||||||
parameters = {
|
parameters = {
|
||||||
'aid': self.fhdhr.config.dict["zap2it"]['affiliate_id'],
|
'aid': self.plugin_utils.config.dict["zap2it"]['affiliate_id'],
|
||||||
'country': self.fhdhr.config.dict["zap2it"]['country'],
|
'country': self.plugin_utils.config.dict["zap2it"]['country'],
|
||||||
'device': self.fhdhr.config.dict["zap2it"]['device'],
|
'device': self.plugin_utils.config.dict["zap2it"]['device'],
|
||||||
'headendId': self.fhdhr.config.dict["zap2it"]['headendid'],
|
'headendId': self.plugin_utils.config.dict["zap2it"]['headendid'],
|
||||||
'isoverride': "true",
|
'isoverride': "true",
|
||||||
'languagecode': self.fhdhr.config.dict["zap2it"]['languagecode'],
|
'languagecode': self.plugin_utils.config.dict["zap2it"]['languagecode'],
|
||||||
'pref': 'm,p',
|
'pref': 'm,p',
|
||||||
'timespan': self.fhdhr.config.dict["zap2it"]['timespan'],
|
'timespan': self.plugin_utils.config.dict["zap2it"]['timespan'],
|
||||||
'timezone': self.fhdhr.config.dict["zap2it"]['timezone'],
|
'timezone': self.plugin_utils.config.dict["zap2it"]['timezone'],
|
||||||
'userId': self.fhdhr.config.dict["zap2it"]['userid'],
|
'userId': self.plugin_utils.config.dict["zap2it"]['userid'],
|
||||||
'postalCode': str(self.postalcode),
|
'postalCode': str(self.postalcode),
|
||||||
'lineupId': '%s-%s-DEFAULT' % (self.fhdhr.config.dict["zap2it"]['country'], self.fhdhr.config.dict["zap2it"]['device']),
|
'lineupId': '%s-%s-DEFAULT' % (self.plugin_utils.config.dict["zap2it"]['country'], self.plugin_utils.config.dict["zap2it"]['device']),
|
||||||
'time': i_time,
|
'time': i_time,
|
||||||
'Activity_ID': 1,
|
'Activity_ID': 1,
|
||||||
'FromPage': "TV%20Guide",
|
'FromPage': "TV%20Guide",
|
||||||
@ -131,43 +131,43 @@ class zap2itEPG():
|
|||||||
url = 'https://tvlistings.zap2it.com/api/grid?'
|
url = 'https://tvlistings.zap2it.com/api/grid?'
|
||||||
url += urllib.parse.urlencode(parameters)
|
url += urllib.parse.urlencode(parameters)
|
||||||
self.get_cached_item(str(i_time), url)
|
self.get_cached_item(str(i_time), url)
|
||||||
cache_list = self.fhdhr.db.get_cacheitem_value("cache_list", "epg_cache", "zap2it") or []
|
cache_list = self.plugin_utils.db.get_plugin_value("cache_list", "epg_cache", "zap2it") or []
|
||||||
return [self.fhdhr.db.get_cacheitem_value(x, "epg_cache", "zap2it") for x in cache_list]
|
return [self.plugin_utils.db.get_plugin_value(x, "epg_cache", "zap2it") for x in cache_list]
|
||||||
|
|
||||||
def get_cached_item(self, cache_key, url):
|
def get_cached_item(self, cache_key, url):
|
||||||
cacheitem = self.fhdhr.db.get_cacheitem_value(cache_key, "epg_cache", "zap2it")
|
cacheitem = self.plugin_utils.db.get_plugin_value(cache_key, "epg_cache", "zap2it")
|
||||||
if cacheitem:
|
if cacheitem:
|
||||||
self.fhdhr.logger.info("FROM CACHE: %s" % cache_key)
|
self.plugin_utils.logger.info("FROM CACHE: %s" % cache_key)
|
||||||
return cacheitem
|
return cacheitem
|
||||||
else:
|
else:
|
||||||
self.fhdhr.logger.info("Fetching: %s" % url)
|
self.plugin_utils.logger.info("Fetching: %s" % url)
|
||||||
try:
|
try:
|
||||||
resp = self.fhdhr.web.session.get(url)
|
resp = self.plugin_utils.web.session.get(url)
|
||||||
except self.fhdhr.web.exceptions.HTTPError:
|
except self.plugin_utils.web.exceptions.HTTPError:
|
||||||
self.fhdhr.logger.info('Got an error! Ignoring it.')
|
self.plugin_utils.logger.info('Got an error! Ignoring it.')
|
||||||
return
|
return
|
||||||
result = resp.json()
|
result = resp.json()
|
||||||
|
|
||||||
self.fhdhr.db.set_cacheitem_value(cache_key, "epg_cache", result, "zap2it")
|
self.plugin_utils.db.set_plugin_value(cache_key, "epg_cache", result, "zap2it")
|
||||||
cache_list = self.fhdhr.db.get_cacheitem_value("cache_list", "epg_cache", "zap2it") or []
|
cache_list = self.plugin_utils.db.get_plugin_value("cache_list", "epg_cache", "zap2it") or []
|
||||||
cache_list.append(cache_key)
|
cache_list.append(cache_key)
|
||||||
self.fhdhr.db.set_cacheitem_value("cache_list", "epg_cache", cache_list, "zap2it")
|
self.plugin_utils.db.set_plugin_value("cache_list", "epg_cache", cache_list, "zap2it")
|
||||||
|
|
||||||
def remove_stale_cache(self, zap_time):
|
def remove_stale_cache(self, zap_time):
|
||||||
|
|
||||||
cache_list = self.fhdhr.db.get_cacheitem_value("cache_list", "epg_cache", "zap2it") or []
|
cache_list = self.plugin_utils.db.get_plugin_value("cache_list", "epg_cache", "zap2it") or []
|
||||||
cache_to_kill = []
|
cache_to_kill = []
|
||||||
for cacheitem in cache_list:
|
for cacheitem in cache_list:
|
||||||
cachedate = int(cacheitem)
|
cachedate = int(cacheitem)
|
||||||
if cachedate < zap_time:
|
if cachedate < zap_time:
|
||||||
cache_to_kill.append(cacheitem)
|
cache_to_kill.append(cacheitem)
|
||||||
self.fhdhr.db.delete_cacheitem_value(cacheitem, "epg_cache", "zap2it")
|
self.plugin_utils.db.delete_plugin_value(cacheitem, "epg_cache", "zap2it")
|
||||||
self.fhdhr.logger.info("Removing stale cache: %s" % cacheitem)
|
self.plugin_utils.logger.info("Removing stale cache: %s" % cacheitem)
|
||||||
self.fhdhr.db.set_cacheitem_value("cache_list", "epg_cache", [x for x in cache_list if x not in cache_to_kill], "zap2it")
|
self.plugin_utils.db.set_plugin_value("cache_list", "epg_cache", [x for x in cache_list if x not in cache_to_kill], "zap2it")
|
||||||
|
|
||||||
def clear_cache(self):
|
def clear_cache(self):
|
||||||
cache_list = self.fhdhr.db.get_cacheitem_value("cache_list", "epg_cache", "zap2it") or []
|
cache_list = self.plugin_utils.db.get_plugin_value("cache_list", "epg_cache", "zap2it") or []
|
||||||
for cacheitem in cache_list:
|
for cacheitem in cache_list:
|
||||||
self.fhdhr.db.delete_cacheitem_value(cacheitem, "epg_cache", "zap2it")
|
self.plugin_utils.db.delete_plugin_value(cacheitem, "epg_cache", "zap2it")
|
||||||
self.fhdhr.logger.info("Removing cache: %s" % cacheitem)
|
self.plugin_utils.logger.info("Removing cache: %s" % cacheitem)
|
||||||
self.fhdhr.db.delete_cacheitem_value("cache_list", "epg_cache", "zap2it")
|
self.plugin_utils.db.delete_plugin_value("cache_list", "epg_cache", "zap2it")
|
||||||
plugins/fHDHR_plugin_epg_zap2it/plugin.json (new file)
@@ -0,0 +1,5 @@
+{
+"name":"zap2it",
+"version":"v0.6.0-beta",
+"type":"alt_epg"
+}
plugins/fHDHR_plugin_interface_cluster/cluster_conf.json (new file)
@@ -0,0 +1,14 @@
+{
+"cluster":{
+"enabled":{
+"value": true,
+"config_file": true,
+"config_web": true
+},
+"friendlyname":{
+"value": "none",
+"config_file": true,
+"config_web": true
+}
+}
+}
157
plugins/fHDHR_plugin_interface_cluster/interface/__init__.py
Normal file
157
plugins/fHDHR_plugin_interface_cluster/interface/__init__.py
Normal file
@ -0,0 +1,157 @@
|
|||||||
|
from collections import OrderedDict
|
||||||
|
|
||||||
|
|
||||||
|
class Plugin_OBJ():
|
||||||
|
|
||||||
|
def __init__(self, fhdhr, plugin_utils):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
self.plugin_utils = plugin_utils
|
||||||
|
|
||||||
|
self.friendlyname = self.fhdhr.config.dict["cluster"]["friendlyname"] or "%s %s" % (self.fhdhr.config.dict["fhdhr"]["friendlyname"], self.fhdhr.origins.valid_origins[0])
|
||||||
|
|
||||||
|
if self.plugin_utils.config.dict["fhdhr"]["discovery_address"]:
|
||||||
|
self.startup_sync()
|
||||||
|
|
||||||
|
def cluster(self):
|
||||||
|
return self.plugin_utils.db.get_plugin_value("cluster", "dict") or self.default_cluster()
|
||||||
|
|
||||||
|
def get_cluster_dicts_web(self):
|
||||||
|
fhdhr_list = self.cluster()
|
||||||
|
locations = []
|
||||||
|
for location in list(fhdhr_list.keys()):
|
||||||
|
item_dict = {
|
||||||
|
"base_url": fhdhr_list[location]["base_url"],
|
||||||
|
"name": fhdhr_list[location]["name"]
|
||||||
|
}
|
||||||
|
if item_dict["base_url"] != self.plugin_utils.api.base:
|
||||||
|
locations.append(item_dict)
|
||||||
|
if len(locations):
|
||||||
|
locations = sorted(locations, key=lambda i: i['name'])
|
||||||
|
return locations
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
def get_list(self):
|
||||||
|
cluster = self.plugin_utils.db.get_plugin_value("cluster", "dict") or self.default_cluster()
|
||||||
|
return_dict = {}
|
||||||
|
for location in list(cluster.keys()):
|
||||||
|
if location != self.plugin_utils.api.base:
|
||||||
|
return_dict[location] = {
|
||||||
|
"Joined": True
|
||||||
|
}
|
||||||
|
|
||||||
|
detected_list = self.plugin_utils.ssdp.ssdp_handling[self.plugin_utils.namespace].detect_method.get()
|
||||||
|
for location in detected_list:
|
||||||
|
if location not in list(cluster.keys()):
|
||||||
|
return_dict[location] = {
|
||||||
|
"Joined": False
|
||||||
|
}
|
||||||
|
return_dict = OrderedDict(sorted(return_dict.items()))
|
||||||
|
return return_dict
|
||||||
|
|
||||||
|
def default_cluster(self):
|
||||||
|
defdict = {}
|
||||||
|
defdict[self.plugin_utils.api.base] = {
|
||||||
|
"base_url": self.plugin_utils.api.base,
|
||||||
|
"name": self.friendlyname
|
||||||
|
}
|
||||||
|
return defdict
|
||||||
|
|
||||||
|
def startup_sync(self):
|
||||||
|
self.plugin_utils.logger.info("Syncronizing with Cluster.")
|
||||||
|
cluster = self.plugin_utils.db.get_plugin_value("cluster", "dict") or self.default_cluster()
|
||||||
|
if not len(list(cluster.keys())):
|
||||||
|
self.plugin_utils.logger.info("No Cluster Found.")
|
||||||
|
else:
|
||||||
|
self.plugin_utils.logger.info("Found %s clustered services." % str(len(list(cluster.keys()))))
|
||||||
|
for location in list(cluster.keys()):
|
||||||
|
if location != self.plugin_utils.api.base:
|
||||||
|
self.plugin_utils.logger.debug("Checking Cluster Syncronization information from %s." % location)
|
||||||
|
sync_url = "%s/api/cluster?method=get" % location
|
||||||
|
try:
|
||||||
|
sync_open = self.plugin_utils.web.session.get(sync_url)
|
||||||
|
retrieved_cluster = sync_open.json()
|
||||||
|
if self.plugin_utils.api.base not in list(retrieved_cluster.keys()):
|
||||||
|
return self.leave()
|
||||||
|
except self.plugin_utils.web.exceptions.ConnectionError:
|
||||||
|
self.plugin_utils.logger.error("Unreachable: %s" % location)
|
||||||
|
|
||||||
|
def leave(self):
|
||||||
|
self.plugin_utils.logger.info("Leaving cluster.")
|
||||||
|
self.plugin_utils.db.set_plugin_value("cluster", "dict", self.default_cluster())
|
||||||
|
|
||||||
|
def disconnect(self):
|
||||||
|
cluster = self.plugin_utils.db.get_plugin_value("cluster", "dict") or self.default_cluster()
|
||||||
|
for location in list(cluster.keys()):
|
||||||
|
if location != self.plugin_utils.api.base:
|
||||||
|
self.plugin_utils.logger.info("Informing %s that I am departing the Cluster." % location)
|
||||||
|
sync_url = "%s/api/cluster?method=del&location=%s" % (location, self.plugin_utils.api.base)
|
||||||
|
try:
|
||||||
|
self.plugin_utils.web.session.get(sync_url)
|
||||||
|
except self.plugin_utils.web.exceptions.ConnectionError:
|
||||||
|
self.plugin_utils.logger.error("Unreachable: %s" % location)
|
||||||
|
self.leave()
|
||||||
|
|
||||||
|
def sync(self, location):
|
||||||
|
sync_url = "%s/api/cluster?method=get" % location
|
||||||
|
try:
|
||||||
|
sync_open = self.plugin_utils.web.session.get(sync_url)
|
||||||
|
self.plugin_utils.db.set_plugin_value("cluster", "dict", sync_open.json())
|
||||||
|
except self.plugin_utils.web.exceptions.ConnectionError:
|
||||||
|
self.plugin_utils.logger.error("Unreachable: %s" % location)
|
||||||
|
|
||||||
|
def push_sync(self):
|
||||||
|
cluster = self.plugin_utils.db.get_plugin_value("cluster", "dict") or self.default_cluster()
|
||||||
|
for location in list(cluster.keys()):
|
||||||
|
if location != self.plugin_utils.api.base:
|
||||||
|
sync_url = "%s/api/cluster?method=sync&location=%s" % (location, self.plugin_utils.api.base_quoted)
|
||||||
|
try:
|
||||||
|
self.plugin_utils.web.session.get(sync_url)
|
||||||
|
except self.plugin_utils.web.exceptions.ConnectionError:
|
||||||
|
self.plugin_utils.logger.error("Unreachable: %s" % location)
|
||||||
|
|
||||||
|
def add(self, location):
|
||||||
|
cluster = self.plugin_utils.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
|
||||||
|
if location not in list(cluster.keys()):
|
||||||
|
self.plugin_utils.logger.info("Adding %s to cluster." % location)
|
||||||
|
cluster[location] = {"base_url": location}
|
||||||
|
|
||||||
|
location_info_url = "%s/api/cluster?method=ident" % location
|
||||||
|
try:
|
||||||
|
location_info_req = self.plugin_utils.web.session.get(location_info_url)
|
||||||
|
except self.plugin_utils.web.exceptions.ConnectionError:
|
||||||
|
self.plugin_utils.logger.error("Unreachable: %s" % location)
|
||||||
|
del cluster[location]
|
||||||
|
self.plugin_utils.db.set_plugin_value("cluster", "dict", cluster)
|
||||||
|
return
|
||||||
|
location_info = location_info_req.json()
|
||||||
|
cluster[location]["name"] = location_info["name"]
|
||||||
|
|
||||||
|
cluster_info_url = "%s/api/cluster?method=get" % location
|
||||||
|
try:
|
||||||
|
cluster_info_req = self.plugin_utils.web.session.get(cluster_info_url)
|
||||||
|
except self.plugin_utils.web.exceptions.ConnectionError:
|
||||||
|
self.plugin_utils.logger.error("Unreachable: %s" % location)
|
||||||
|
del cluster[location]
|
||||||
|
self.plugin_utils.db.set_plugin_value("cluster", "dict", cluster)
|
||||||
|
return
|
||||||
|
cluster_info = cluster_info_req.json()
|
||||||
|
for cluster_key in list(cluster_info.keys()):
|
||||||
|
if cluster_key not in list(cluster.keys()):
|
||||||
|
cluster[cluster_key] = cluster_info[cluster_key]
|
||||||
|
|
||||||
|
self.plugin_utils.db.set_plugin_value("cluster", "dict", cluster)
|
||||||
|
self.push_sync()
|
||||||
|
|
||||||
|
def remove(self, location):
|
||||||
|
cluster = self.plugin_utils.db.get_plugin_value("cluster", "dict") or self.default_cluster()
|
||||||
|
if location in list(cluster.keys()):
|
||||||
|
self.plugin_utils.logger.info("Removing %s from cluster." % location)
|
||||||
|
del cluster[location]
|
||||||
|
sync_url = "%s/api/cluster?method=leave" % location
|
||||||
|
try:
|
||||||
|
self.plugin_utils.web.session.get(sync_url)
|
||||||
|
except self.plugin_utils.web.exceptions.ConnectionError:
|
||||||
|
self.plugin_utils.logger.error("Unreachable: %s" % location)
|
||||||
|
self.push_sync()
|
||||||
|
self.plugin_utils.db.set_plugin_value("cluster", "dict", cluster)
|
||||||
@@ -0,0 +1,3 @@
+{
+"type":"interface"
+}
plugins/fHDHR_plugin_interface_cluster/plugin.json (new file)
@@ -0,0 +1,5 @@
+{
+"name":"Cluster",
+"version":"v0.6.0-beta",
+"type":"interface"
+}
70
plugins/fHDHR_plugin_interface_cluster/ssdp/__init__.py
Normal file
70
plugins/fHDHR_plugin_interface_cluster/ssdp/__init__.py
Normal file
@ -0,0 +1,70 @@
|
|||||||
|
|
||||||
|
class fHDHR_Detect():
    # Small helper that persists SSDP-detected fHDHR locations in the database.

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr
        self.fhdhr.db.delete_fhdhr_value("ssdp_detect", "list")

    def set(self, location):
        detect_list = self.fhdhr.db.get_fhdhr_value("ssdp_detect", "list") or []
        if location not in detect_list:
            detect_list.append(location)
            self.fhdhr.db.set_fhdhr_value("ssdp_detect", "list", detect_list)

    def get(self):
        return self.fhdhr.db.get_fhdhr_value("ssdp_detect", "list") or []


class Plugin_OBJ():

    def __init__(self, fhdhr, plugin_utils, broadcast_ip, max_age):
        self.fhdhr = fhdhr

        self.detect_method = fHDHR_Detect(fhdhr)

        self.broadcast_ip = broadcast_ip
        self.device_xml_path = '/cluster/device.xml'
        self.schema = "upnp:rootdevice"

        self.max_age = max_age

    @property
    def enabled(self):
        return self.fhdhr.config.dict["cluster"]["enabled"]

    @property
    def notify(self):
        # Build the SSDP NOTIFY datagram advertising this instance's cluster device.xml.

        data = ''
        data_command = "NOTIFY * HTTP/1.1"

        data_dict = {
            "HOST": "%s:%d" % ("239.255.255.250", 1900),
            "NTS": "ssdp:alive",
            "USN": 'uuid:%s::%s' % (self.fhdhr.config.dict["main"]["uuid"], self.schema),
            "LOCATION": "%s%s" % (self.fhdhr.api.base, self.device_xml_path),
            "EXT": '',
            "SERVER": 'fHDHR/%s UPnP/1.0' % self.fhdhr.version,
            "Cache-Control:max-age=": self.max_age,
            "NT": self.schema,
            }

        data += "%s\r\n" % data_command
        for data_key in list(data_dict.keys()):
            data += "%s:%s\r\n" % (data_key, data_dict[data_key])
        data += "\r\n"

        return data

    def on_recv(self, headers, cmd, ssdp_handling):
        # Record other fHDHR instances that announce a cluster device.xml over SSDP.
        if cmd[0] == 'NOTIFY' and cmd[1] == '*':
            try:
                if headers["server"].startswith("fHDHR"):
                    if headers["location"].endswith("/device.xml"):
                        savelocation = headers["location"].split("/device.xml")[0]
                        if savelocation.endswith("/cluster"):
                            savelocation = savelocation.replace("/cluster", '')
                            if savelocation != self.fhdhr.api.base:
                                self.detect_method.set(savelocation)
            except KeyError:
                return
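As a worked example of how on_recv turns an SSDP NOTIFY back into a cluster location, the snippet below feeds in hand-made headers; the values and the lower-cased header keys are assumptions for illustration.

# Hypothetical NOTIFY headers as the SSDP listener might hand them to on_recv.
headers = {
    "server": "fHDHR/v0.6.0-beta UPnP/1.0",
    "location": "http://192.168.1.20:5004/cluster/device.xml",
}

savelocation = headers["location"].split("/device.xml")[0]  # ...:5004/cluster
if savelocation.endswith("/cluster"):
    savelocation = savelocation.replace("/cluster", "")     # http://192.168.1.20:5004
    # A location other than our own api.base would then be stored via fHDHR_Detect.set().
    print(savelocation)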
plugins/fHDHR_plugin_interface_cluster/ssdp/plugin.json (new file, 3 lines)
@ -0,0 +1,3 @@
{
    "type":"ssdp"
}
plugins/fHDHR_plugin_interface_cluster/web/__init__.py (new file, 14 lines)
@ -0,0 +1,14 @@
from .cluster_api import Cluster_API
from .cluster_html import Cluster_HTML
from .cluster_device_xml import Cluster_Device_XML


class Plugin_OBJ():

    def __init__(self, fhdhr, plugin_utils):
        self.fhdhr = fhdhr
        self.plugin_utils = plugin_utils

        # Wire up the cluster API, HTML page, and device.xml web handlers.
        self.cluster_api = Cluster_API(fhdhr, plugin_utils)
        self.cluster_html = Cluster_HTML(fhdhr, plugin_utils)
        self.cluster_device_xml = Cluster_Device_XML(fhdhr, plugin_utils)
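A quick manual check of the three web pieces could look like the sketch below; the base address is an assumption, and the /cluster HTML path is inferred from the class names rather than confirmed by the code above.

# Hypothetical smoke test against a running fHDHR instance; address is assumed.
import requests

base = "http://192.168.1.10:5004"

print(requests.get("%s/api/cluster?method=get" % base).json())    # Cluster_API
print(requests.get("%s/cluster/device.xml" % base).status_code)   # Cluster_Device_XML
print(requests.get("%s/cluster" % base).status_code)              # Cluster_HTML (assumed path)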