mirror of
https://github.com/fHDHR/fHDHR_NextPVR.git
synced 2025-12-06 18:16:58 -05:00
Compare commits
No commits in common. "main" and "v0.4.5-beta" have entirely different histories.
main
...
v0.4.5-bet
17
.github/stale.yml
vendored
17
.github/stale.yml
vendored
@ -1,17 +0,0 @@
|
|||||||
# Number of days of inactivity before an issue becomes stale
|
|
||||||
daysUntilStale: 60
|
|
||||||
# Number of days of inactivity before a stale issue is closed
|
|
||||||
daysUntilClose: 7
|
|
||||||
# Issues with these labels will never be considered stale
|
|
||||||
exemptLabels:
|
|
||||||
- pinned
|
|
||||||
- security
|
|
||||||
# Label to use when marking an issue as stale
|
|
||||||
staleLabel: wontfix
|
|
||||||
# Comment to post when marking an issue as stale. Set to `false` to disable
|
|
||||||
markComment: >
|
|
||||||
This issue has been automatically marked as stale because it has not had
|
|
||||||
recent activity. It will be closed if no further activity occurs. Thank you
|
|
||||||
for your contributions.
|
|
||||||
# Comment to post when closing a stale issue. Set to `false` to disable
|
|
||||||
closeComment: false
|
|
||||||
@ -16,7 +16,5 @@ fHDHR is labeled as beta until we reach v1.0.0
|
|||||||
|
|
||||||
Join us in `#fHDHR <irc://irc.freenode.net/#fHDHR>`_ on Freenode.
|
Join us in `#fHDHR <irc://irc.freenode.net/#fHDHR>`_ on Freenode.
|
||||||
|
|
||||||
# !!NOTICE!!
|
|
||||||
|
|
||||||
To reduce code duplication between variants, I am moving to a plugin system.
|
Due to multiple issues, I'm dropping official support for Windows.
|
||||||
The normal variant repos will stay active during the transition.
|
|
||||||
|
|||||||
62
config.all.ini
Normal file
62
config.all.ini
Normal file
@ -0,0 +1,62 @@
|
|||||||
|
[main]
|
||||||
|
# uuid =
|
||||||
|
# cache_dir =
|
||||||
|
# servicename = NextPVR
|
||||||
|
# reponame = fHDHR_NextPVR
|
||||||
|
|
||||||
|
[fhdhr]
|
||||||
|
# address = 0.0.0.0
|
||||||
|
# discovery_address = 0.0.0.0
|
||||||
|
# port = 5004
|
||||||
|
# stream_type = direct
|
||||||
|
# tuner_count = 4
|
||||||
|
# friendlyname = fHDHR-NextPVR
|
||||||
|
# reporting_firmware_name = fHDHR_NextPVR
|
||||||
|
# reporting_manufacturer = BoronDust
|
||||||
|
# reporting_model = fHDHR
|
||||||
|
# reporting_firmware_ver = 20201001
|
||||||
|
# reporting_tuner_type = Antenna
|
||||||
|
# device_auth = fHDHR
|
||||||
|
|
||||||
|
[epg]
|
||||||
|
# images = pass
|
||||||
|
# method = origin
|
||||||
|
# update_frequency = 43200
|
||||||
|
|
||||||
|
[ffmpeg]
|
||||||
|
# path = ffmpeg
|
||||||
|
# bytes_per_read = 1152000
|
||||||
|
|
||||||
|
[vlc]
|
||||||
|
# path = cvlc
|
||||||
|
# bytes_per_read = 1152000
|
||||||
|
|
||||||
|
[direct_stream]
|
||||||
|
# chunksize = 1048576
|
||||||
|
|
||||||
|
[logging]
|
||||||
|
# level = WARNING
|
||||||
|
|
||||||
|
[database]
|
||||||
|
# type = sqlite
|
||||||
|
# driver = None
|
||||||
|
|
||||||
|
[nextpvr]
|
||||||
|
# address = localhost
|
||||||
|
# port = 8866
|
||||||
|
# ssl =
|
||||||
|
# pin =
|
||||||
|
|
||||||
|
[zap2it]
|
||||||
|
# delay = 5
|
||||||
|
# postalcode = None
|
||||||
|
# affiliate_id = gapzap
|
||||||
|
# country = USA
|
||||||
|
# device = -
|
||||||
|
# headendid = lineupId
|
||||||
|
# isoverride = True
|
||||||
|
# languagecode = en
|
||||||
|
# pref =
|
||||||
|
# timespan = 6
|
||||||
|
# timezone =
|
||||||
|
# userid = -
|
||||||
@ -1,39 +0,0 @@
|
|||||||
{
|
|
||||||
"database":{
|
|
||||||
"type":{
|
|
||||||
"value": "sqlite",
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": false
|
|
||||||
},
|
|
||||||
"driver":{
|
|
||||||
"value": "none",
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": false
|
|
||||||
},
|
|
||||||
"user":{
|
|
||||||
"value": "none",
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": false
|
|
||||||
},
|
|
||||||
"pass":{
|
|
||||||
"value": "none",
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": false
|
|
||||||
},
|
|
||||||
"host":{
|
|
||||||
"value": "none",
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": false
|
|
||||||
},
|
|
||||||
"port":{
|
|
||||||
"value": "none",
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": false
|
|
||||||
},
|
|
||||||
"name":{
|
|
||||||
"value": "none",
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,40 +0,0 @@
|
|||||||
{
|
|
||||||
"epg":{
|
|
||||||
"images":{
|
|
||||||
"value": "pass",
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": true
|
|
||||||
},
|
|
||||||
"method":{
|
|
||||||
"value": "none",
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": true
|
|
||||||
},
|
|
||||||
"update_frequency":{
|
|
||||||
"value": 43200,
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": true
|
|
||||||
},
|
|
||||||
"reverse_days": {
|
|
||||||
"value": -1,
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": true
|
|
||||||
},
|
|
||||||
"forward_days": {
|
|
||||||
"value": 7,
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": true
|
|
||||||
},
|
|
||||||
"block_size": {
|
|
||||||
"value": 1800,
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": true
|
|
||||||
}
|
|
||||||
,
|
|
||||||
"xmltv_offset": {
|
|
||||||
"value": "+0000",
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,4 +1,21 @@
|
|||||||
{
|
{
|
||||||
|
"main":{
|
||||||
|
"uuid":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"cache_dir":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"thread_method":{
|
||||||
|
"value": "multiprocessing",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
}
|
||||||
|
},
|
||||||
"fhdhr":{
|
"fhdhr":{
|
||||||
"address":{
|
"address":{
|
||||||
"value": "0.0.0.0",
|
"value": "0.0.0.0",
|
||||||
@ -15,6 +32,26 @@
|
|||||||
"config_file": true,
|
"config_file": true,
|
||||||
"config_web": true
|
"config_web": true
|
||||||
},
|
},
|
||||||
|
"reporting_manufacturer":{
|
||||||
|
"value": "BoronDust",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"reporting_model":{
|
||||||
|
"value": "fHDHR",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"reporting_firmware_ver":{
|
||||||
|
"value": "20201001",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"reporting_tuner_type":{
|
||||||
|
"value": "Antenna",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
"device_auth":{
|
"device_auth":{
|
||||||
"value": "fHDHR",
|
"value": "fHDHR",
|
||||||
"config_file": true,
|
"config_file": true,
|
||||||
@ -29,11 +66,88 @@
|
|||||||
"value": true,
|
"value": true,
|
||||||
"config_file": true,
|
"config_file": true,
|
||||||
"config_web": true
|
"config_web": true
|
||||||
},
|
}
|
||||||
"friendlyname":{
|
},
|
||||||
"value": "fHDHR",
|
"epg":{
|
||||||
|
"images":{
|
||||||
|
"value": "pass",
|
||||||
"config_file": true,
|
"config_file": true,
|
||||||
"config_web": true
|
"config_web": true
|
||||||
}
|
}
|
||||||
|
},
|
||||||
|
"ffmpeg":{
|
||||||
|
"path":{
|
||||||
|
"value": "ffmpeg",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"bytes_per_read":{
|
||||||
|
"value": 1152000,
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"vlc":{
|
||||||
|
"path":{
|
||||||
|
"value": "cvlc",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"bytes_per_read":{
|
||||||
|
"value": 1152000,
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"direct_stream":{
|
||||||
|
"chunksize":{
|
||||||
|
"value": 1048576,
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"logging":{
|
||||||
|
"level":{
|
||||||
|
"value": "WARNING",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"database":{
|
||||||
|
"type":{
|
||||||
|
"value": "sqlite",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"driver":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"user":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"pass":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"host":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"port":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"name":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|||||||
@ -1,9 +0,0 @@
|
|||||||
{
|
|
||||||
"logging":{
|
|
||||||
"level":{
|
|
||||||
"value": "INFO",
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,24 +0,0 @@
|
|||||||
{
|
|
||||||
"main":{
|
|
||||||
"uuid":{
|
|
||||||
"value": "none",
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": false
|
|
||||||
},
|
|
||||||
"cache_dir":{
|
|
||||||
"value": "none",
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": true
|
|
||||||
},
|
|
||||||
"servicename":{
|
|
||||||
"value": "fHDHR",
|
|
||||||
"config_file": false,
|
|
||||||
"config_web": false
|
|
||||||
},
|
|
||||||
"reponame":{
|
|
||||||
"value": "fHDHR",
|
|
||||||
"config_file": false,
|
|
||||||
"config_web": false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
91
data/internal_config/serviceconf.json
Normal file
91
data/internal_config/serviceconf.json
Normal file
@ -0,0 +1,91 @@
|
|||||||
|
{
|
||||||
|
"main":{
|
||||||
|
"servicename":{
|
||||||
|
"value": "NextPVR",
|
||||||
|
"config_file": false,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"dictpopname":{
|
||||||
|
"value": "nextpvr",
|
||||||
|
"config_file": false,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"reponame":{
|
||||||
|
"value": "fHDHR_NextPVR",
|
||||||
|
"config_file": false,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"valid_epg_methods":{
|
||||||
|
"value": "None,blocks,origin,zap2it",
|
||||||
|
"config_file": false,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"required":{
|
||||||
|
"value": "nextpvr/pin",
|
||||||
|
"config_file": false,
|
||||||
|
"config_web": false
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"fhdhr":{
|
||||||
|
"friendlyname":{
|
||||||
|
"value": "fHDHR-NextPVR",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"stream_type":{
|
||||||
|
"value": "direct",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"tuner_count":{
|
||||||
|
"value": 4,
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"reporting_firmware_name":{
|
||||||
|
"value": "fHDHR_NextPVR",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"epg":{
|
||||||
|
"method":{
|
||||||
|
"value": "origin",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"update_frequency":{
|
||||||
|
"value": 43200,
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"nextpvr":{
|
||||||
|
"address":{
|
||||||
|
"value": "localhost",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"port":{
|
||||||
|
"value": 8866,
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"ssl":{
|
||||||
|
"value": false,
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"pin":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true,
|
||||||
|
"config_web_hidden": true
|
||||||
|
},
|
||||||
|
"sid":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
@ -1,29 +0,0 @@
|
|||||||
{
|
|
||||||
"ssdp":{
|
|
||||||
"enabled":{
|
|
||||||
"value": true,
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": false
|
|
||||||
},
|
|
||||||
"max_age":{
|
|
||||||
"value": 1800,
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": false
|
|
||||||
},
|
|
||||||
"proto":{
|
|
||||||
"value": "ipv4",
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": false
|
|
||||||
},
|
|
||||||
"iface":{
|
|
||||||
"value": "none",
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": false
|
|
||||||
},
|
|
||||||
"multicast_address":{
|
|
||||||
"value": "none",
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": false
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
@ -1,24 +0,0 @@
|
|||||||
{
|
|
||||||
"streaming":{
|
|
||||||
"bytes_per_read": {
|
|
||||||
"value": 1152000,
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": true
|
|
||||||
},
|
|
||||||
"origin_quality": {
|
|
||||||
"value": "none",
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": true
|
|
||||||
},
|
|
||||||
"transcode_quality": {
|
|
||||||
"value": "none",
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": true
|
|
||||||
},
|
|
||||||
"method": {
|
|
||||||
"value": "direct",
|
|
||||||
"config_file": true,
|
|
||||||
"config_web": true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
Before Width: | Height: | Size: 15 KiB After Width: | Height: | Size: 15 KiB |
8
data/www/style.css
Normal file
8
data/www/style.css
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
.pull-right { float: right; }
|
||||||
|
|
||||||
|
.pull-lef { float: left; }
|
||||||
|
|
||||||
|
.center {
|
||||||
|
margin-left: auto;
|
||||||
|
margin-right: auto;
|
||||||
|
}
|
||||||
56
data/www/templates/base.html
Normal file
56
data/www/templates/base.html
Normal file
@ -0,0 +1,56 @@
|
|||||||
|
<html>
|
||||||
|
<head>
|
||||||
|
<title>{{ fhdhr.config.dict["fhdhr"]["friendlyname"] }}</title>
|
||||||
|
<meta name="viewport" content="width=device-width, initial-scale=1.0">
|
||||||
|
<style>
|
||||||
|
table, th, td {border: 1px solid black;}
|
||||||
|
</style>
|
||||||
|
<link href="style.css" rel="stylesheet">
|
||||||
|
</head>
|
||||||
|
<body>
|
||||||
|
<h1 style="text-align: center;">
|
||||||
|
<span style="text-decoration: underline;"><strong><em>{{ fhdhr.config.dict["fhdhr"]["friendlyname"] }}</em></strong>
|
||||||
|
</span>
|
||||||
|
<img class="pull-left" src="/favicon.ico" alt="fHDHR Logo" width="100" height="100">
|
||||||
|
</h1>
|
||||||
|
<br><br>
|
||||||
|
<div>
|
||||||
|
|
||||||
|
<button class="pull-left" onclick="OpenLink('/')">fHDHR</a></button>
|
||||||
|
<button class="pull-left" onclick="OpenLink('/origin')">{{ fhdhr.config.dict["main"]["servicename"] }}</a></button>
|
||||||
|
<button class="pull-left" onclick="OpenLink('/channels')">Channels</a></button>
|
||||||
|
<button class="pull-left" onclick="OpenLink('/guide')">Guide</a></button>
|
||||||
|
<button class="pull-left" onclick="OpenLink('/cluster')">Cluster</a></button>
|
||||||
|
<button class="pull-left" onclick="OpenLink('/streams')">Streams</a></button>
|
||||||
|
<button class="pull-left" onclick="OpenLink('/xmltv')">xmltv</a></button>
|
||||||
|
<button class="pull-left" onclick="OpenLink('/version')">Version</a></button>
|
||||||
|
<button class="pull-left" onclick="OpenLink('/diagnostics')">Diagnostics</a></button>
|
||||||
|
<button class="pull-left" onclick="OpenLink('/settings')">Settings</a></button>
|
||||||
|
|
||||||
|
<a class="pull-right" style="padding: 5px;" href="/api/xmltv?method=get&source={{ fhdhr.device.epg.def_method }}">xmltv</a>
|
||||||
|
<a class="pull-right" style="padding: 5px;" href="/api/m3u?method=get&channel=all">m3u</a>
|
||||||
|
|
||||||
|
</div>
|
||||||
|
<hr align="center" width="100%">
|
||||||
|
|
||||||
|
{% set locations = fhdhr.device.cluster.get_cluster_dicts_web() %}
|
||||||
|
{% if locations %}
|
||||||
|
<div>
|
||||||
|
{% for location in locations %}
|
||||||
|
<button class="pull-left" onclick="OpenLink('{{ location["base_url"] }}')">{{ location["name"] }}</a></button>
|
||||||
|
{% endfor %}
|
||||||
|
</div>
|
||||||
|
<hr align="center" width="100%">
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% set retmessage = request.args.get('retmessage', default=None) %}
|
||||||
|
{% if retmessage %}
|
||||||
|
<p>{{ retmessage }}</p>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% block content %}{% endblock %}
|
||||||
|
</body>
|
||||||
|
</html>
|
||||||
|
<script>
|
||||||
|
function OpenLink(NewURL) {window.open(NewURL, "_self");}
|
||||||
|
</script>
|
||||||
74
data/www/templates/channels.html
Normal file
74
data/www/templates/channels.html
Normal file
@ -0,0 +1,74 @@
|
|||||||
|
{% extends "base.html" %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
|
||||||
|
<h4 style="text-align: center;">{{ fhdhr.config.dict["fhdhr"]["friendlyname"] }} Channels</h4>
|
||||||
|
|
||||||
|
<div style="text-align: center;">
|
||||||
|
<button onclick="OpenLink('/api/channels?method=scan&redirect=%2Fchannels')">Force Channel Update</a></button><p> Note: This may take some time.</p>
|
||||||
|
</div>
|
||||||
|
<br>
|
||||||
|
|
||||||
|
<table class="center" style="width:50%">
|
||||||
|
<tr>
|
||||||
|
<th></th>
|
||||||
|
<th></th>
|
||||||
|
</tr>
|
||||||
|
|
||||||
|
{% for key in list(channels_dict.keys()) %}
|
||||||
|
<tr>
|
||||||
|
<td>{{ key }}</td>
|
||||||
|
<td>{{ channels_dict[key] }}</td>
|
||||||
|
</tr>
|
||||||
|
{% endfor %}
|
||||||
|
|
||||||
|
<div style="text-align: center;">
|
||||||
|
<button onclick="OpenLink('/channels_editor')">Edit Channels</a></button>
|
||||||
|
</div>
|
||||||
|
<br>
|
||||||
|
|
||||||
|
<table class="center" style="width:100%">
|
||||||
|
<tr>
|
||||||
|
<th>Play</th>
|
||||||
|
<th>Channel Name</th>
|
||||||
|
<th>Channel CallSign</th>
|
||||||
|
<th>Channel Number</th>
|
||||||
|
<th>Channel Thumbnail</th>
|
||||||
|
<th>Enabled</th>
|
||||||
|
<th>Favorite</th>
|
||||||
|
</tr>
|
||||||
|
|
||||||
|
{% for chan_dict in channelslist %}
|
||||||
|
<tr>
|
||||||
|
<td>
|
||||||
|
{% if chan_dict["enabled"] %}
|
||||||
|
<a href="{{ chan_dict["play_url"] }}">Play</a>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
</td>
|
||||||
|
|
||||||
|
<td>{{ chan_dict["name"] }}</td>
|
||||||
|
<td>{{ chan_dict["callsign"] }}</td>
|
||||||
|
<td>{{ chan_dict["number"] }}</td>
|
||||||
|
|
||||||
|
{% if chan_dict["thumbnail"] %}
|
||||||
|
<td><img src="{{ chan_dict["thumbnail"] }}" alt="{{ chan_dict["name"] }}" width="100" height="100"></td>
|
||||||
|
{% else %}
|
||||||
|
<td>No Image Available</td>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if chan_dict["enabled"] %}
|
||||||
|
<td>Enabled</td>
|
||||||
|
{% else %}
|
||||||
|
<td>Disabled</td>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if chan_dict["favorite"] %}
|
||||||
|
<td>Yes</td>
|
||||||
|
{% else %}
|
||||||
|
<td>No</td>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% endfor %}
|
||||||
|
|
||||||
|
{% endblock %}
|
||||||
73
data/www/templates/channels_editor.html
Normal file
73
data/www/templates/channels_editor.html
Normal file
@ -0,0 +1,73 @@
|
|||||||
|
{% extends "base.html" %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
|
||||||
|
<h4 style="text-align: center;">{{ fhdhr.config.dict["fhdhr"]["friendlyname"] }} Channels Editor</h4>
|
||||||
|
|
||||||
|
<table class="center" style="width:100%">
|
||||||
|
<tr>
|
||||||
|
<th>Play</th>
|
||||||
|
<th>Channel Name</th>
|
||||||
|
<th>Channel CallSign</th>
|
||||||
|
<th>Channel Number</th>
|
||||||
|
<th>Channel Thumbnail</th>
|
||||||
|
<th>Enabled</th>
|
||||||
|
<th>Favorite</th>
|
||||||
|
<th>Update</th>
|
||||||
|
<th>Reset</th>
|
||||||
|
</tr>
|
||||||
|
|
||||||
|
{% for chan_dict in channelslist %}
|
||||||
|
<tr>
|
||||||
|
<td>
|
||||||
|
{% if chan_dict["enabled"] %}
|
||||||
|
<a href="{{ chan_dict["play_url"] }}">Play</a>
|
||||||
|
{% endif %}
|
||||||
|
</td>
|
||||||
|
|
||||||
|
<form method="post" action="/api/channels?method=update&redirect=%2Fchannels_editor">
|
||||||
|
<input type="hidden" name="id" value={{ chan_dict["id"] }}>
|
||||||
|
<td data-th="Channel Name"><input type="text" name="name" value={{ chan_dict["name"] }}></td>
|
||||||
|
<td data-th="Channel Calsign"><input type="text" name="callsign" value={{ chan_dict["callsign"] }}></td>
|
||||||
|
<td data-th="Channel Number"><input type="text" name="number" value={{ chan_dict["number"] }}></td>
|
||||||
|
<td data-th="Channel Thumbnail"><input type="text" name="thumbnail" value={{ chan_dict["thumbnail"] }}></td>
|
||||||
|
<td>
|
||||||
|
<select name="enabled">
|
||||||
|
{% if chan_dict["enabled"] %}
|
||||||
|
<option value=True selected>Enabled</option>
|
||||||
|
<option value=False>Disabled</option>
|
||||||
|
{% else %}
|
||||||
|
<option value=True>Enabled</option>
|
||||||
|
<option value=False selected>Disabled</option>
|
||||||
|
{% endif %}
|
||||||
|
</select>
|
||||||
|
</td>
|
||||||
|
<td>
|
||||||
|
<select name="favorite">
|
||||||
|
{% if chan_dict["favorite"] %}
|
||||||
|
<option value=1 selected>Yes</option>
|
||||||
|
<option value=0>No</option>
|
||||||
|
{% else %}
|
||||||
|
<option value=1>Yes</option>
|
||||||
|
<option value=0 selected>No</option>
|
||||||
|
{% endif %}
|
||||||
|
</select>
|
||||||
|
</td>
|
||||||
|
<td data-th="Update"><input type="submit" value="Update"></td>
|
||||||
|
</form>
|
||||||
|
|
||||||
|
<form method="post" action="/api/channels?method=update&redirect=%2Fchannels_editor">
|
||||||
|
<input type="hidden" name="id" value={{ chan_dict["id"] }}>
|
||||||
|
<input type="hidden" name="name" value={{ chan_dict["origin_name"] }}>
|
||||||
|
<input type="hidden" name="callsign" value={{ chan_dict["origin_callsign"] }}>
|
||||||
|
<input type="hidden" name="number" value={{ chan_dict["origin_number"] }}>
|
||||||
|
<input type="hidden" name="thumbnail" value={{ chan_dict["origin_thumbnail"] }}>
|
||||||
|
<input type="hidden" name="enabled" value=True>
|
||||||
|
<td data-th="Reset"><input type="submit" value="Reset"></td>
|
||||||
|
</form>
|
||||||
|
|
||||||
|
</tr>
|
||||||
|
|
||||||
|
{% endfor %}
|
||||||
|
|
||||||
|
{% endblock %}
|
||||||
50
data/www/templates/cluster.html
Normal file
50
data/www/templates/cluster.html
Normal file
@ -0,0 +1,50 @@
|
|||||||
|
{% extends "base.html" %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
|
||||||
|
<h4 style="text-align: center;">Cluster</h4>
|
||||||
|
{% if not fhdhr.config.dict["fhdhr"]["discovery_address"] %}
|
||||||
|
<p style="text-align: center;">Discovery Address must be set for SSDP/Cluster</p>
|
||||||
|
{% else %}
|
||||||
|
|
||||||
|
<div style="text-align: center;">
|
||||||
|
<button onclick="OpenLink('/api/cluster?method=scan&redirect=%2Fcluster')">Force Scan</a></button>
|
||||||
|
<button onclick="OpenLink('/api/cluster?method=disconnect&redirect=%2Fcluster')">Disconnect</a></button>
|
||||||
|
</div>
|
||||||
|
<br>
|
||||||
|
|
||||||
|
<table class="center" style="width:50%">
|
||||||
|
<tr>
|
||||||
|
<th>Name</th>
|
||||||
|
<th>Location</th>
|
||||||
|
<th>Joined</th>
|
||||||
|
<th>Options</th>
|
||||||
|
</tr>
|
||||||
|
|
||||||
|
{% for location in locations_list %}
|
||||||
|
<tr>
|
||||||
|
<td>{{ location["name"] }}</td>
|
||||||
|
<td>{{ location["location"] }}</td>
|
||||||
|
<td>{{ location["joined"] }}</td>
|
||||||
|
|
||||||
|
<td>
|
||||||
|
<div>
|
||||||
|
|
||||||
|
{% if location["joined"] in ["True", "False"] %}
|
||||||
|
<button onclick="OpenLink('{{ location["location"] }}')">Visit</a></button>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% if location["joined"] == "True" %}
|
||||||
|
<button onclick="OpenLink('/api/cluster?method=del&location={{ location["url_query"] }}&redirect=%2Fcluster')">Remove</a></button>
|
||||||
|
{% elif location["joined"] == "False" %}
|
||||||
|
<button onclick="OpenLink('/api/cluster?method=add&location={{ location["url_query"] }}&redirect=%2Fcluster')">Add</a></button>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
</div>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
{% endfor %}
|
||||||
|
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
{% endblock %}
|
||||||
11
data/www/templates/diagnostics.html
Normal file
11
data/www/templates/diagnostics.html
Normal file
@ -0,0 +1,11 @@
|
|||||||
|
{% extends "base.html" %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
|
||||||
|
{% for button_item in button_list %}
|
||||||
|
<div style="text-align: center;">
|
||||||
|
<p><button onclick="OpenLink('{{ button_item[1] }}')">{{ button_item[0] }}</a></button></p>
|
||||||
|
</div>
|
||||||
|
{% endfor %}
|
||||||
|
|
||||||
|
{% endblock %}
|
||||||
36
data/www/templates/guide.html
Normal file
36
data/www/templates/guide.html
Normal file
@ -0,0 +1,36 @@
|
|||||||
|
{% extends "base.html" %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
|
||||||
|
<h4 style="text-align: center;">What's On {{ fhdhr.config.dict["fhdhr"]["friendlyname"] }}</h4>
|
||||||
|
|
||||||
|
<p>
|
||||||
|
{% for epg_method in epg_methods %}
|
||||||
|
<button onclick="OpenLink('/guide?source={{ epg_method }}')">{{ epg_method }}</a></button>
|
||||||
|
{% endfor %}
|
||||||
|
</p>
|
||||||
|
|
||||||
|
<table class="center" style="width:100%">
|
||||||
|
<tr>
|
||||||
|
<th>Channel Name</th>
|
||||||
|
<th>Channel Number</th>
|
||||||
|
<th>Channel Thumbnail</th>
|
||||||
|
<th>Content Title</th>
|
||||||
|
<th>Content Thumbnail</th>
|
||||||
|
<th>Content Description</th>
|
||||||
|
<th>Content Remaining Time</th>
|
||||||
|
</tr>
|
||||||
|
|
||||||
|
{% for chan_dict in chan_guide_list %}
|
||||||
|
<tr>
|
||||||
|
<td>{{ chan_dict["name"] }}</td>
|
||||||
|
<td>{{ chan_dict["number"] }}</td>
|
||||||
|
<td><img src="{{ chan_dict["chan_thumbnail"] }}" alt="{{ chan_dict["name"] }}" width="100" height="100"></td>
|
||||||
|
<td>{{ chan_dict["listing_title"] }}</td>
|
||||||
|
<td><img src="{{ chan_dict["listing_thumbnail"] }}" alt="{{ chan_dict["listing_title"] }}" width="100" height="100"></td>
|
||||||
|
<td>{{ chan_dict["listing_description"] }}</td>
|
||||||
|
<td>{{ chan_dict["remaining_time"] }}</td>
|
||||||
|
</tr>
|
||||||
|
{% endfor %}
|
||||||
|
|
||||||
|
{% endblock %}
|
||||||
20
data/www/templates/index.html
Normal file
20
data/www/templates/index.html
Normal file
@ -0,0 +1,20 @@
|
|||||||
|
{% extends "base.html" %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
|
||||||
|
<h4 style="text-align: center;">fHDHR Status</h4>
|
||||||
|
|
||||||
|
<table class="center" style="width:50%">
|
||||||
|
<tr>
|
||||||
|
<th></th>
|
||||||
|
<th></th>
|
||||||
|
</tr>
|
||||||
|
|
||||||
|
{% for key in list(fhdhr_status_dict.keys()) %}
|
||||||
|
<tr>
|
||||||
|
<td>{{ key }}</td>
|
||||||
|
<td>{{ fhdhr_status_dict[key] }}</td>
|
||||||
|
</tr>
|
||||||
|
{% endfor %}
|
||||||
|
|
||||||
|
{% endblock %}
|
||||||
20
data/www/templates/origin.html
Normal file
20
data/www/templates/origin.html
Normal file
@ -0,0 +1,20 @@
|
|||||||
|
{% extends "base.html" %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
|
||||||
|
<h4 style="text-align: center;">{{ fhdhr.config.dict["main"]["servicename"] }} Status</h4>
|
||||||
|
|
||||||
|
<table class="center" style="width:50%">
|
||||||
|
<tr>
|
||||||
|
<th></th>
|
||||||
|
<th></th>
|
||||||
|
</tr>
|
||||||
|
|
||||||
|
{% for key in list(origin_status_dict.keys()) %}
|
||||||
|
<tr>
|
||||||
|
<td>{{ key }}</td>
|
||||||
|
<td>{{ origin_status_dict[key] }}</td>
|
||||||
|
</tr>
|
||||||
|
{% endfor %}
|
||||||
|
|
||||||
|
{% endblock %}
|
||||||
60
data/www/templates/settings.html
Normal file
60
data/www/templates/settings.html
Normal file
@ -0,0 +1,60 @@
|
|||||||
|
{% extends "base.html" %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
|
||||||
|
<h4 style="text-align: center;">fHDHR Settings</h4>
|
||||||
|
|
||||||
|
<h4 style="text-align: center;">Settings will require a manual restart.</h4>
|
||||||
|
|
||||||
|
{% for config_section in list(web_settings_dict.keys()) %}
|
||||||
|
|
||||||
|
{% if config_section == "origin" %}
|
||||||
|
<h4 style="text-align: center;">{{ fhdhr.config.dict["main"]["dictpopname"] }}</h4>
|
||||||
|
{% else %}
|
||||||
|
<h4 style="text-align: center;">{{ config_section }}</h4>
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
<table class="center" style="width:100%">
|
||||||
|
<tr>
|
||||||
|
<th>Config Name</th>
|
||||||
|
<th>Config Default Value</th>
|
||||||
|
<th>Config Value</th>
|
||||||
|
<th>Update</th>
|
||||||
|
<th>Reset</th>
|
||||||
|
</tr>
|
||||||
|
|
||||||
|
{% for config_item in list(web_settings_dict[config_section].keys()) %}
|
||||||
|
|
||||||
|
<tr>
|
||||||
|
<td data-th="Config Name">{{ config_item }}</td>
|
||||||
|
|
||||||
|
<td data-th="Config Default Value">{{ web_settings_dict[config_section][config_item]["value_default"] }}</td>
|
||||||
|
|
||||||
|
<form method="post" action="/api/settings?method=update&redirect=%2Fsettings">
|
||||||
|
<input type="hidden" name="config_section" value={{ config_section }}>
|
||||||
|
<input type="hidden" name="config_name" value={{ config_item }}>
|
||||||
|
<input type="hidden" name="config_default" value={{ web_settings_dict[config_section][config_item]["value_default"] }}>
|
||||||
|
{% if web_settings_dict[config_section][config_item]["hide"] %}
|
||||||
|
<td data-th="Config Value"><input type="text" size="50" name="config_value" value=**************></td>
|
||||||
|
{% else %}
|
||||||
|
<td data-th="Config Value"><input type="text" size="50" name="config_value" value={{ web_settings_dict[config_section][config_item]["value"] }}></td>
|
||||||
|
{% endif %}
|
||||||
|
<td data-th="Update"><input type="submit" value="Update"></td>
|
||||||
|
</form>
|
||||||
|
|
||||||
|
<form method="post" action="/api/settings?method=update&redirect=%2Fsettings">
|
||||||
|
<input type="hidden" name="config_section" value={{ config_section }}>
|
||||||
|
<input type="hidden" name="config_name" value={{ config_item }}>
|
||||||
|
<input type="hidden" name="config_value" value={{ web_settings_dict[config_section][config_item]["value_default"] }}>
|
||||||
|
<input type="hidden" name="config_default" value={{ web_settings_dict[config_section][config_item]["value_default"] }}>
|
||||||
|
<td data-th="Reset"><input type="submit" value="Reset"></td>
|
||||||
|
</form>
|
||||||
|
|
||||||
|
</tr>
|
||||||
|
|
||||||
|
{% endfor %}
|
||||||
|
</table>
|
||||||
|
|
||||||
|
{% endfor %}
|
||||||
|
|
||||||
|
{% endblock %}
|
||||||
43
data/www/templates/streams.html
Normal file
43
data/www/templates/streams.html
Normal file
@ -0,0 +1,43 @@
|
|||||||
|
{% extends "base.html" %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
|
||||||
|
<h4 style="text-align: center;">fHDHR Streams</h4>
|
||||||
|
|
||||||
|
<table class="center" style="width:100%">
|
||||||
|
<tr>
|
||||||
|
<th>Tuner</th>
|
||||||
|
<th>Status</th>
|
||||||
|
<th>Channel</th>
|
||||||
|
<th>Method</th>
|
||||||
|
<th>Time Active</th>
|
||||||
|
<th>Total Downloaded</th>
|
||||||
|
<th>Options</th>
|
||||||
|
</tr>
|
||||||
|
|
||||||
|
{% for tuner_dict in tuner_list %}
|
||||||
|
<tr>
|
||||||
|
<td>{{ tuner_dict["number"] }}</td>
|
||||||
|
<td>{{ tuner_dict["status"] }}</td>
|
||||||
|
{% if tuner_dict["status"] == "Active" %}
|
||||||
|
<td>{{ tuner_dict["channel_number"] }}</td>
|
||||||
|
<td>{{ tuner_dict["method"] }}</td>
|
||||||
|
<td>{{ tuner_dict["play_duration"] }}</td>
|
||||||
|
<td>{{ tuner_dict["downloaded"] }}</td>
|
||||||
|
{% else %}
|
||||||
|
<td>N/A</td>
|
||||||
|
<td>N/A</td>
|
||||||
|
<td>N/A</td>
|
||||||
|
<td>N/A</td>
|
||||||
|
{% endif %}
|
||||||
|
<td>
|
||||||
|
<div>
|
||||||
|
{% if tuner_dict["status"] in ["Active", "Acquired"] %}
|
||||||
|
<button onclick="OpenLink('/api/watch?method=close&tuner={{ tuner_dict["number"] }}&redirect=%2Fstreams')">Close</a></button>
|
||||||
|
{% endif %}
|
||||||
|
</div>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
{% endfor %}
|
||||||
|
|
||||||
|
{% endblock %}
|
||||||
20
data/www/templates/version.html
Normal file
20
data/www/templates/version.html
Normal file
@ -0,0 +1,20 @@
|
|||||||
|
{% extends "base.html" %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
|
||||||
|
<h4 style="text-align: center;">fHDHR Version Information</h4>
|
||||||
|
|
||||||
|
<table class="center" style="width:50%">
|
||||||
|
<tr>
|
||||||
|
<th></th>
|
||||||
|
<th></th>
|
||||||
|
</tr>
|
||||||
|
|
||||||
|
{% for key in list(version_dict.keys()) %}
|
||||||
|
<tr>
|
||||||
|
<td>{{ key }}</td>
|
||||||
|
<td>{{ version_dict[key] }}</td>
|
||||||
|
</tr>
|
||||||
|
{% endfor %}
|
||||||
|
|
||||||
|
{% endblock %}
|
||||||
38
data/www/templates/xmltv.html
Normal file
38
data/www/templates/xmltv.html
Normal file
@ -0,0 +1,38 @@
|
|||||||
|
{% extends "base.html" %}
|
||||||
|
|
||||||
|
{% block content %}
|
||||||
|
|
||||||
|
<h4 style="text-align: center;">xmltv</h4>
|
||||||
|
|
||||||
|
<table class="center" style="width:50%">
|
||||||
|
<tr>
|
||||||
|
<th>Version</th>
|
||||||
|
<th>XMLTV Link</th>
|
||||||
|
<th>EPG Link</th>
|
||||||
|
<th>Options</th>
|
||||||
|
</tr>
|
||||||
|
|
||||||
|
{% for epg_method in fhdhr.config.dict["main"]["valid_epg_methods"] %}
|
||||||
|
{% if epg_method not in [None, "None"] %}
|
||||||
|
{% set epg_method_name = epg_method %}
|
||||||
|
{% if epg_method == "origin" %}
|
||||||
|
{% set epg_method_name = fhdhr.config.dict["main"]["dictpopname"] %}
|
||||||
|
{% endif %}
|
||||||
|
|
||||||
|
<tr>
|
||||||
|
<td>{{ epg_method_name }}</td>
|
||||||
|
<td><a href="/api/xmltv?method=get&source={{ epg_method }}">{{ epg_method_name }}</a></td>
|
||||||
|
<td><a href="/api/epg?method=get&source={{ epg_method }}">{{ epg_method_name }}</a></td>
|
||||||
|
<td>
|
||||||
|
<div>
|
||||||
|
<button onclick="OpenLink('/api/xmltv?method=update&source={{ epg_method }}&redirect=%2Fxmltv')">Update</a></button>
|
||||||
|
<button onclick="OpenLink('/api/xmltv?method=clearcache&source={{ epg_method }}&redirect=%2Fxmltv')">Clear Cache</a></button>
|
||||||
|
</div>
|
||||||
|
</td>
|
||||||
|
</tr>
|
||||||
|
|
||||||
|
|
||||||
|
{% endif %}
|
||||||
|
{% endfor %}
|
||||||
|
|
||||||
|
{% endblock %}
|
||||||
@ -28,33 +28,14 @@ Here's the `main` section.
|
|||||||
# cache_dir =
|
# cache_dir =
|
||||||
````
|
````
|
||||||
|
|
||||||
## streaming
|
|
||||||
|
|
||||||
* `method` can be set to `ffmpeg`, `vlc` or `direct`.
|
|
||||||
* `bytes_per_read` determines how many bytes of the stream to read before sending the data to your client. Increasing this value may cause longer load times, and lowering it may effect `stuttering`.
|
|
||||||
* `origin_quality` can be set to high,medium,low for most variants. Variants that make use of m3u8 will Autoselect High for the direct method if not set. ffmpeg/vlc will determine the best stream on their own. Some Variants can allow alternative values.
|
|
||||||
* `transcode_quality` works with ffmpeg/vlc to use fHDHR for handling quality instead of the origin. Valid settings include: heavy,mobile,internet720,internet480,internet360,internet240
|
|
||||||
|
|
||||||
|
|
||||||
````
|
|
||||||
[streaming]
|
|
||||||
# method = direct
|
|
||||||
# bytes_per_read = 1152000
|
|
||||||
# origin_quality = None
|
|
||||||
# transcode_quality = None
|
|
||||||
````
|
|
||||||
|
|
||||||
|
|
||||||
## fhdhr
|
## fhdhr
|
||||||
|
|
||||||
The `fhdhr` contains all the configuration options for interfacing between this script and your media platform.
|
The `fhdhr` contains all the configuration options for interfacing between this script and your media platform.
|
||||||
* `address` and `port` are what we will allow the script to listen on. `0.0.0.0` is the default, and will respond to all.
|
* `address` and `port` are what we will allow the script to listen on. `0.0.0.0` is the default, and will respond to all.
|
||||||
* `discovery_address` may be helpful for making SSDP work properly. If `address` is not `0.0.0.0`, we will use that. If this is not set to a real IP, we won't run SSDP. SSDP is only really helpful for discovering in Plex/Emby. It's a wasted resource since you can manually add the `ip:port` of the script to Plex.
|
* `discovery_address` may be helpful for making SSDP work properly. If `address` is not `0.0.0.0`, we will use that. If this is not set to a real IP, we won't run SSDP. SSDP is only really helpful for discovering in Plex/Emby. It's a wasted resource since you can manually add the `ip:port` of the script to Plex.
|
||||||
* `tuner_count` is a limit of devices able to stream from the script. The default is 3, as per Locast's documentation. A 4th is possible, but is not reccomended.
|
* `tuner_count` is a limit of devices able to stream from the script.
|
||||||
* `friendlyname` is to set the name that Plex sees the script as.
|
* `friendlyname` is to set the name that Plex sees the script as.
|
||||||
* `reporting_*` are settings that show how the script projects itself as a hardware device.
|
* `stream_type` can be set to `ffmpeg`, `vlc` or `direct`.
|
||||||
* `device_auth` and `require_auth` are for an unimplemented Authentication feature.
|
|
||||||
* `chanscan_on_start` Scans Origin for new channels at startup.
|
|
||||||
|
|
||||||
|
|
||||||
````
|
````
|
||||||
@ -62,56 +43,61 @@ The `fhdhr` contains all the configuration options for interfacing between this
|
|||||||
# address = 0.0.0.0
|
# address = 0.0.0.0
|
||||||
# discovery_address = 0.0.0.0
|
# discovery_address = 0.0.0.0
|
||||||
# port = 5004
|
# port = 5004
|
||||||
|
# stream_type = direct
|
||||||
# tuner_count = 4
|
# tuner_count = 4
|
||||||
# friendlyname = fHDHR-Locast
|
# friendlyname = fHDHR-NextPVR
|
||||||
# reporting_firmware_name = fHDHR_Locast
|
# reporting_firmware_name = fHDHR_NextPVR
|
||||||
# reporting_manufacturer = BoronDust
|
# reporting_manufacturer = BoronDust
|
||||||
# reporting_model = fHDHR
|
# reporting_model = fHDHR
|
||||||
# reporting_firmware_ver = 20201001
|
# reporting_firmware_ver = 20201001
|
||||||
# reporting_tuner_type = Antenna
|
# reporting_tuner_type = Antenna
|
||||||
# device_auth = fHDHR
|
# device_auth = fHDHR
|
||||||
# require_auth = False
|
|
||||||
# chanscan_on_start = True
|
|
||||||
````
|
````
|
||||||
|
|
||||||
# EPG
|
# EPG
|
||||||
* `images` can be set to `proxy` or `pass`. If you choose `proxy`, images will be reverse proxied through fHDHR.
|
* `images` can be set to `proxy` or `pass`. If you choose `proxy`, images will be reverse proxied through fHDHR.
|
||||||
* `method` defaults to `origin` and will pull the xmltv data from Locast. Other Options include `blocks` which is an hourly schedule with minimal channel information. Another option is `zap2it`, which is another source of EPG information. Channel Numbers may need to be manually mapped.
|
* `method` defaults to `origin` and will pull the xmltv data from NextPVR. Other Options include `blocks` which is an hourly schedule with minimal channel information. Another option is `zap2it`, which is another source of EPG information. Channel Numbers may need to be manually mapped.
|
||||||
* `update_frequency` determines how often we check for new scheduling information. In Seconds.
|
* `update_frequency` * `epg_update_frequency` determines how often we check for new scheduling information. In Seconds.
|
||||||
* `reverse_days` allows Blocks of EPG data to be created prior to the start of the EPG Source data.
|
|
||||||
* `forward_days` allows Blocks of EPG data to be created after the end of the EPG Source data.
|
|
||||||
* `block_size` in seconds, sets the default block size for data before, after and missing timeslots.
|
|
||||||
* `xmltv_offset` allows the final xmltv file to have an offset for users with timezone issues.
|
|
||||||
|
|
||||||
````
|
````
|
||||||
[epg]
|
[epg]
|
||||||
# images = pass
|
# images = pass
|
||||||
# method = origin
|
# method = origin
|
||||||
# update_frequency = 43200
|
# update_frequency = 43200
|
||||||
# reverse_days = -1
|
|
||||||
# forward_days = 7
|
|
||||||
# block_size = 1800
|
|
||||||
# xmltv_offset = +0000
|
|
||||||
````
|
````
|
||||||
|
|
||||||
## ffmpeg
|
## ffmpeg
|
||||||
|
|
||||||
The `ffmpeg` section includes:
|
The `ffmpeg` section includes:
|
||||||
* `path` is useful if ffmpeg is not in your systems PATH, or you want to manually specify.
|
* `path` is useful if ffmpeg is not in your systems PATH, or you want to manually specify.
|
||||||
|
* `bytes_per_read` determines how many bytes of the stream to read before sending the data to your client. Increasing this value may cause longer load times, and lowering it may effect `stuttering`.
|
||||||
|
|
||||||
````
|
````
|
||||||
[ffmpeg]
|
[ffmpeg]
|
||||||
# path = ffmpeg
|
# path = ffmpeg
|
||||||
|
# bytes_per_read = 1152000
|
||||||
````
|
````
|
||||||
|
|
||||||
## vlc
|
## vlc
|
||||||
|
|
||||||
The `vlc` section includes:
|
The `vlc` section includes:
|
||||||
* `path` is useful if ffmpeg is not in your systems PATH, or you want to manually specify.
|
* `path` is useful if ffmpeg is not in your systems PATH, or you want to manually specify.
|
||||||
|
* `bytes_per_read` determines how many bytes of the stream to read before sending the data to your client. Increasing this value may cause longer load times, and lowering it may effect `stuttering`.
|
||||||
|
|
||||||
````
|
````
|
||||||
[vlc]
|
[vlc]
|
||||||
# path = cvlc
|
# path = ffmpeg
|
||||||
|
# bytes_per_read = 1152000
|
||||||
|
````
|
||||||
|
|
||||||
|
## direct_stream
|
||||||
|
|
||||||
|
The `direct_stream` section is for when you set the `[fhdhr]stream_type` to `direct`
|
||||||
|
* `chunksize` is how much data to read at a time.
|
||||||
|
|
||||||
|
````
|
||||||
|
[direct_stream]
|
||||||
|
# chunksize = 1024*1024
|
||||||
````
|
````
|
||||||
|
|
||||||
# Logging
|
# Logging
|
||||||
@ -131,27 +117,6 @@ TODO: improve documentation here.
|
|||||||
[database]
|
[database]
|
||||||
# type = sqlite
|
# type = sqlite
|
||||||
# driver = None
|
# driver = None
|
||||||
user = None
|
|
||||||
pass = None
|
|
||||||
host = None
|
|
||||||
port = None
|
|
||||||
name = None
|
|
||||||
````
|
|
||||||
|
|
||||||
## RMG
|
|
||||||
|
|
||||||
````
|
|
||||||
# enabled = True
|
|
||||||
````
|
|
||||||
|
|
||||||
## SSDP
|
|
||||||
|
|
||||||
````
|
|
||||||
# enabled = True
|
|
||||||
# max_age = 1800
|
|
||||||
# proto = ipv6
|
|
||||||
# iface = None
|
|
||||||
# multicast_address = None
|
|
||||||
````
|
````
|
||||||
|
|
||||||
## NextPVR
|
## NextPVR
|
||||||
|
|||||||
@ -1,42 +1,34 @@
|
|||||||
# coding=utf-8
|
# coding=utf-8
|
||||||
|
|
||||||
|
from .origin import OriginServiceWrapper
|
||||||
from .device import fHDHR_Device
|
from .device import fHDHR_Device
|
||||||
from .api import fHDHR_API_URLs
|
|
||||||
|
|
||||||
import fHDHR.tools
|
import fHDHR.tools
|
||||||
fHDHR_VERSION = "v0.6.0-beta"
|
|
||||||
|
fHDHR_VERSION = "v0.4.5-beta"
|
||||||
|
|
||||||
|
|
||||||
class fHDHR_INT_OBJ():
|
class fHDHR_INT_OBJ():
|
||||||
|
|
||||||
def __init__(self, settings, logger, db, plugins):
|
def __init__(self, settings, logger, db):
|
||||||
self.version = fHDHR_VERSION
|
self.version = fHDHR_VERSION
|
||||||
self.config = settings
|
self.config = settings
|
||||||
self.logger = logger
|
self.logger = logger
|
||||||
self.db = db
|
self.db = db
|
||||||
self.plugins = plugins
|
|
||||||
|
|
||||||
self.web = fHDHR.tools.WebReq()
|
self.web = fHDHR.tools.WebReq()
|
||||||
for plugin_name in list(self.plugins.plugins.keys()):
|
|
||||||
self.plugins.plugins[plugin_name].plugin_utils.web = self.web
|
|
||||||
|
|
||||||
self.api = fHDHR_API_URLs(settings, self.web)
|
|
||||||
for plugin_name in list(self.plugins.plugins.keys()):
|
|
||||||
self.plugins.plugins[plugin_name].plugin_utils.api = self.api
|
|
||||||
|
|
||||||
self.threads = {}
|
|
||||||
|
|
||||||
|
|
||||||
class fHDHR_OBJ():
|
class fHDHR_OBJ():
|
||||||
|
|
||||||
def __init__(self, settings, logger, db, plugins):
|
def __init__(self, settings, logger, db):
|
||||||
self.fhdhr = fHDHR_INT_OBJ(settings, logger, db, plugins)
|
self.fhdhr = fHDHR_INT_OBJ(settings, logger, db)
|
||||||
|
|
||||||
self.fhdhr.origins = fHDHR.origins.Origins(self.fhdhr)
|
self.origin = OriginServiceWrapper(self.fhdhr)
|
||||||
|
|
||||||
self.device = fHDHR_Device(self.fhdhr, self.fhdhr.origins)
|
self.device = fHDHR_Device(self.fhdhr, self.origin)
|
||||||
|
|
||||||
def __getattr__(self, name):
|
def __getattr__(self, name):
|
||||||
''' will only get called for undefined attributes '''
|
''' will only get called for undefined attributes '''
|
||||||
if hasattr(self.fhdhr, name):
|
if hasattr(self.fhdhr, name):
|
||||||
return eval("self.fhdhr.%s" % name)
|
return eval("self.fhdhr." + name)
|
||||||
|
|||||||
@ -1,82 +0,0 @@
|
|||||||
import urllib.parse
|
|
||||||
|
|
||||||
|
|
||||||
class Fillin_Client():
|
|
||||||
|
|
||||||
def __init__(self, settings, web):
|
|
||||||
self.config = settings
|
|
||||||
self.web = web
|
|
||||||
|
|
||||||
def __getattr__(self, name):
|
|
||||||
''' will only get called for undefined attributes '''
|
|
||||||
if hasattr(self.web.session, name):
|
|
||||||
return eval("self.web.session.%s" % name)
|
|
||||||
|
|
||||||
|
|
||||||
class fHDHR_API_URLs():
|
|
||||||
|
|
||||||
def __init__(self, settings, web):
|
|
||||||
self.config = settings
|
|
||||||
self.web = web
|
|
||||||
|
|
||||||
self.headers = {'User-Agent': "fHDHR/%s" % self.config.internal["versions"]["fHDHR"]}
|
|
||||||
|
|
||||||
# Replaced later
|
|
||||||
self.client = Fillin_Client(settings, web)
|
|
||||||
|
|
||||||
self.address = self.config.dict["fhdhr"]["address"]
|
|
||||||
self.discovery_address = self.config.dict["fhdhr"]["discovery_address"]
|
|
||||||
self.port = self.config.dict["fhdhr"]["port"]
|
|
||||||
|
|
||||||
def get(self, url, *args):
|
|
||||||
|
|
||||||
req_method = type(self.client).__name__
|
|
||||||
|
|
||||||
if not url.startswith("http"):
|
|
||||||
if not url.startswith("/"):
|
|
||||||
url = "/%s" % url
|
|
||||||
url = "%s%s" % (self.base, url)
|
|
||||||
|
|
||||||
if req_method == "FlaskClient":
|
|
||||||
self.client.get(url, headers=self.headers, *args)
|
|
||||||
else:
|
|
||||||
self.client.get(url, headers=self.headers, *args)
|
|
||||||
|
|
||||||
def post(self, url, *args):
|
|
||||||
|
|
||||||
req_method = type(self.client).__name__
|
|
||||||
|
|
||||||
if not url.startswith("http"):
|
|
||||||
if not url.startswith("/"):
|
|
||||||
url = "/%s" % url
|
|
||||||
url = "%s%s" % (self.base, url)
|
|
||||||
|
|
||||||
if req_method == "FlaskClient":
|
|
||||||
self.client.post(url, headers=self.headers, *args)
|
|
||||||
else:
|
|
||||||
self.client.post(url, headers=self.headers, *args)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def base(self):
|
|
||||||
if self.discovery_address:
|
|
||||||
return ('http://%s:%s' % self.discovery_address_tuple)
|
|
||||||
elif self.address == "0.0.0.0":
|
|
||||||
return ('http://%s:%s' % self.address_tuple)
|
|
||||||
else:
|
|
||||||
return ('http://%s:%s' % self.address_tuple)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def base_quoted(self):
|
|
||||||
return urllib.parse.quote(self.base)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def discovery_address_tuple(self):
|
|
||||||
return (self.discovery_address, int(self.port))
|
|
||||||
|
|
||||||
@property
|
|
||||||
def localhost_address_tuple(self):
|
|
||||||
return ("127.0.0.1", int(self.port))
|
|
||||||
|
|
||||||
@property
|
|
||||||
def address_tuple(self):
|
|
||||||
return (self.address, int(self.port))
|
|
||||||
@ -2,13 +2,14 @@ import os
|
|||||||
import sys
|
import sys
|
||||||
import argparse
|
import argparse
|
||||||
import time
|
import time
|
||||||
|
import multiprocessing
|
||||||
|
import threading
|
||||||
|
import platform
|
||||||
|
|
||||||
from fHDHR import fHDHR_VERSION, fHDHR_OBJ
|
from fHDHR import fHDHR_VERSION, fHDHR_OBJ
|
||||||
import fHDHR.exceptions
|
import fHDHR.exceptions
|
||||||
import fHDHR.config
|
import fHDHR.config
|
||||||
import fHDHR.logger
|
from fHDHR.http import fHDHR_HTTP_Server
|
||||||
import fHDHR.plugins
|
|
||||||
import fHDHR.origins
|
|
||||||
from fHDHR.db import fHDHRdb
|
from fHDHR.db import fHDHRdb
|
||||||
|
|
||||||
ERR_CODE = 1
|
ERR_CODE = 1
|
||||||
@ -19,6 +20,10 @@ if sys.version_info.major == 2 or sys.version_info < (3, 7):
|
|||||||
print('Error: fHDHR requires python 3.7+.')
|
print('Error: fHDHR requires python 3.7+.')
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
opersystem = platform.system()
|
||||||
|
if opersystem in ["Windows"]:
|
||||||
|
print("WARNING: This script may fail on Windows. Try Setting the `thread_method` to `threading`")
|
||||||
|
|
||||||
|
|
||||||
def build_args_parser():
|
def build_args_parser():
|
||||||
"""Build argument parser for fHDHR"""
|
"""Build argument parser for fHDHR"""
|
||||||
@ -27,38 +32,48 @@ def build_args_parser():
|
|||||||
return parser.parse_args()
|
return parser.parse_args()
|
||||||
|
|
||||||
|
|
||||||
def get_configuration(args, script_dir, fHDHR_web):
|
def get_configuration(args, script_dir):
|
||||||
if not os.path.isfile(args.cfg):
|
if not os.path.isfile(args.cfg):
|
||||||
raise fHDHR.exceptions.ConfigurationNotFound(filename=args.cfg)
|
raise fHDHR.exceptions.ConfigurationNotFound(filename=args.cfg)
|
||||||
return fHDHR.config.Config(args.cfg, script_dir, fHDHR_web)
|
return fHDHR.config.Config(args.cfg, script_dir)
|
||||||
|
|
||||||
|
|
||||||
def run(settings, logger, db, script_dir, fHDHR_web, plugins):
|
def run(settings, logger, db):
|
||||||
|
|
||||||
fhdhr = fHDHR_OBJ(settings, logger, db, plugins)
|
fhdhr = fHDHR_OBJ(settings, logger, db)
|
||||||
fhdhrweb = fHDHR_web.fHDHR_HTTP_Server(fhdhr)
|
fhdhrweb = fHDHR_HTTP_Server(fhdhr)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
|
||||||
# Start Flask Thread
|
print("HTTP Server Starting")
|
||||||
fhdhrweb.start()
|
if settings.dict["main"]["thread_method"] in ["multiprocessing"]:
|
||||||
|
fhdhr_web = multiprocessing.Process(target=fhdhrweb.run)
|
||||||
|
elif settings.dict["main"]["thread_method"] in ["threading"]:
|
||||||
|
fhdhr_web = threading.Thread(target=fhdhrweb.run)
|
||||||
|
if settings.dict["main"]["thread_method"] in ["multiprocessing", "threading"]:
|
||||||
|
fhdhr_web.start()
|
||||||
|
|
||||||
# Start SSDP Thread
|
|
||||||
if settings.dict["fhdhr"]["discovery_address"]:
|
if settings.dict["fhdhr"]["discovery_address"]:
|
||||||
fhdhr.device.ssdp.start()
|
print("SSDP Server Starting")
|
||||||
|
if settings.dict["main"]["thread_method"] in ["multiprocessing"]:
|
||||||
|
fhdhr_ssdp = multiprocessing.Process(target=fhdhr.device.ssdp.run)
|
||||||
|
elif settings.dict["main"]["thread_method"] in ["threading"]:
|
||||||
|
fhdhr_ssdp = threading.Thread(target=fhdhr.device.ssdp.run)
|
||||||
|
if settings.dict["main"]["thread_method"] in ["multiprocessing", "threading"]:
|
||||||
|
fhdhr_ssdp.start()
|
||||||
|
|
||||||
# Start EPG Thread
|
|
||||||
if settings.dict["epg"]["method"]:
|
if settings.dict["epg"]["method"]:
|
||||||
fhdhr.device.epg.start()
|
print("EPG Update Starting")
|
||||||
|
if settings.dict["main"]["thread_method"] in ["multiprocessing"]:
|
||||||
# Perform some actions now that HTTP Server is running
|
fhdhr_epg = multiprocessing.Process(target=fhdhr.device.epg.run)
|
||||||
fhdhr.api.get("/api/startup_tasks")
|
elif settings.dict["main"]["thread_method"] in ["threading"]:
|
||||||
|
fhdhr_epg = threading.Thread(target=fhdhr.device.epg.run)
|
||||||
|
if settings.dict["main"]["thread_method"] in ["multiprocessing", "threading"]:
|
||||||
|
fhdhr_epg.start()
|
||||||
|
|
||||||
# wait forever
|
# wait forever
|
||||||
restart_code = "restart"
|
while True:
|
||||||
while fhdhr.threads["flask"].is_alive():
|
time.sleep(3600)
|
||||||
time.sleep(1)
|
|
||||||
return restart_code
|
|
||||||
|
|
||||||
except KeyboardInterrupt:
|
except KeyboardInterrupt:
|
||||||
return ERR_CODE_NO_RESTART
|
return ERR_CODE_NO_RESTART
|
||||||
@ -66,52 +81,30 @@ def run(settings, logger, db, script_dir, fHDHR_web, plugins):
|
|||||||
return ERR_CODE
|
return ERR_CODE
|
||||||
|
|
||||||
|
|
||||||
def start(args, script_dir, fHDHR_web):
|
def start(args, script_dir):
|
||||||
"""Get Configuration for fHDHR and start"""
|
"""Get Configuration for fHDHR and start"""
|
||||||
|
|
||||||
try:
|
try:
|
||||||
settings = get_configuration(args, script_dir, fHDHR_web)
|
settings = get_configuration(args, script_dir)
|
||||||
except fHDHR.exceptions.ConfigurationError as e:
|
except fHDHR.exceptions.ConfigurationError as e:
|
||||||
print(e)
|
print(e)
|
||||||
return ERR_CODE_NO_RESTART
|
return ERR_CODE_NO_RESTART
|
||||||
|
|
||||||
# Find Plugins and import their default configs
|
logger = settings.logging_setup()
|
||||||
plugins = fHDHR.plugins.PluginsHandler(settings)
|
|
||||||
|
|
||||||
# Apply User Configuration
|
|
||||||
settings.user_config()
|
|
||||||
settings.config_verification()
|
|
||||||
|
|
||||||
# Setup Logging
|
|
||||||
logger = fHDHR.logger.Logger(settings)
|
|
||||||
|
|
||||||
# Setup Database
|
|
||||||
db = fHDHRdb(settings)
|
db = fHDHRdb(settings)
|
||||||
|
|
||||||
# Setup Plugins
|
return run(settings, logger, db)
|
||||||
plugins.load_plugins(logger, db)
|
|
||||||
plugins.setup()
|
|
||||||
settings.config_verification_plugins()
|
|
||||||
|
|
||||||
if not len([x for x in list(plugins.plugins.keys()) if plugins.plugins[x].type == "origin"]):
|
|
||||||
print("No Origin Plugins found.")
|
|
||||||
return ERR_CODE
|
|
||||||
|
|
||||||
return run(settings, logger, db, script_dir, fHDHR_web, plugins)
|
|
||||||
|
|
||||||
|
|
||||||
def main(script_dir, fHDHR_web):
|
def main(script_dir):
|
||||||
"""fHDHR run script entry point"""
|
"""fHDHR run script entry point"""
|
||||||
|
|
||||||
print("Loading fHDHR %s" % fHDHR_VERSION)
|
print("Loading fHDHR " + fHDHR_VERSION)
|
||||||
print("Loading fHDHR_web %s" % fHDHR_web.fHDHR_web_VERSION)
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
args = build_args_parser()
|
args = build_args_parser()
|
||||||
while True:
|
return start(args, script_dir)
|
||||||
returned_code = start(args, script_dir, fHDHR_web)
|
|
||||||
if returned_code not in ["restart"]:
|
|
||||||
return returned_code
|
|
||||||
except KeyboardInterrupt:
|
except KeyboardInterrupt:
|
||||||
print("\n\nInterrupted")
|
print("\n\nInterrupted")
|
||||||
return ERR_CODE
|
return ERR_CODE
|
||||||
|
|||||||
@ -3,6 +3,8 @@ import sys
|
|||||||
import random
|
import random
|
||||||
import configparser
|
import configparser
|
||||||
import pathlib
|
import pathlib
|
||||||
|
import logging
|
||||||
|
import subprocess
|
||||||
import platform
|
import platform
|
||||||
import json
|
import json
|
||||||
|
|
||||||
@ -13,33 +15,27 @@ from fHDHR.tools import isint, isfloat, is_arithmetic, is_docker
|
|||||||
|
|
||||||
class Config():
|
class Config():
|
||||||
|
|
||||||
def __init__(self, filename, script_dir, fHDHR_web):
|
def __init__(self, filename, script_dir):
|
||||||
self.fHDHR_web = fHDHR_web
|
|
||||||
|
|
||||||
self.internal = {}
|
self.internal = {}
|
||||||
self.conf_default = {}
|
self.conf_default = {}
|
||||||
self.dict = {}
|
self.dict = {}
|
||||||
self.internal["versions"] = {}
|
|
||||||
self.config_file = filename
|
self.config_file = filename
|
||||||
|
|
||||||
self.core_setup(script_dir)
|
self.initial_load(script_dir)
|
||||||
|
self.config_verification()
|
||||||
|
|
||||||
def core_setup(self, script_dir):
|
def initial_load(self, script_dir):
|
||||||
|
|
||||||
data_dir = pathlib.Path(script_dir).joinpath('data')
|
data_dir = pathlib.Path(script_dir).joinpath('data')
|
||||||
internal_plugins_dir = pathlib.Path(script_dir).joinpath('plugins')
|
www_dir = pathlib.Path(data_dir).joinpath('www')
|
||||||
fHDHR_web_dir = pathlib.Path(script_dir).joinpath('fHDHR_web')
|
|
||||||
www_dir = pathlib.Path(fHDHR_web_dir).joinpath('www_dir')
|
|
||||||
|
|
||||||
self.internal["paths"] = {
|
self.internal["paths"] = {
|
||||||
"script_dir": script_dir,
|
"script_dir": script_dir,
|
||||||
"data_dir": data_dir,
|
"data_dir": data_dir,
|
||||||
"plugins_dir": [internal_plugins_dir],
|
|
||||||
"cache_dir": pathlib.Path(data_dir).joinpath('cache'),
|
"cache_dir": pathlib.Path(data_dir).joinpath('cache'),
|
||||||
"internal_config": pathlib.Path(data_dir).joinpath('internal_config'),
|
"internal_config": pathlib.Path(data_dir).joinpath('internal_config'),
|
||||||
"fHDHR_web_dir": fHDHR_web_dir,
|
|
||||||
"www_dir": www_dir,
|
"www_dir": www_dir,
|
||||||
"www_templates_dir": pathlib.Path(fHDHR_web_dir).joinpath('templates'),
|
"www_templates_dir": pathlib.Path(www_dir).joinpath('templates'),
|
||||||
"font": pathlib.Path(data_dir).joinpath('garamond.ttf'),
|
"font": pathlib.Path(data_dir).joinpath('garamond.ttf'),
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -48,66 +44,21 @@ class Config():
|
|||||||
if str(conffilepath).endswith(".json"):
|
if str(conffilepath).endswith(".json"):
|
||||||
self.read_json_config(conffilepath)
|
self.read_json_config(conffilepath)
|
||||||
|
|
||||||
for file_item in os.listdir(self.internal["paths"]["fHDHR_web_dir"]):
|
print("Loading Configuration File: " + str(self.config_file))
|
||||||
file_item_path = pathlib.Path(self.internal["paths"]["fHDHR_web_dir"]).joinpath(file_item)
|
self.read_ini_config(self.config_file)
|
||||||
if str(file_item_path).endswith("_conf.json"):
|
|
||||||
self.read_json_config(file_item_path)
|
|
||||||
|
|
||||||
self.dict["epg"]["valid_methods"] = {None: {}}
|
|
||||||
self.dict["origins"] = {}
|
|
||||||
self.dict["origins"]["valid_methods"] = {}
|
|
||||||
self.dict["streaming"]["valid_methods"] = {"direct": {}}
|
|
||||||
self.dict["plugin_web_paths"] = {}
|
|
||||||
|
|
||||||
self.load_versions()
|
self.load_versions()
|
||||||
|
|
||||||
def register_web_path(self, name, path, plugin_dict_name):
|
|
||||||
self.dict["plugin_web_paths"][name.lower()] = {
|
|
||||||
"name": name,
|
|
||||||
"namespace": name.lower(),
|
|
||||||
"path": path,
|
|
||||||
"plugin": plugin_dict_name
|
|
||||||
}
|
|
||||||
|
|
||||||
def register_valid_origin_method(self, method_item):
|
|
||||||
self.dict["origins"]["valid_methods"][method_item.lower()] = {
|
|
||||||
"name": method_item,
|
|
||||||
"namespace": method_item.lower(),
|
|
||||||
}
|
|
||||||
|
|
||||||
def register_valid_streaming_method(self, method_item, plugin_dict_name):
|
|
||||||
self.dict["streaming"]["valid_methods"][method_item.lower()] = {
|
|
||||||
"name": method_item,
|
|
||||||
"namespace": method_item.lower(),
|
|
||||||
"plugin": plugin_dict_name
|
|
||||||
}
|
|
||||||
|
|
||||||
def register_valid_epg_method(self, method_item, plugin_dict_name):
|
|
||||||
self.dict["epg"]["valid_methods"][method_item.lower()] = {
|
|
||||||
"name": method_item,
|
|
||||||
"namespace": method_item.lower(),
|
|
||||||
"plugin": plugin_dict_name
|
|
||||||
}
|
|
||||||
|
|
||||||
def register_version(self, item_name, item_version, item_type):
|
|
||||||
self.internal["versions"][item_name] = {
|
|
||||||
"name": item_name,
|
|
||||||
"version": item_version,
|
|
||||||
"type": item_type
|
|
||||||
}
|
|
||||||
|
|
||||||
def import_conf_json(self, file_item_path):
|
|
||||||
self.read_json_config(file_item_path)
|
|
||||||
|
|
||||||
def load_versions(self):
|
def load_versions(self):
|
||||||
|
|
||||||
self.register_version("fHDHR", fHDHR_VERSION, "fHDHR")
|
self.internal["versions"] = {}
|
||||||
self.register_version("fHDHR_web", self.fHDHR_web.fHDHR_web_VERSION, "fHDHR")
|
|
||||||
|
|
||||||
self.register_version("Python", sys.version, "env")
|
self.internal["versions"]["fHDHR"] = fHDHR_VERSION
|
||||||
|
|
||||||
|
self.internal["versions"]["Python"] = sys.version
|
||||||
|
|
||||||
opersystem = platform.system()
|
opersystem = platform.system()
|
||||||
self.register_version("Operating System", opersystem, "env")
|
self.internal["versions"]["Operating System"] = opersystem
|
||||||
if opersystem in ["Linux", "Darwin"]:
|
if opersystem in ["Linux", "Darwin"]:
|
||||||
# Linux/Mac
|
# Linux/Mac
|
||||||
if os.getuid() == 0 or os.geteuid() == 0:
|
if os.getuid() == 0 or os.geteuid() == 0:
|
||||||
@ -120,90 +71,41 @@ class Config():
|
|||||||
print("Uncommon Operating System, use at your own risk.")
|
print("Uncommon Operating System, use at your own risk.")
|
||||||
|
|
||||||
isdocker = is_docker()
|
isdocker = is_docker()
|
||||||
self.register_version("Docker", isdocker, "env")
|
self.internal["versions"]["Docker"] = isdocker
|
||||||
|
|
||||||
def user_config(self):
|
if self.dict["fhdhr"]["stream_type"] == "ffmpeg":
|
||||||
print("Loading Configuration File: %s" % self.config_file)
|
try:
|
||||||
self.read_ini_config(self.config_file)
|
ffmpeg_command = [self.dict["ffmpeg"]["path"],
|
||||||
|
"-version",
|
||||||
|
"pipe:stdout"
|
||||||
|
]
|
||||||
|
|
||||||
def config_verification_plugins(self):
|
ffmpeg_proc = subprocess.Popen(ffmpeg_command, stdout=subprocess.PIPE)
|
||||||
required_missing = {}
|
ffmpeg_version = ffmpeg_proc.stdout.read()
|
||||||
# create dict and combine items
|
ffmpeg_proc.terminate()
|
||||||
for config_section in list(self.conf_default.keys()):
|
ffmpeg_proc.communicate()
|
||||||
for config_item in list(self.conf_default[config_section].keys()):
|
ffmpeg_version = ffmpeg_version.decode().split("version ")[1].split(" ")[0]
|
||||||
if self.conf_default[config_section][config_item]["required"]:
|
except FileNotFoundError:
|
||||||
if not self.dict[config_section][config_item]:
|
ffmpeg_version = "Missing"
|
||||||
if config_section not in list(required_missing.keys()):
|
print("Failed to find ffmpeg.")
|
||||||
required_missing[config_section] = []
|
self.internal["versions"]["ffmpeg"] = ffmpeg_version
|
||||||
required_missing[config_section].append(config_item)
|
|
||||||
for config_section in list(required_missing.keys()):
|
|
||||||
print("Warning! Required configuration options missing: [%s]%s" % (config_section, ", ".join(required_missing[config_section])))
|
|
||||||
|
|
||||||
if self.dict["epg"]["method"] and self.dict["epg"]["method"] not in ["None"]:
|
if self.dict["fhdhr"]["stream_type"] == "vlc":
|
||||||
if isinstance(self.dict["epg"]["method"], str):
|
try:
|
||||||
self.dict["epg"]["method"] = [self.dict["epg"]["method"]]
|
vlc_command = [self.dict["vlc"]["path"],
|
||||||
epg_methods = []
|
"--version",
|
||||||
for epg_method in self.dict["epg"]["method"]:
|
"pipe:stdout"
|
||||||
if epg_method in list(self.dict["epg"]["valid_methods"].keys()):
|
]
|
||||||
epg_methods.append(epg_method)
|
|
||||||
elif epg_method in list(self.dict["origins"]["valid_methods"].keys()):
|
|
||||||
epg_methods.append(epg_method)
|
|
||||||
else:
|
|
||||||
raise fHDHR.exceptions.ConfigurationError("Invalid EPG Method. Exiting...")
|
|
||||||
if self.dict["epg"]["method"]:
|
|
||||||
self.dict["epg"]["def_method"] = self.dict["epg"]["method"][0]
|
|
||||||
else:
|
|
||||||
self.dict["epg"]["def_method"] = None
|
|
||||||
|
|
||||||
if self.dict["streaming"]["method"] not in self.dict["streaming"]["valid_methods"]:
|
vlc_proc = subprocess.Popen(vlc_command, stdout=subprocess.PIPE)
|
||||||
raise fHDHR.exceptions.ConfigurationError("Invalid stream type. Exiting...")
|
vlc_version = vlc_proc.stdout.read()
|
||||||
|
vlc_proc.terminate()
|
||||||
def config_verification(self):
|
vlc_proc.communicate()
|
||||||
|
vlc_version = vlc_version.decode().split("version ")[1].split('\n')[0]
|
||||||
if not self.dict["main"]["uuid"]:
|
except FileNotFoundError:
|
||||||
self.dict["main"]["uuid"] = ''.join(random.choice("hijklmnopqrstuvwxyz") for i in range(8))
|
vlc_version = "Missing"
|
||||||
self.write('uuid', self.dict["main"]["uuid"], 'main')
|
print("Failed to find vlc.")
|
||||||
|
self.internal["versions"]["vlc"] = vlc_version
|
||||||
if self.dict["main"]["cache_dir"]:
|
|
||||||
if not pathlib.Path(self.dict["main"]["cache_dir"]).is_dir():
|
|
||||||
raise fHDHR.exceptions.ConfigurationError("Invalid Cache Directory. Exiting...")
|
|
||||||
self.internal["paths"]["cache_dir"] = pathlib.Path(self.dict["main"]["cache_dir"])
|
|
||||||
cache_dir = self.internal["paths"]["cache_dir"]
|
|
||||||
|
|
||||||
logs_dir = pathlib.Path(cache_dir).joinpath('logs')
|
|
||||||
self.internal["paths"]["logs_dir"] = logs_dir
|
|
||||||
if not logs_dir.is_dir():
|
|
||||||
logs_dir.mkdir()
|
|
||||||
|
|
||||||
self.dict["database"]["path"] = pathlib.Path(cache_dir).joinpath('fhdhr.db')
|
|
||||||
|
|
||||||
if not self.dict["fhdhr"]["discovery_address"] and self.dict["fhdhr"]["address"] != "0.0.0.0":
|
|
||||||
self.dict["fhdhr"]["discovery_address"] = self.dict["fhdhr"]["address"]
|
|
||||||
if not self.dict["fhdhr"]["discovery_address"] or self.dict["fhdhr"]["discovery_address"] == "0.0.0.0":
|
|
||||||
self.dict["fhdhr"]["discovery_address"] = None
|
|
||||||
|
|
||||||
def get_real_conf_value(self, key, confvalue):
|
|
||||||
if not confvalue:
|
|
||||||
confvalue = None
|
|
||||||
elif key == "xmltv_offset":
|
|
||||||
confvalue = str(confvalue)
|
|
||||||
elif str(confvalue) in ["0"]:
|
|
||||||
confvalue = 0
|
|
||||||
elif isint(confvalue):
|
|
||||||
confvalue = int(confvalue)
|
|
||||||
elif isfloat(confvalue):
|
|
||||||
confvalue = float(confvalue)
|
|
||||||
elif is_arithmetic(confvalue):
|
|
||||||
confvalue = eval(confvalue)
|
|
||||||
elif "," in confvalue:
|
|
||||||
confvalue = confvalue.split(",")
|
|
||||||
elif str(confvalue).lower() in ["none", ""]:
|
|
||||||
confvalue = None
|
|
||||||
elif str(confvalue).lower() in ["false"]:
|
|
||||||
confvalue = False
|
|
||||||
elif str(confvalue).lower() in ["true"]:
|
|
||||||
confvalue = True
|
|
||||||
return confvalue
|
|
||||||
|
|
||||||
def read_json_config(self, conffilepath):
|
def read_json_config(self, conffilepath):
|
||||||
with open(conffilepath, 'r') as jsonconf:
|
with open(conffilepath, 'r') as jsonconf:
|
||||||
@ -221,13 +123,27 @@ class Config():
|
|||||||
if key not in list(self.conf_default[section].keys()):
|
if key not in list(self.conf_default[section].keys()):
|
||||||
self.conf_default[section][key] = {}
|
self.conf_default[section][key] = {}
|
||||||
|
|
||||||
confvalue = self.get_real_conf_value(key, confimport[section][key]["value"])
|
confvalue = confimport[section][key]["value"]
|
||||||
|
if isint(confvalue):
|
||||||
|
confvalue = int(confvalue)
|
||||||
|
elif isfloat(confvalue):
|
||||||
|
confvalue = float(confvalue)
|
||||||
|
elif is_arithmetic(confvalue):
|
||||||
|
confvalue = eval(confvalue)
|
||||||
|
elif "," in confvalue:
|
||||||
|
confvalue = confvalue.split(",")
|
||||||
|
elif str(confvalue).lower() in ["none"]:
|
||||||
|
confvalue = None
|
||||||
|
elif str(confvalue).lower() in ["false"]:
|
||||||
|
confvalue = False
|
||||||
|
elif str(confvalue).lower() in ["true"]:
|
||||||
|
confvalue = True
|
||||||
|
|
||||||
self.dict[section][key] = confvalue
|
self.dict[section][key] = confvalue
|
||||||
|
|
||||||
self.conf_default[section][key]["value"] = confvalue
|
self.conf_default[section][key]["value"] = confvalue
|
||||||
|
|
||||||
for config_option in ["config_web_hidden", "config_file", "config_web", "required"]:
|
for config_option in ["config_web_hidden", "config_file", "config_web"]:
|
||||||
if config_option not in list(confimport[section][key].keys()):
|
if config_option not in list(confimport[section][key].keys()):
|
||||||
config_option_value = False
|
config_option_value = False
|
||||||
else:
|
else:
|
||||||
@ -247,7 +163,22 @@ class Config():
|
|||||||
if each_section.lower() not in list(self.dict.keys()):
|
if each_section.lower() not in list(self.dict.keys()):
|
||||||
self.dict[each_section.lower()] = {}
|
self.dict[each_section.lower()] = {}
|
||||||
for (each_key, each_val) in config_handler.items(each_section):
|
for (each_key, each_val) in config_handler.items(each_section):
|
||||||
each_val = self.get_real_conf_value(each_key, each_val)
|
if not each_val:
|
||||||
|
each_val = None
|
||||||
|
elif each_val.lower() in ["none"]:
|
||||||
|
each_val = None
|
||||||
|
elif each_val.lower() in ["false"]:
|
||||||
|
each_val = False
|
||||||
|
elif each_val.lower() in ["true"]:
|
||||||
|
each_val = True
|
||||||
|
elif isint(each_val):
|
||||||
|
each_val = int(each_val)
|
||||||
|
elif isfloat(each_val):
|
||||||
|
each_val = float(each_val)
|
||||||
|
elif is_arithmetic(each_val):
|
||||||
|
each_val = eval(each_val)
|
||||||
|
elif "," in each_val:
|
||||||
|
each_val = each_val.split(",")
|
||||||
|
|
||||||
import_val = True
|
import_val = True
|
||||||
if each_section in list(self.conf_default.keys()):
|
if each_section in list(self.conf_default.keys()):
|
||||||
@ -258,24 +189,11 @@ class Config():
|
|||||||
if import_val:
|
if import_val:
|
||||||
self.dict[each_section.lower()][each_key.lower()] = each_val
|
self.dict[each_section.lower()][each_key.lower()] = each_val
|
||||||
|
|
||||||
def write(self, key, value, section):
|
def write(self, section, key, value):
|
||||||
|
if section == self.dict["main"]["dictpopname"]:
|
||||||
if not value:
|
self.dict["origin"][key] = value
|
||||||
value = None
|
else:
|
||||||
if value.lower() in ["none"]:
|
self.dict[section][key] = value
|
||||||
value = None
|
|
||||||
elif value.lower() in ["false"]:
|
|
||||||
value = False
|
|
||||||
elif value.lower() in ["true"]:
|
|
||||||
value = True
|
|
||||||
elif isint(value):
|
|
||||||
value = int(value)
|
|
||||||
elif isfloat(value):
|
|
||||||
value = float(value)
|
|
||||||
elif isinstance(value, list):
|
|
||||||
",".join(value)
|
|
||||||
|
|
||||||
self.dict[section][key] = value
|
|
||||||
|
|
||||||
config_handler = configparser.ConfigParser()
|
config_handler = configparser.ConfigParser()
|
||||||
config_handler.read(self.config_file)
|
config_handler.read(self.config_file)
|
||||||
@ -283,11 +201,100 @@ class Config():
|
|||||||
if not config_handler.has_section(section):
|
if not config_handler.has_section(section):
|
||||||
config_handler.add_section(section)
|
config_handler.add_section(section)
|
||||||
|
|
||||||
config_handler.set(section, key, str(value))
|
config_handler.set(section, key, value)
|
||||||
|
|
||||||
with open(self.config_file, 'w') as config_file:
|
with open(self.config_file, 'w') as config_file:
|
||||||
config_handler.write(config_file)
|
config_handler.write(config_file)
|
||||||
|
|
||||||
|
def config_verification(self):
|
||||||
|
|
||||||
|
if self.dict["main"]["thread_method"] not in ["threading", "multiprocessing"]:
|
||||||
|
raise fHDHR.exceptions.ConfigurationError("Invalid Threading Method. Exiting...")
|
||||||
|
|
||||||
|
if self.dict["main"]["required"]:
|
||||||
|
required_missing = []
|
||||||
|
if isinstance(self.dict["main"]["required"], str):
|
||||||
|
self.dict["main"]["required"] = [self.dict["main"]["required"]]
|
||||||
|
if len(self.dict["main"]["required"]):
|
||||||
|
for req_item in self.dict["main"]["required"]:
|
||||||
|
req_section = req_item.split("/")[0]
|
||||||
|
req_key = req_item.split("/")[1]
|
||||||
|
if not self.dict[req_section][req_key]:
|
||||||
|
required_missing.append(req_item)
|
||||||
|
if len(required_missing):
|
||||||
|
raise fHDHR.exceptions.ConfigurationError("Required configuration options missing: " + ", ".join(required_missing))
|
||||||
|
|
||||||
|
self.dict["origin"] = self.dict.pop(self.dict["main"]["dictpopname"])
|
||||||
|
|
||||||
|
if isinstance(self.dict["main"]["valid_epg_methods"], str):
|
||||||
|
self.dict["main"]["valid_epg_methods"] = [self.dict["main"]["valid_epg_methods"]]
|
||||||
|
|
||||||
|
if self.dict["epg"]["method"] and self.dict["epg"]["method"] not in ["None"]:
|
||||||
|
if isinstance(self.dict["epg"]["method"], str):
|
||||||
|
self.dict["epg"]["method"] = [self.dict["epg"]["method"]]
|
||||||
|
epg_methods = []
|
||||||
|
for epg_method in self.dict["epg"]["method"]:
|
||||||
|
if epg_method == self.dict["main"]["dictpopname"] or epg_method == "origin":
|
||||||
|
epg_methods.append("origin")
|
||||||
|
elif epg_method in ["None"]:
|
||||||
|
raise fHDHR.exceptions.ConfigurationError("Invalid EPG Method. Exiting...")
|
||||||
|
elif epg_method in self.dict["main"]["valid_epg_methods"]:
|
||||||
|
epg_methods.append(epg_method)
|
||||||
|
else:
|
||||||
|
raise fHDHR.exceptions.ConfigurationError("Invalid EPG Method. Exiting...")
|
||||||
|
self.dict["epg"]["def_method"] = self.dict["epg"]["method"][0]
|
||||||
|
|
||||||
|
if not self.dict["main"]["uuid"]:
|
||||||
|
self.dict["main"]["uuid"] = ''.join(random.choice("hijklmnopqrstuvwxyz") for i in range(8))
|
||||||
|
self.write('main', 'uuid', self.dict["main"]["uuid"])
|
||||||
|
|
||||||
|
if self.dict["main"]["cache_dir"]:
|
||||||
|
if not pathlib.Path(self.dict["main"]["cache_dir"]).is_dir():
|
||||||
|
raise fHDHR.exceptions.ConfigurationError("Invalid Cache Directory. Exiting...")
|
||||||
|
self.internal["paths"]["cache_dir"] = pathlib.Path(self.dict["main"]["cache_dir"])
|
||||||
|
cache_dir = self.internal["paths"]["cache_dir"]
|
||||||
|
|
||||||
|
logs_dir = pathlib.Path(cache_dir).joinpath('logs')
|
||||||
|
self.internal["paths"]["logs_dir"] = logs_dir
|
||||||
|
if not logs_dir.is_dir():
|
||||||
|
logs_dir.mkdir()
|
||||||
|
|
||||||
|
self.dict["database"]["path"] = pathlib.Path(cache_dir).joinpath('fhdhr.db')
|
||||||
|
|
||||||
|
if self.dict["fhdhr"]["stream_type"] not in ["direct", "ffmpeg", "vlc"]:
|
||||||
|
raise fHDHR.exceptions.ConfigurationError("Invalid stream type. Exiting...")
|
||||||
|
|
||||||
|
if not self.dict["fhdhr"]["discovery_address"] and self.dict["fhdhr"]["address"] != "0.0.0.0":
|
||||||
|
self.dict["fhdhr"]["discovery_address"] = self.dict["fhdhr"]["address"]
|
||||||
|
if not self.dict["fhdhr"]["discovery_address"] or self.dict["fhdhr"]["discovery_address"] == "0.0.0.0":
|
||||||
|
self.dict["fhdhr"]["discovery_address"] = None
|
||||||
|
|
||||||
|
def logging_setup(self):
|
||||||
|
|
||||||
|
log_level = self.dict["logging"]["level"].upper()
|
||||||
|
|
||||||
|
# Create a custom logger
|
||||||
|
logging.basicConfig(format='%(name)s - %(levelname)s - %(message)s', level=log_level)
|
||||||
|
logger = logging.getLogger('fHDHR')
|
||||||
|
log_file = os.path.join(self.internal["paths"]["logs_dir"], 'fHDHR.log')
|
||||||
|
|
||||||
|
# Create handlers
|
||||||
|
# c_handler = logging.StreamHandler()
|
||||||
|
f_handler = logging.FileHandler(log_file)
|
||||||
|
# c_handler.setLevel(log_level)
|
||||||
|
f_handler.setLevel(log_level)
|
||||||
|
|
||||||
|
# Create formatters and add it to handlers
|
||||||
|
# c_format = logging.Formatter('%(name)s - %(levelname)s - %(message)s')
|
||||||
|
f_format = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
|
||||||
|
# c_handler.setFormatter(c_format)
|
||||||
|
f_handler.setFormatter(f_format)
|
||||||
|
|
||||||
|
# Add handlers to the logger
|
||||||
|
# logger.addHandler(c_handler)
|
||||||
|
logger.addHandler(f_handler)
|
||||||
|
return logger
|
||||||
|
|
||||||
def __getattr__(self, name):
|
def __getattr__(self, name):
|
||||||
''' will only get called for undefined attributes '''
|
''' will only get called for undefined attributes '''
|
||||||
if name in list(self.dict.keys()):
|
if name in list(self.dict.keys()):
|
||||||
|
|||||||
@ -32,10 +32,28 @@ MYSQL_TABLE_ARGS = {'mysql_engine': 'InnoDB',
|
|||||||
'mysql_collate': 'utf8mb4_unicode_ci'}
|
'mysql_collate': 'utf8mb4_unicode_ci'}
|
||||||
|
|
||||||
|
|
||||||
class PluginValues(BASE):
|
class ChannelValues(BASE):
|
||||||
__tablename__ = 'plugin_values'
|
__tablename__ = 'channel_values'
|
||||||
__table_args__ = MYSQL_TABLE_ARGS
|
__table_args__ = MYSQL_TABLE_ARGS
|
||||||
pluginitem = Column(String(255), primary_key=True)
|
channel = Column(String(255), primary_key=True)
|
||||||
|
namespace = Column(String(255), primary_key=True)
|
||||||
|
key = Column(String(255), primary_key=True)
|
||||||
|
value = Column(Text())
|
||||||
|
|
||||||
|
|
||||||
|
class ProgramValues(BASE):
|
||||||
|
__tablename__ = 'program_values'
|
||||||
|
__table_args__ = MYSQL_TABLE_ARGS
|
||||||
|
program = Column(String(255), primary_key=True)
|
||||||
|
namespace = Column(String(255), primary_key=True)
|
||||||
|
key = Column(String(255), primary_key=True)
|
||||||
|
value = Column(Text())
|
||||||
|
|
||||||
|
|
||||||
|
class CacheValues(BASE):
|
||||||
|
__tablename__ = 'cache_values'
|
||||||
|
__table_args__ = MYSQL_TABLE_ARGS
|
||||||
|
cacheitem = Column(String(255), primary_key=True)
|
||||||
namespace = Column(String(255), primary_key=True)
|
namespace = Column(String(255), primary_key=True)
|
||||||
key = Column(String(255), primary_key=True)
|
key = Column(String(255), primary_key=True)
|
||||||
value = Column(Text())
|
value = Column(Text())
|
||||||
@ -130,6 +148,198 @@ class fHDHRdb(object):
|
|||||||
def get_uri(self):
|
def get_uri(self):
|
||||||
return self.url
|
return self.url
|
||||||
|
|
||||||
|
# Channel Values
|
||||||
|
|
||||||
|
def set_channel_value(self, channel, key, value, namespace='default'):
|
||||||
|
channel = channel.lower()
|
||||||
|
value = json.dumps(value, ensure_ascii=False)
|
||||||
|
session = self.ssession()
|
||||||
|
try:
|
||||||
|
result = session.query(ChannelValues) \
|
||||||
|
.filter(ChannelValues.channel == channel)\
|
||||||
|
.filter(ChannelValues.namespace == namespace)\
|
||||||
|
.filter(ChannelValues.key == key) \
|
||||||
|
.one_or_none()
|
||||||
|
# ChannelValues exists, update
|
||||||
|
if result:
|
||||||
|
result.value = value
|
||||||
|
session.commit()
|
||||||
|
# DNE - Insert
|
||||||
|
else:
|
||||||
|
new_channelvalue = ChannelValues(channel=channel, namespace=namespace, key=key, value=value)
|
||||||
|
session.add(new_channelvalue)
|
||||||
|
session.commit()
|
||||||
|
except SQLAlchemyError:
|
||||||
|
session.rollback()
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
session.close()
|
||||||
|
|
||||||
|
def get_channel_value(self, channel, key, namespace='default'):
|
||||||
|
channel = channel.lower()
|
||||||
|
session = self.ssession()
|
||||||
|
try:
|
||||||
|
result = session.query(ChannelValues) \
|
||||||
|
.filter(ChannelValues.channel == channel)\
|
||||||
|
.filter(ChannelValues.namespace == namespace)\
|
||||||
|
.filter(ChannelValues.key == key) \
|
||||||
|
.one_or_none()
|
||||||
|
if result is not None:
|
||||||
|
result = result.value
|
||||||
|
return _deserialize(result)
|
||||||
|
except SQLAlchemyError:
|
||||||
|
session.rollback()
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
session.close()
|
||||||
|
|
||||||
|
def delete_channel_value(self, channel, key, namespace='default'):
|
||||||
|
channel = channel.lower()
|
||||||
|
session = self.ssession()
|
||||||
|
try:
|
||||||
|
result = session.query(ChannelValues) \
|
||||||
|
.filter(ChannelValues.channel == channel)\
|
||||||
|
.filter(ChannelValues.namespace == namespace)\
|
||||||
|
.filter(ChannelValues.key == key) \
|
||||||
|
.one_or_none()
|
||||||
|
# ChannelValues exists, delete
|
||||||
|
if result:
|
||||||
|
session.delete(result)
|
||||||
|
session.commit()
|
||||||
|
except SQLAlchemyError:
|
||||||
|
session.rollback()
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
session.close()
|
||||||
|
|
||||||
|
# Program Values
|
||||||
|
|
||||||
|
def set_program_value(self, program, key, value, namespace='default'):
|
||||||
|
program = program.lower()
|
||||||
|
value = json.dumps(value, ensure_ascii=False)
|
||||||
|
session = self.ssession()
|
||||||
|
try:
|
||||||
|
result = session.query(ProgramValues) \
|
||||||
|
.filter(ProgramValues.program == program)\
|
||||||
|
.filter(ProgramValues.namespace == namespace)\
|
||||||
|
.filter(ProgramValues.key == key) \
|
||||||
|
.one_or_none()
|
||||||
|
# ProgramValue exists, update
|
||||||
|
if result:
|
||||||
|
result.value = value
|
||||||
|
session.commit()
|
||||||
|
# DNE - Insert
|
||||||
|
else:
|
||||||
|
new_programvalue = ProgramValues(program=program, namespace=namespace, key=key, value=value)
|
||||||
|
session.add(new_programvalue)
|
||||||
|
session.commit()
|
||||||
|
except SQLAlchemyError:
|
||||||
|
session.rollback()
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
session.close()
|
||||||
|
|
||||||
|
def get_program_value(self, program, key, namespace='default'):
|
||||||
|
program = program.lower()
|
||||||
|
session = self.ssession()
|
||||||
|
try:
|
||||||
|
result = session.query(ProgramValues) \
|
||||||
|
.filter(ProgramValues.program == program)\
|
||||||
|
.filter(ProgramValues.namespace == namespace)\
|
||||||
|
.filter(ProgramValues.key == key) \
|
||||||
|
.one_or_none()
|
||||||
|
if result is not None:
|
||||||
|
result = result.value
|
||||||
|
return _deserialize(result)
|
||||||
|
except SQLAlchemyError:
|
||||||
|
session.rollback()
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
session.close()
|
||||||
|
|
||||||
|
def delete_program_value(self, program, key, namespace='default'):
|
||||||
|
program = program.lower()
|
||||||
|
session = self.ssession()
|
||||||
|
try:
|
||||||
|
result = session.query(ProgramValues) \
|
||||||
|
.filter(ProgramValues.program == program)\
|
||||||
|
.filter(ProgramValues.namespace == namespace)\
|
||||||
|
.filter(ProgramValues.key == key) \
|
||||||
|
.one_or_none()
|
||||||
|
# ProgramValue exists, delete
|
||||||
|
if result:
|
||||||
|
session.delete(result)
|
||||||
|
session.commit()
|
||||||
|
except SQLAlchemyError:
|
||||||
|
session.rollback()
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
session.close()
|
||||||
|
|
||||||
|
# Cache Values
|
||||||
|
|
||||||
|
def set_cacheitem_value(self, cacheitem, key, value, namespace='default'):
|
||||||
|
cacheitem = cacheitem.lower()
|
||||||
|
value = json.dumps(value, ensure_ascii=False)
|
||||||
|
session = self.ssession()
|
||||||
|
try:
|
||||||
|
result = session.query(CacheValues) \
|
||||||
|
.filter(CacheValues.cacheitem == cacheitem)\
|
||||||
|
.filter(CacheValues.namespace == namespace)\
|
||||||
|
.filter(CacheValues.key == key) \
|
||||||
|
.one_or_none()
|
||||||
|
# ProgramValue exists, update
|
||||||
|
if result:
|
||||||
|
result.value = value
|
||||||
|
session.commit()
|
||||||
|
# DNE - Insert
|
||||||
|
else:
|
||||||
|
new_cacheitemvalue = CacheValues(cacheitem=cacheitem, namespace=namespace, key=key, value=value)
|
||||||
|
session.add(new_cacheitemvalue)
|
||||||
|
session.commit()
|
||||||
|
except SQLAlchemyError:
|
||||||
|
session.rollback()
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
session.close()
|
||||||
|
|
||||||
|
def get_cacheitem_value(self, cacheitem, key, namespace='default'):
|
||||||
|
cacheitem = cacheitem.lower()
|
||||||
|
session = self.ssession()
|
||||||
|
try:
|
||||||
|
result = session.query(CacheValues) \
|
||||||
|
.filter(CacheValues.cacheitem == cacheitem)\
|
||||||
|
.filter(CacheValues.namespace == namespace)\
|
||||||
|
.filter(CacheValues.key == key) \
|
||||||
|
.one_or_none()
|
||||||
|
if result is not None:
|
||||||
|
result = result.value
|
||||||
|
return _deserialize(result)
|
||||||
|
except SQLAlchemyError:
|
||||||
|
session.rollback()
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
session.close()
|
||||||
|
|
||||||
|
def delete_cacheitem_value(self, cacheitem, key, namespace='default'):
|
||||||
|
cacheitem = cacheitem.lower()
|
||||||
|
session = self.ssession()
|
||||||
|
try:
|
||||||
|
result = session.query(CacheValues) \
|
||||||
|
.filter(CacheValues.cacheitem == cacheitem)\
|
||||||
|
.filter(CacheValues.namespace == namespace)\
|
||||||
|
.filter(CacheValues.key == key) \
|
||||||
|
.one_or_none()
|
||||||
|
# ProgramValue exists, delete
|
||||||
|
if result:
|
||||||
|
session.delete(result)
|
||||||
|
session.commit()
|
||||||
|
except SQLAlchemyError:
|
||||||
|
session.rollback()
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
session.close()
|
||||||
|
|
||||||
# fHDHR Values
|
# fHDHR Values
|
||||||
|
|
||||||
def set_fhdhr_value(self, item, key, value, namespace='default'):
|
def set_fhdhr_value(self, item, key, value, namespace='default'):
|
||||||
@ -148,8 +358,8 @@ class fHDHRdb(object):
|
|||||||
session.commit()
|
session.commit()
|
||||||
# DNE - Insert
|
# DNE - Insert
|
||||||
else:
|
else:
|
||||||
new_pluginitemvalue = fHDHRValues(item=item, namespace=namespace, key=key, value=value)
|
new_cacheitemvalue = fHDHRValues(item=item, namespace=namespace, key=key, value=value)
|
||||||
session.add(new_pluginitemvalue)
|
session.add(new_cacheitemvalue)
|
||||||
session.commit()
|
session.commit()
|
||||||
except SQLAlchemyError:
|
except SQLAlchemyError:
|
||||||
session.rollback()
|
session.rollback()
|
||||||
@ -193,67 +403,3 @@ class fHDHRdb(object):
|
|||||||
raise
|
raise
|
||||||
finally:
|
finally:
|
||||||
session.close()
|
session.close()
|
||||||
|
|
||||||
# Plugin Values
|
|
||||||
|
|
||||||
def set_plugin_value(self, pluginitem, key, value, namespace='default'):
|
|
||||||
pluginitem = pluginitem.lower()
|
|
||||||
value = json.dumps(value, ensure_ascii=False)
|
|
||||||
session = self.ssession()
|
|
||||||
try:
|
|
||||||
result = session.query(PluginValues) \
|
|
||||||
.filter(PluginValues.pluginitem == pluginitem)\
|
|
||||||
.filter(PluginValues.namespace == namespace)\
|
|
||||||
.filter(PluginValues.key == key) \
|
|
||||||
.one_or_none()
|
|
||||||
# ProgramValue exists, update
|
|
||||||
if result:
|
|
||||||
result.value = value
|
|
||||||
session.commit()
|
|
||||||
# DNE - Insert
|
|
||||||
else:
|
|
||||||
new_pluginitemvalue = PluginValues(pluginitem=pluginitem, namespace=namespace, key=key, value=value)
|
|
||||||
session.add(new_pluginitemvalue)
|
|
||||||
session.commit()
|
|
||||||
except SQLAlchemyError:
|
|
||||||
session.rollback()
|
|
||||||
raise
|
|
||||||
finally:
|
|
||||||
session.close()
|
|
||||||
|
|
||||||
def get_plugin_value(self, pluginitem, key, namespace='default'):
|
|
||||||
pluginitem = pluginitem.lower()
|
|
||||||
session = self.ssession()
|
|
||||||
try:
|
|
||||||
result = session.query(PluginValues) \
|
|
||||||
.filter(PluginValues.pluginitem == pluginitem)\
|
|
||||||
.filter(PluginValues.namespace == namespace)\
|
|
||||||
.filter(PluginValues.key == key) \
|
|
||||||
.one_or_none()
|
|
||||||
if result is not None:
|
|
||||||
result = result.value
|
|
||||||
return _deserialize(result)
|
|
||||||
except SQLAlchemyError:
|
|
||||||
session.rollback()
|
|
||||||
raise
|
|
||||||
finally:
|
|
||||||
session.close()
|
|
||||||
|
|
||||||
def delete_plugin_value(self, pluginitem, key, namespace='default'):
|
|
||||||
pluginitem = pluginitem.lower()
|
|
||||||
session = self.ssession()
|
|
||||||
try:
|
|
||||||
result = session.query(PluginValues) \
|
|
||||||
.filter(PluginValues.pluginitem == pluginitem)\
|
|
||||||
.filter(PluginValues.namespace == namespace)\
|
|
||||||
.filter(PluginValues.key == key) \
|
|
||||||
.one_or_none()
|
|
||||||
# ProgramValue exists, delete
|
|
||||||
if result:
|
|
||||||
session.delete(result)
|
|
||||||
session.commit()
|
|
||||||
except SQLAlchemyError:
|
|
||||||
session.rollback()
|
|
||||||
raise
|
|
||||||
finally:
|
|
||||||
session.close()
|
|
||||||
|
|||||||
@ -2,33 +2,25 @@ from .channels import Channels
|
|||||||
from .epg import EPG
|
from .epg import EPG
|
||||||
from .tuners import Tuners
|
from .tuners import Tuners
|
||||||
from .images import imageHandler
|
from .images import imageHandler
|
||||||
|
from .station_scan import Station_Scan
|
||||||
from .ssdp import SSDPServer
|
from .ssdp import SSDPServer
|
||||||
|
from .cluster import fHDHR_Cluster
|
||||||
|
|
||||||
|
|
||||||
class fHDHR_Device():
|
class fHDHR_Device():
|
||||||
|
|
||||||
def __init__(self, fhdhr, origins):
|
def __init__(self, fhdhr, origin):
|
||||||
self.fhdhr = fhdhr
|
|
||||||
|
|
||||||
self.channels = Channels(fhdhr, origins)
|
self.channels = Channels(fhdhr, origin)
|
||||||
|
|
||||||
self.epg = EPG(fhdhr, self.channels, origins)
|
self.epg = EPG(fhdhr, self.channels, origin)
|
||||||
|
|
||||||
self.tuners = Tuners(fhdhr, self.epg, self.channels)
|
self.tuners = Tuners(fhdhr, self.epg, self.channels)
|
||||||
|
|
||||||
self.images = imageHandler(fhdhr, self.epg)
|
self.images = imageHandler(fhdhr, self.epg)
|
||||||
|
|
||||||
|
self.station_scan = Station_Scan(fhdhr, self.channels)
|
||||||
|
|
||||||
self.ssdp = SSDPServer(fhdhr)
|
self.ssdp = SSDPServer(fhdhr)
|
||||||
|
|
||||||
self.interfaces = {}
|
self.cluster = fHDHR_Cluster(fhdhr, self.ssdp)
|
||||||
|
|
||||||
for plugin_name in list(self.fhdhr.plugins.plugins.keys()):
|
|
||||||
if self.fhdhr.plugins.plugins[plugin_name].manifest["type"] == "interface":
|
|
||||||
method = self.fhdhr.plugins.plugins[plugin_name].name.lower()
|
|
||||||
plugin_utils = self.fhdhr.plugins.plugins[plugin_name].plugin_utils
|
|
||||||
plugin_utils.channels = self.channels
|
|
||||||
plugin_utils.epg = self.epg
|
|
||||||
plugin_utils.tuners = self.tuners
|
|
||||||
plugin_utils.images = self.images
|
|
||||||
plugin_utils.ssdp = self.ssdp
|
|
||||||
self.interfaces[method] = self.fhdhr.plugins.plugins[plugin_name].Plugin_OBJ(fhdhr, plugin_utils)
|
|
||||||
|
|||||||
@ -1,6 +1,7 @@
|
|||||||
|
import datetime
|
||||||
import time
|
import time
|
||||||
|
|
||||||
from fHDHR.tools import humanized_time
|
from fHDHR.tools import hours_between_datetime
|
||||||
|
|
||||||
from .channel import Channel
|
from .channel import Channel
|
||||||
from .chan_ident import Channel_IDs
|
from .chan_ident import Channel_IDs
|
||||||
@ -8,111 +9,47 @@ from .chan_ident import Channel_IDs
|
|||||||
|
|
||||||
class Channels():
|
class Channels():
|
||||||
|
|
||||||
def __init__(self, fhdhr, origins):
|
def __init__(self, fhdhr, origin):
|
||||||
self.fhdhr = fhdhr
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
self.origins = origins
|
self.origin = origin
|
||||||
|
|
||||||
self.id_system = Channel_IDs(fhdhr, origins)
|
self.id_system = Channel_IDs(fhdhr)
|
||||||
|
|
||||||
self.list = {}
|
self.list = {}
|
||||||
for origin in list(self.origins.origins_dict.keys()):
|
self.list_update_time = None
|
||||||
self.list[origin] = {}
|
|
||||||
|
|
||||||
self.get_db_channels()
|
self.get_db_channels()
|
||||||
|
haseverscanned = self.fhdhr.db.get_fhdhr_value("channels", "scanned_time")
|
||||||
|
if (self.fhdhr.config.dict["fhdhr"]["chanscan_on_start"] or not haseverscanned):
|
||||||
|
self.get_channels()
|
||||||
|
|
||||||
def get_channel_obj(self, keyfind, valfind, origin=None):
|
def get_channel_obj(self, keyfind, valfind):
|
||||||
if origin:
|
return next(self.list[fhdhr_id] for fhdhr_id in list(self.list.keys()) if self.list[fhdhr_id].dict[keyfind] == valfind)
|
||||||
origin = origin.lower()
|
|
||||||
if keyfind == "number":
|
|
||||||
matches = [self.list[origin][x].dict["id"] for x in list(self.list[origin].keys()) if self.list[origin][x].number == valfind]
|
|
||||||
else:
|
|
||||||
matches = [self.list[origin][x].dict["id"] for x in list(self.list[origin].keys()) if self.list[origin][x].dict[keyfind] == valfind]
|
|
||||||
if len(matches):
|
|
||||||
return self.list[origin][matches[0]]
|
|
||||||
else:
|
|
||||||
matches = []
|
|
||||||
for origin in list(self.list.keys()):
|
|
||||||
if keyfind == "number":
|
|
||||||
matches = [self.list[origin][x].dict["id"] for x in list(self.list[origin].keys()) if self.list[origin][x].number == valfind]
|
|
||||||
else:
|
|
||||||
matches = [self.list[origin][x].dict["id"] for x in list(self.list[origin].keys()) if self.list[origin][x].dict[keyfind] == valfind]
|
|
||||||
if len(matches):
|
|
||||||
return self.list[origin][matches[0]]
|
|
||||||
if len(matches):
|
|
||||||
return self.list[origin][matches[0]]
|
|
||||||
return None
|
|
||||||
|
|
||||||
def get_channel_list(self, keyfind, origin=None):
|
def get_channel_list(self, keyfind):
|
||||||
if origin:
|
return [self.list[x].dict[keyfind] for x in list(self.list.keys())]
|
||||||
if keyfind == "number":
|
|
||||||
return [self.list[origin][x].number for x in [x["id"] for x in self.get_channels(origin)]]
|
|
||||||
else:
|
|
||||||
return [self.list[origin][x].dict[keyfind] for x in [x["id"] for x in self.get_channels(origin)]]
|
|
||||||
else:
|
|
||||||
matches = []
|
|
||||||
for origin in list(self.list.keys()):
|
|
||||||
if keyfind == "number":
|
|
||||||
next_match = [self.list[origin][x].number for x in [x["id"] for x in self.get_channels(origin)]]
|
|
||||||
else:
|
|
||||||
next_match = [self.list[origin][x].dict[keyfind] for x in [x["id"] for x in self.get_channels(origin)]]
|
|
||||||
if len(next_match):
|
|
||||||
matches.append(next_match)
|
|
||||||
return matches[0]
|
|
||||||
|
|
||||||
def get_channel_dict(self, keyfind, valfind, origin=None):
|
def set_channel_status(self, keyfind, valfind, updatedict):
|
||||||
chan_obj = self.get_channel_obj(keyfind, valfind, origin)
|
self.get_channel_obj(keyfind, valfind).set_status(updatedict)
|
||||||
if chan_obj:
|
|
||||||
return chan_obj.dict
|
|
||||||
return None
|
|
||||||
|
|
||||||
def set_channel_status(self, keyfind, valfind, updatedict, origin):
|
def set_channel_enablement(self, keyfind, valfind, enablement):
|
||||||
self.get_channel_obj(keyfind, valfind, origin).set_status(updatedict)
|
self.get_channel_obj(keyfind, valfind).set_enablement(enablement)
|
||||||
|
|
||||||
def set_channel_enablement_all(self, enablement, origin):
|
def set_channel_favorite(self, keyfind, valfind, enablement):
|
||||||
for fhdhr_id in [x["id"] for x in self.get_channels(origin)]:
|
self.get_channel_obj(keyfind, valfind).set_favorite(enablement)
|
||||||
self.list[fhdhr_id].set_enablement(enablement, origin)
|
|
||||||
|
|
||||||
def set_channel_enablement(self, keyfind, valfind, enablement, origin):
|
def get_db_channels(self):
|
||||||
self.get_channel_obj(keyfind, valfind, origin).set_enablement(enablement)
|
self.fhdhr.logger.info("Checking for Channel information stored in the database.")
|
||||||
|
channel_ids = self.fhdhr.db.get_fhdhr_value("channels", "list") or []
|
||||||
|
if len(channel_ids):
|
||||||
|
self.fhdhr.logger.info("Found %s existing channels in the database." % str(len(channel_ids)))
|
||||||
|
for channel_id in channel_ids:
|
||||||
|
channel_obj = Channel(self.fhdhr, self.id_system, channel_id=channel_id)
|
||||||
|
channel_id = channel_obj.dict["id"]
|
||||||
|
self.list[channel_id] = channel_obj
|
||||||
|
|
||||||
def set_channel_favorite(self, keyfind, valfind, enablement, origin):
|
def get_channels(self, forceupdate=False):
|
||||||
self.get_channel_obj(keyfind, valfind, origin).set_favorite(enablement)
|
|
||||||
|
|
||||||
def get_db_channels(self, origin=None):
|
|
||||||
|
|
||||||
if not origin:
|
|
||||||
origins_list = list(self.list.keys())
|
|
||||||
else:
|
|
||||||
origins_list = origin.lower()
|
|
||||||
|
|
||||||
if isinstance(origins_list, str):
|
|
||||||
origins_list = [origins_list]
|
|
||||||
|
|
||||||
for origin in origins_list:
|
|
||||||
self.fhdhr.logger.info("Checking for %s Channel information stored in the database." % origin)
|
|
||||||
channel_ids = self.fhdhr.db.get_fhdhr_value("channels", "list", origin) or []
|
|
||||||
if len(channel_ids):
|
|
||||||
self.fhdhr.logger.info("Found %s existing channels in the database." % str(len(channel_ids)))
|
|
||||||
for channel_id in channel_ids:
|
|
||||||
channel_obj = Channel(self.fhdhr, self.id_system, origin=origin, channel_id=channel_id)
|
|
||||||
channel_id = channel_obj.dict["id"]
|
|
||||||
self.list[origin][channel_id] = channel_obj
|
|
||||||
|
|
||||||
def save_db_channels(self, origin=None):
|
|
||||||
if not origin:
|
|
||||||
origins_list = list(self.list.keys())
|
|
||||||
else:
|
|
||||||
origins_list = origin.lower()
|
|
||||||
|
|
||||||
if isinstance(origins_list, str):
|
|
||||||
origins_list = [origins_list]
|
|
||||||
|
|
||||||
for origin in origins_list:
|
|
||||||
channel_ids = [self.list[origin][x].dict["id"] for x in list(self.list[origin].keys())]
|
|
||||||
self.fhdhr.db.set_fhdhr_value("channels", "list", channel_ids, origin)
|
|
||||||
|
|
||||||
def get_channels(self, origin=None, forceupdate=False):
|
|
||||||
"""Pull Channels from origin.
|
"""Pull Channels from origin.
|
||||||
|
|
||||||
Output a list.
|
Output a list.
|
||||||
@ -120,64 +57,35 @@ class Channels():
|
|||||||
Don't pull more often than 12 hours.
|
Don't pull more often than 12 hours.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
if not origin:
|
updatelist = False
|
||||||
origins_list = list(self.list.keys())
|
if not self.list_update_time:
|
||||||
else:
|
updatelist = True
|
||||||
origins_list = origin.lower().lower()
|
elif hours_between_datetime(self.list_update_time, datetime.datetime.now()) > 12:
|
||||||
|
updatelist = True
|
||||||
|
elif forceupdate:
|
||||||
|
updatelist = True
|
||||||
|
|
||||||
if isinstance(origins_list, str):
|
if updatelist:
|
||||||
origins_list = [origins_list]
|
self.fhdhr.logger.info("Performing Channel Scan.")
|
||||||
|
channel_dict_list = self.origin.get_channels()
|
||||||
|
for channel_info in channel_dict_list:
|
||||||
|
channel_obj = Channel(self.fhdhr, self.id_system, origin_id=channel_info["id"])
|
||||||
|
channel_id = channel_obj.dict["id"]
|
||||||
|
channel_obj.basics(channel_info)
|
||||||
|
self.list[channel_id] = channel_obj
|
||||||
|
|
||||||
return_chan_list = []
|
if not self.list_update_time:
|
||||||
for origin in origins_list:
|
self.fhdhr.logger.info("Found " + str(len(self.list)) + " channels for " + str(self.fhdhr.config.dict["main"]["servicename"]))
|
||||||
|
self.list_update_time = datetime.datetime.now()
|
||||||
|
self.fhdhr.db.set_fhdhr_value("channels", "scanned_time", time.time())
|
||||||
|
|
||||||
if not len(list(self.list[origin].keys())):
|
channel_list = []
|
||||||
self.get_db_channels(origin=origin)
|
for chan_obj in list(self.list.keys()):
|
||||||
|
channel_list.append(self.list[chan_obj].dict)
|
||||||
|
return channel_list
|
||||||
|
|
||||||
if not forceupdate:
|
def get_channel_stream(self, channel_number):
|
||||||
return_chan_list.extend([self.list[origin][x].dict for x in list(self.list[origin].keys())])
|
return self.origin.get_channel_stream(self.get_channel_dict("number", channel_number))
|
||||||
|
|
||||||
else:
|
def get_channel_dict(self, keyfind, valfind):
|
||||||
|
return self.get_channel_obj(keyfind, valfind).dict
|
||||||
channel_origin_id_list = [str(self.list[origin][x].dict["origin_id"]) for x in list(self.list[origin].keys())]
|
|
||||||
|
|
||||||
self.fhdhr.logger.info("Performing Channel Scan for %s." % origin)
|
|
||||||
|
|
||||||
channel_dict_list = self.origins.origins_dict[origin].get_channels()
|
|
||||||
self.fhdhr.logger.info("Found %s channels for %s." % (len(channel_dict_list), origin))
|
|
||||||
|
|
||||||
self.fhdhr.logger.info("Performing Channel Import, This can take some time, Please wait.")
|
|
||||||
|
|
||||||
newchan = 0
|
|
||||||
chan_scan_start = time.time()
|
|
||||||
for channel_info in channel_dict_list:
|
|
||||||
|
|
||||||
chan_existing = str(channel_info["id"]) in channel_origin_id_list
|
|
||||||
|
|
||||||
if chan_existing:
|
|
||||||
channel_obj = self.get_channel_obj("origin_id", channel_info["id"], origin)
|
|
||||||
else:
|
|
||||||
channel_obj = Channel(self.fhdhr, self.id_system, origin, origin_id=channel_info["id"])
|
|
||||||
|
|
||||||
channel_id = channel_obj.dict["id"]
|
|
||||||
channel_obj.basics(channel_info)
|
|
||||||
if not chan_existing:
|
|
||||||
self.list[origin][channel_id] = channel_obj
|
|
||||||
newchan += 1
|
|
||||||
|
|
||||||
self.fhdhr.logger.info("%s Channel Import took %s" % (origin, humanized_time(time.time() - chan_scan_start)))
|
|
||||||
|
|
||||||
if not newchan:
|
|
||||||
newchan = "no"
|
|
||||||
self.fhdhr.logger.info("Found %s NEW channels for %s." % (newchan, origin))
|
|
||||||
|
|
||||||
self.fhdhr.logger.info("Total %s Channel Count: %s" % (origin, len(self.list[origin].keys())))
|
|
||||||
self.save_db_channels(origin=origin)
|
|
||||||
|
|
||||||
self.fhdhr.db.set_fhdhr_value("channels", "scanned_time", time.time(), origin)
|
|
||||||
return_chan_list.extend([self.list[origin][x].dict for x in list(self.list[origin].keys())])
|
|
||||||
|
|
||||||
return return_chan_list
|
|
||||||
|
|
||||||
def get_channel_stream(self, stream_args, origin):
|
|
||||||
return self.origins.origins_dict[origin].get_channel_stream(self.get_channel_dict("number", stream_args["channel"]), stream_args)
|
|
||||||
|
|||||||
@ -2,44 +2,36 @@ import uuid
|
|||||||
|
|
||||||
|
|
||||||
class Channel_IDs():
|
class Channel_IDs():
|
||||||
def __init__(self, fhdhr, origins):
|
def __init__(self, fhdhr):
|
||||||
self.fhdhr = fhdhr
|
self.fhdhr = fhdhr
|
||||||
self.origins = origins
|
|
||||||
|
|
||||||
def get(self, origin_id, origin):
|
def get(self, origin_id):
|
||||||
existing_ids = self.fhdhr.db.get_fhdhr_value("channels", "list", origin) or []
|
existing_ids = self.fhdhr.db.get_fhdhr_value("channels", "list") or []
|
||||||
existing_channel_info = [self.fhdhr.db.get_fhdhr_value(channel_id, "dict", origin) or {} for channel_id in existing_ids]
|
existing_channel_info = [self.fhdhr.db.get_channel_value(channel_id, "dict") or {} for channel_id in existing_ids]
|
||||||
for existing_channel in existing_channel_info:
|
for existing_channel in existing_channel_info:
|
||||||
if existing_channel["origin_id"] == origin_id:
|
if existing_channel["origin_id"] == origin_id:
|
||||||
return existing_channel["id"]
|
return existing_channel["id"]
|
||||||
return self.assign(origin)
|
return self.assign()
|
||||||
|
|
||||||
def assign(self, origin):
|
def assign(self):
|
||||||
existing_ids = self.fhdhr.db.get_fhdhr_value("channels", "list", origin) or []
|
existing_ids = self.fhdhr.db.get_fhdhr_value("channels", "list") or []
|
||||||
channel_id = None
|
channel_id = None
|
||||||
while not channel_id:
|
while not channel_id:
|
||||||
unique_id = str(uuid.uuid4())
|
unique_id = str(uuid.uuid4())
|
||||||
if str(unique_id) not in existing_ids:
|
if str(unique_id) not in existing_ids:
|
||||||
channel_id = str(unique_id)
|
channel_id = str(unique_id)
|
||||||
existing_ids.append(channel_id)
|
existing_ids.append(channel_id)
|
||||||
self.fhdhr.db.set_fhdhr_value("channels", "list", existing_ids, origin)
|
self.fhdhr.db.set_fhdhr_value("channels", "list", existing_ids)
|
||||||
return channel_id
|
return channel_id
|
||||||
|
|
||||||
def get_number(self, channel_id, origin):
|
def get_number(self, channel_id):
|
||||||
existing_ids = self.fhdhr.db.get_fhdhr_value("channels", "list", origin) or []
|
existing_ids = self.fhdhr.db.get_fhdhr_value("channels", "list") or []
|
||||||
existing_channel_info = [self.fhdhr.db.get_fhdhr_value(channel_id, "dict", origin) or {} for channel_id in existing_ids]
|
existing_channel_info = [self.fhdhr.db.get_channel_value(channel_id, "dict") or {} for channel_id in existing_ids]
|
||||||
cnumber = [existing_channel["number"] for existing_channel in existing_channel_info if existing_channel["id"] == channel_id] or None
|
cnumber = [existing_channel["number"] for existing_channel in existing_channel_info if existing_channel["id"] == channel_id] or None
|
||||||
if cnumber:
|
if cnumber:
|
||||||
return cnumber
|
return cnumber
|
||||||
|
|
||||||
used_numbers = []
|
used_numbers = [existing_channel["number"] for existing_channel in existing_channel_info]
|
||||||
for existing_channel in existing_channel_info:
|
|
||||||
if existing_channel["subnumber"]:
|
|
||||||
number = "%s.%s" % (existing_channel["number"], existing_channel["subnumber"])
|
|
||||||
else:
|
|
||||||
number = existing_channel["number"]
|
|
||||||
used_numbers.append(number)
|
|
||||||
|
|
||||||
for i in range(1000, 2000):
|
for i in range(1000, 2000):
|
||||||
if str(float(i)) not in used_numbers:
|
if str(float(i)) not in used_numbers:
|
||||||
break
|
break
|
||||||
|
|||||||
@ -1,116 +1,60 @@
|
|||||||
import time
|
|
||||||
|
|
||||||
|
|
||||||
class Channel():
|
class Channel():
|
||||||
|
|
||||||
def __init__(self, fhdhr, id_system, origin, origin_id=None, channel_id=None):
|
def __init__(self, fhdhr, id_system, origin_id=None, channel_id=None):
|
||||||
self.fhdhr = fhdhr
|
self.fhdhr = fhdhr
|
||||||
self.origin = origin
|
|
||||||
|
|
||||||
self.id_system = id_system
|
self.id_system = id_system
|
||||||
|
|
||||||
if not channel_id:
|
if not channel_id:
|
||||||
if origin_id:
|
if origin_id:
|
||||||
channel_id = id_system.get(origin_id, origin)
|
channel_id = id_system.get(origin_id)
|
||||||
else:
|
else:
|
||||||
channel_id = id_system.assign(origin)
|
channel_id = id_system.assign()
|
||||||
self.channel_id = channel_id
|
self.dict = self.fhdhr.db.get_channel_value(str(channel_id), "dict") or self.default_dict(channel_id)
|
||||||
|
|
||||||
self.dict = self.fhdhr.db.get_fhdhr_value(str(channel_id), "dict", self.origin) or self.default_dict
|
|
||||||
self.verify_dict()
|
self.verify_dict()
|
||||||
|
self.fhdhr.db.set_channel_value(self.dict["id"], "dict", self.dict)
|
||||||
self.fhdhr.db.set_fhdhr_value(self.dict["id"], "dict", self.dict, self.origin)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def number(self):
|
|
||||||
if self.dict["subnumber"]:
|
|
||||||
return "%s.%s" % (self.dict["number"], self.dict["subnumber"])
|
|
||||||
else:
|
|
||||||
return self.dict["number"]
|
|
||||||
|
|
||||||
@property
|
|
||||||
def thumbnail(self):
|
|
||||||
if str(self.dict["thumbnail"]).lower() in ["none"]:
|
|
||||||
return self.generic_image_url
|
|
||||||
elif self.dict["thumbnail"]:
|
|
||||||
return self.dict["thumbnail"]
|
|
||||||
elif self.dict["origin_thumbnail"]:
|
|
||||||
return self.dict["origin_thumbnail"]
|
|
||||||
else:
|
|
||||||
return self.generic_image_url
|
|
||||||
|
|
||||||
@property
|
|
||||||
def epgdict(self):
|
|
||||||
return {
|
|
||||||
"callsign": self.dict["callsign"],
|
|
||||||
"name": self.dict["name"],
|
|
||||||
"number": self.number,
|
|
||||||
"id": self.dict["origin_id"],
|
|
||||||
"thumbnail": self.thumbnail,
|
|
||||||
"listing": [],
|
|
||||||
}
|
|
||||||
|
|
||||||
def verify_dict(self):
|
def verify_dict(self):
|
||||||
"""Development Purposes
|
"""Development Purposes
|
||||||
Add new Channel dict keys
|
Add new Channel dict keys
|
||||||
"""
|
"""
|
||||||
default_dict = self.default_dict
|
default_dict = self.default_dict(self.dict["id"])
|
||||||
for key in list(default_dict.keys()):
|
for key in list(default_dict.keys()):
|
||||||
if key not in list(self.dict.keys()):
|
if key not in list(self.dict.keys()):
|
||||||
self.dict[key] = default_dict[key]
|
self.dict[key] = default_dict[key]
|
||||||
if self.dict["number"]:
|
|
||||||
if "." in self.dict["number"]:
|
|
||||||
self.dict["subnumber"] = self.dict["number"].split(".")[1]
|
|
||||||
self.dict["number"] = self.dict["number"].split(".")[0]
|
|
||||||
|
|
||||||
def basics(self, channel_info):
|
def basics(self, channel_info):
|
||||||
"""Some Channel Information is Critical"""
|
"""Some Channel Information is Critical"""
|
||||||
|
|
||||||
if "name" not in list(channel_info.keys()):
|
if "name" not in list(channel_info.keys()):
|
||||||
channel_info["name"] = self.dict["id"]
|
channel_info["name"] = self.dict["id"]
|
||||||
elif not channel_info["name"]:
|
|
||||||
channel_info["name"] = self.dict["id"]
|
|
||||||
self.dict["origin_name"] = channel_info["name"]
|
self.dict["origin_name"] = channel_info["name"]
|
||||||
if not self.dict["name"]:
|
if not self.dict["name"]:
|
||||||
self.dict["name"] = self.dict["origin_name"]
|
self.dict["name"] = self.dict["origin_name"]
|
||||||
|
|
||||||
if "id" not in list(channel_info.keys()):
|
if "id" not in list(channel_info.keys()):
|
||||||
channel_info["id"] = channel_info["name"]
|
channel_info["id"] = channel_info["name"]
|
||||||
elif not channel_info["id"]:
|
|
||||||
channel_info["id"] = channel_info["name"]
|
|
||||||
self.dict["origin_id"] = channel_info["id"]
|
self.dict["origin_id"] = channel_info["id"]
|
||||||
|
|
||||||
if "callsign" not in list(channel_info.keys()):
|
if "callsign" not in list(channel_info.keys()):
|
||||||
channel_info["callsign"] = channel_info["name"]
|
channel_info["callsign"] = channel_info["name"]
|
||||||
elif not channel_info["callsign"]:
|
|
||||||
channel_info["callsign"] = channel_info["name"]
|
|
||||||
self.dict["origin_callsign"] = channel_info["callsign"]
|
self.dict["origin_callsign"] = channel_info["callsign"]
|
||||||
if not self.dict["callsign"]:
|
if not self.dict["callsign"]:
|
||||||
self.dict["callsign"] = self.dict["origin_callsign"]
|
self.dict["callsign"] = self.dict["origin_callsign"]
|
||||||
|
|
||||||
if "tags" not in list(channel_info.keys()):
|
if "tags" not in list(channel_info.keys()):
|
||||||
channel_info["tags"] = []
|
channel_info["tags"] = []
|
||||||
elif not channel_info["tags"]:
|
|
||||||
channel_info["tags"] = []
|
|
||||||
self.dict["origin_tags"] = channel_info["tags"]
|
self.dict["origin_tags"] = channel_info["tags"]
|
||||||
if not self.dict["tags"]:
|
if not self.dict["tags"]:
|
||||||
self.dict["tags"] = self.dict["origin_tags"]
|
self.dict["tags"] = self.dict["origin_tags"]
|
||||||
|
|
||||||
if "number" not in list(channel_info.keys()):
|
if "number" not in list(channel_info.keys()):
|
||||||
channel_info["number"] = self.id_system.get_number(channel_info["id"], self.origin)
|
channel_info["number"] = self.id_system.get_number(channel_info["id"])
|
||||||
elif not channel_info["number"]:
|
self.dict["origin_number"] = str(float(channel_info["number"]))
|
||||||
channel_info["number"] = self.id_system.get_number(channel_info["id"], self.origin)
|
|
||||||
self.dict["origin_number"] = str(channel_info["number"])
|
|
||||||
if not self.dict["number"]:
|
if not self.dict["number"]:
|
||||||
self.dict["number"] = self.dict["origin_number"].split(".")[0]
|
self.dict["number"] = self.dict["origin_number"]
|
||||||
try:
|
|
||||||
self.dict["subnumber"] = self.dict["origin_number"].split(".")[1]
|
|
||||||
except IndexError:
|
|
||||||
self.dict["subnumber"] = None
|
|
||||||
else:
|
|
||||||
if "." in self.dict["number"]:
|
|
||||||
self.dict["subnumber"] = self.dict["number"].split(".")[1]
|
|
||||||
self.dict["number"] = self.dict["number"].split(".")[0]
|
|
||||||
|
|
||||||
if "thumbnail" not in list(channel_info.keys()):
|
if "thumbnail" not in list(channel_info.keys()):
|
||||||
channel_info["thumbnail"] = None
|
channel_info["thumbnail"] = None
|
||||||
@ -122,22 +66,14 @@ class Channel():
|
|||||||
channel_info["HD"] = 0
|
channel_info["HD"] = 0
|
||||||
self.dict["HD"] = channel_info["HD"]
|
self.dict["HD"] = channel_info["HD"]
|
||||||
|
|
||||||
if "enabled" in list(channel_info.keys()):
|
self.fhdhr.db.set_channel_value(self.dict["id"], "dict", self.dict)
|
||||||
if "created" not in list(self.dict.keys()):
|
|
||||||
self.dict["enabled"] = channel_info["enabled"]
|
|
||||||
|
|
||||||
if "created" not in list(self.dict.keys()):
|
def default_dict(self, channel_id):
|
||||||
self.dict["created"] = time.time()
|
|
||||||
|
|
||||||
self.fhdhr.db.set_fhdhr_value(self.dict["id"], "dict", self.dict, self.origin)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def default_dict(self):
|
|
||||||
return {
|
return {
|
||||||
"id": str(self.channel_id), "origin_id": None,
|
"id": str(channel_id), "origin_id": None,
|
||||||
"name": None, "origin_name": None,
|
"name": None, "origin_name": None,
|
||||||
"callsign": None, "origin_callsign": None,
|
"callsign": None, "origin_callsign": None,
|
||||||
"number": None, "subnumber": None, "origin_number": None,
|
"number": None, "origin_number": None,
|
||||||
"tags": [], "origin_tags": [],
|
"tags": [], "origin_tags": [],
|
||||||
"thumbnail": None, "origin_thumbnail": None,
|
"thumbnail": None, "origin_thumbnail": None,
|
||||||
"enabled": True, "favorite": 0,
|
"enabled": True, "favorite": 0,
|
||||||
@ -145,37 +81,41 @@ class Channel():
|
|||||||
}
|
}
|
||||||
|
|
||||||
def destroy(self):
|
def destroy(self):
|
||||||
self.fhdhr.db.delete_fhdhr_value(self.dict["id"], "dict", self.origin)
|
self.fhdhr.db.delete_channel_value(self.dict["id"], "dict")
|
||||||
channel_ids = self.fhdhr.db.get_fhdhr_value("channels", "list") or []
|
channel_ids = self.fhdhr.db.get_fhdhr_value("channels", "list") or []
|
||||||
if self.dict["id"] in channel_ids:
|
if self.dict["id"] in channel_ids:
|
||||||
channel_ids.remove(self.dict["id"])
|
channel_ids.remove(self.dict["id"])
|
||||||
self.fhdhr.db.set_fhdhr_value("channels", "list", channel_ids, self.origin)
|
self.fhdhr.db.set_fhdhr_value("channels", "list", channel_ids)
|
||||||
|
|
||||||
def set_status(self, updatedict):
|
def set_status(self, updatedict):
|
||||||
for key in list(updatedict.keys()):
|
for key in list(updatedict.keys()):
|
||||||
if key == "number":
|
if key == "number":
|
||||||
updatedict[key] = str(updatedict[key])
|
updatedict[key] = str(float(updatedict[key]))
|
||||||
self.dict[key] = updatedict[key]
|
self.dict[key] = updatedict[key]
|
||||||
self.fhdhr.db.set_fhdhr_value(self.dict["id"], "dict", self.dict, self.origin)
|
self.fhdhr.db.set_channel_value(self.dict["id"], "dict", self.dict)
|
||||||
|
|
||||||
@property
|
def lineup_dict(self):
|
||||||
def generic_image_url(self):
|
return {
|
||||||
return "/api/images?method=generate&type=channel&message=%s" % self.number
|
'GuideNumber': self.dict['number'],
|
||||||
|
'GuideName': self.dict['name'],
|
||||||
|
'Tags': ",".join(self.dict['tags']),
|
||||||
|
'URL': self.stream_url(),
|
||||||
|
'HD': self.dict["HD"],
|
||||||
|
"Favorite": self.dict["favorite"],
|
||||||
|
}
|
||||||
|
|
||||||
@property
|
def stream_url(self):
|
||||||
def api_stream_url(self):
|
return ('/auto/v%s' % self.dict['number'])
|
||||||
return '/api/tuners?method=stream&stream_method=%s&channel=%s&origin=%s' % (self.fhdhr.origins.origins_dict[self.origin].stream_method, self.dict["id"], self.origin)
|
|
||||||
|
|
||||||
@property
|
def play_url(self):
|
||||||
def api_m3u_url(self):
|
return ('/api/m3u?method=get&channel=%s' % self.dict['number'])
|
||||||
return '/api/m3u?method=get&channel=%s&origin=%s' % (self.dict["id"], self.origin)
|
|
||||||
|
|
||||||
def set_favorite(self, enablement):
|
def set_favorite(self, enablement):
|
||||||
if enablement == "+":
|
if enablement == "+":
|
||||||
self.dict["favorite"] = 1
|
self.dict["favorite"] = 1
|
||||||
elif enablement == "-":
|
elif enablement == "+":
|
||||||
self.dict["favorite"] = 0
|
self.dict["favorite"] = 0
|
||||||
self.fhdhr.db.set_fhdhr_value(self.dict["id"], "info", self.dict, self.origin)
|
self.fhdhr.db.set_channel_value(self.dict["fhdhr_id"], "info", self.dict)
|
||||||
|
|
||||||
def set_enablement(self, enablement):
|
def set_enablement(self, enablement):
|
||||||
if enablement == "disable":
|
if enablement == "disable":
|
||||||
@ -187,7 +127,7 @@ class Channel():
|
|||||||
self.dict["enabled"] = False
|
self.dict["enabled"] = False
|
||||||
else:
|
else:
|
||||||
self.dict["enabled"] = True
|
self.dict["enabled"] = True
|
||||||
self.fhdhr.db.set_fhdhr_value(self.dict["id"], "info", self.dict, self.origin)
|
self.fhdhr.db.set_channel_value(self.dict["fhdhr_id"], "info", self.dict)
|
||||||
|
|
||||||
def __getattr__(self, name):
|
def __getattr__(self, name):
|
||||||
''' will only get called for undefined attributes '''
|
''' will only get called for undefined attributes '''
|
||||||
|
|||||||
165
fHDHR/device/cluster.py
Normal file
165
fHDHR/device/cluster.py
Normal file
@ -0,0 +1,165 @@
|
|||||||
|
import urllib.parse
|
||||||
|
from collections import OrderedDict
|
||||||
|
|
||||||
|
|
||||||
|
class fHDHR_Cluster():
|
||||||
|
|
||||||
|
def __init__(self, fhdhr, ssdp):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
self.ssdp = ssdp
|
||||||
|
|
||||||
|
self.friendlyname = self.fhdhr.config.dict["fhdhr"]["friendlyname"]
|
||||||
|
self.location = None
|
||||||
|
self.location_url = None
|
||||||
|
|
||||||
|
if fhdhr.config.dict["fhdhr"]["discovery_address"]:
|
||||||
|
self.location = ('http://' + fhdhr.config.dict["fhdhr"]["discovery_address"] + ':' +
|
||||||
|
str(fhdhr.config.dict["fhdhr"]["port"]))
|
||||||
|
self.location_url = urllib.parse.quote(self.location)
|
||||||
|
|
||||||
|
self.startup_sync()
|
||||||
|
|
||||||
|
def cluster(self):
|
||||||
|
return self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
|
||||||
|
|
||||||
|
def get_cluster_dicts_web(self):
|
||||||
|
fhdhr_list = self.cluster()
|
||||||
|
locations = []
|
||||||
|
for location in list(fhdhr_list.keys()):
|
||||||
|
item_dict = {
|
||||||
|
"base_url": fhdhr_list[location]["base_url"],
|
||||||
|
"name": fhdhr_list[location]["name"]
|
||||||
|
}
|
||||||
|
if item_dict["base_url"] != self.location:
|
||||||
|
locations.append(item_dict)
|
||||||
|
if len(locations):
|
||||||
|
locations = sorted(locations, key=lambda i: i['name'])
|
||||||
|
return locations
|
||||||
|
else:
|
||||||
|
return None
|
||||||
|
|
||||||
|
def get_list(self):
|
||||||
|
cluster = self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
|
||||||
|
return_dict = {}
|
||||||
|
for location in list(cluster.keys()):
|
||||||
|
if location != self.location:
|
||||||
|
return_dict[location] = {
|
||||||
|
"Joined": True
|
||||||
|
}
|
||||||
|
|
||||||
|
detected_list = self.ssdp.detect_method.get()
|
||||||
|
for location in detected_list:
|
||||||
|
if location not in list(cluster.keys()):
|
||||||
|
return_dict[location] = {
|
||||||
|
"Joined": False
|
||||||
|
}
|
||||||
|
return_dict = OrderedDict(sorted(return_dict.items()))
|
||||||
|
return return_dict
|
||||||
|
|
||||||
|
def default_cluster(self):
|
||||||
|
defdict = {}
|
||||||
|
defdict[self.location] = {
|
||||||
|
"base_url": self.location,
|
||||||
|
"name": self.friendlyname
|
||||||
|
}
|
||||||
|
return defdict
|
||||||
|
|
||||||
|
def startup_sync(self):
|
||||||
|
self.fhdhr.logger.info("Syncronizing with Cluster.")
|
||||||
|
cluster = self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
|
||||||
|
if not len(list(cluster.keys())):
|
||||||
|
self.fhdhr.logger.info("No Cluster Found.")
|
||||||
|
else:
|
||||||
|
self.fhdhr.logger.info("Found %s clustered services." % str(len(list(cluster.keys()))))
|
||||||
|
for location in list(cluster.keys()):
|
||||||
|
if location != self.location:
|
||||||
|
self.fhdhr.logger.info("Checking Cluster Syncronization information from %s." % location)
|
||||||
|
sync_url = location + "/api/cluster?method=get"
|
||||||
|
try:
|
||||||
|
sync_open = self.fhdhr.web.session.get(sync_url)
|
||||||
|
retrieved_cluster = sync_open.json()
|
||||||
|
if self.location not in list(retrieved_cluster.keys()):
|
||||||
|
return self.leave()
|
||||||
|
except self.fhdhr.web.exceptions.ConnectionError:
|
||||||
|
self.fhdhr.logger.error("Unreachable: " + location)
|
||||||
|
|
||||||
|
def leave(self):
|
||||||
|
self.fhdhr.logger.info("Leaving cluster.")
|
||||||
|
self.fhdhr.db.set_fhdhr_value("cluster", "dict", self.default_cluster())
|
||||||
|
|
||||||
|
def disconnect(self):
|
||||||
|
cluster = self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
|
||||||
|
for location in list(cluster.keys()):
|
||||||
|
if location != self.location:
|
||||||
|
self.fhdhr.logger.info("Informing %s that I am departing the Cluster." % location)
|
||||||
|
sync_url = location + "/api/cluster?method=del&location=" + self.location
|
||||||
|
try:
|
||||||
|
self.fhdhr.web.session.get(sync_url)
|
||||||
|
except self.fhdhr.web.exceptions.ConnectionError:
|
||||||
|
self.fhdhr.logger.error("Unreachable: " + location)
|
||||||
|
self.leave()
|
||||||
|
|
||||||
|
def sync(self, location):
|
||||||
|
sync_url = location + "/api/cluster?method=get"
|
||||||
|
try:
|
||||||
|
sync_open = self.fhdhr.web.session.get(sync_url)
|
||||||
|
self.fhdhr.db.set_fhdhr_value("cluster", "dict", sync_open.json())
|
||||||
|
except self.fhdhr.web.exceptions.ConnectionError:
|
||||||
|
self.fhdhr.logger.error("Unreachable: " + location)
|
||||||
|
|
||||||
|
def push_sync(self):
|
||||||
|
cluster = self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
|
||||||
|
for location in list(cluster.keys()):
|
||||||
|
if location != self.location:
|
||||||
|
sync_url = location + "/api/cluster?method=sync&location=" + self.location_url
|
||||||
|
try:
|
||||||
|
self.fhdhr.web.session.get(sync_url)
|
||||||
|
except self.fhdhr.web.exceptions.ConnectionError:
|
||||||
|
self.fhdhr.logger.error("Unreachable: " + location)
|
||||||
|
|
||||||
|
def add(self, location):
|
||||||
|
cluster = self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
|
||||||
|
if location not in list(cluster.keys()):
|
||||||
|
self.fhdhr.logger.info("Adding %s to cluster." % location)
|
||||||
|
cluster[location] = {"base_url": location}
|
||||||
|
|
||||||
|
location_info_url = location + "/discover.json"
|
||||||
|
try:
|
||||||
|
location_info_req = self.fhdhr.web.session.get(location_info_url)
|
||||||
|
except self.fhdhr.web.exceptions.ConnectionError:
|
||||||
|
self.fhdhr.logger.error("Unreachable: " + location)
|
||||||
|
del cluster[location]
|
||||||
|
self.fhdhr.db.set_fhdhr_value("cluster", "dict", cluster)
|
||||||
|
return
|
||||||
|
location_info = location_info_req.json()
|
||||||
|
cluster[location]["name"] = location_info["FriendlyName"]
|
||||||
|
|
||||||
|
cluster_info_url = location + "/api/cluster?method=get"
|
||||||
|
try:
|
||||||
|
cluster_info_req = self.fhdhr.web.session.get(cluster_info_url)
|
||||||
|
except self.fhdhr.web.exceptions.ConnectionError:
|
||||||
|
self.fhdhr.logger.error("Unreachable: " + location)
|
||||||
|
del cluster[location]
|
||||||
|
self.fhdhr.db.set_fhdhr_value("cluster", "dict", cluster)
|
||||||
|
return
|
||||||
|
cluster_info = cluster_info_req.json()
|
||||||
|
for cluster_key in list(cluster_info.keys()):
|
||||||
|
if cluster_key not in list(cluster.keys()):
|
||||||
|
cluster[cluster_key] = cluster_info[cluster_key]
|
||||||
|
|
||||||
|
self.fhdhr.db.set_fhdhr_value("cluster", "dict", cluster)
|
||||||
|
self.push_sync()
|
||||||
|
|
||||||
|
def remove(self, location):
|
||||||
|
cluster = self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
|
||||||
|
if location in list(cluster.keys()):
|
||||||
|
self.fhdhr.logger.info("Removing %s from cluster." % location)
|
||||||
|
del cluster[location]
|
||||||
|
sync_url = location + "/api/cluster?method=leave"
|
||||||
|
try:
|
||||||
|
self.fhdhr.web.session.get(sync_url)
|
||||||
|
except self.fhdhr.web.exceptions.ConnectionError:
|
||||||
|
self.fhdhr.logger.error("Unreachable: " + location)
|
||||||
|
self.push_sync()
|
||||||
|
self.fhdhr.db.set_fhdhr_value("cluster", "dict", cluster)
|
||||||
187
fHDHR/device/epg.py
Normal file
187
fHDHR/device/epg.py
Normal file
@ -0,0 +1,187 @@
|
|||||||
|
import os
|
||||||
|
import time
|
||||||
|
import datetime
|
||||||
|
from collections import OrderedDict
|
||||||
|
|
||||||
|
epgtype_list = []
|
||||||
|
device_dir = os.path.dirname(__file__)
|
||||||
|
for entry in os.scandir(device_dir + '/epgtypes'):
|
||||||
|
if entry.is_file():
|
||||||
|
if entry.name[0] != '_':
|
||||||
|
epgtype_list.append(str(entry.name[:-3]))
|
||||||
|
impstring = f'from .epgtypes import {entry.name}'[:-3]
|
||||||
|
exec(impstring)
|
||||||
|
|
||||||
|
|
||||||
|
class EPG():
|
||||||
|
|
||||||
|
def __init__(self, fhdhr, channels, origin):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
self.origin = origin
|
||||||
|
self.channels = channels
|
||||||
|
|
||||||
|
self.epgdict = {}
|
||||||
|
|
||||||
|
self.epg_method_selfadd()
|
||||||
|
|
||||||
|
self.epg_methods = self.fhdhr.config.dict["epg"]["method"]
|
||||||
|
self.valid_epg_methods = [x for x in self.fhdhr.config.dict["main"]["valid_epg_methods"] if x and x not in [None, "None"]]
|
||||||
|
self.def_method = self.fhdhr.config.dict["epg"]["def_method"]
|
||||||
|
self.sleeptime = {}
|
||||||
|
for epg_method in self.epg_methods:
|
||||||
|
if epg_method in list(self.fhdhr.config.dict.keys()):
|
||||||
|
if "update_frequency" in list(self.fhdhr.config.dict[epg_method].keys()):
|
||||||
|
self.sleeptime[epg_method] = self.fhdhr.config.dict[epg_method]["update_frequency"]
|
||||||
|
if epg_method not in list(self.sleeptime.keys()):
|
||||||
|
self.sleeptime[epg_method] = self.fhdhr.config.dict["epg"]["update_frequency"]
|
||||||
|
|
||||||
|
def clear_epg_cache(self, method=None):
|
||||||
|
|
||||||
|
if not method:
|
||||||
|
method = self.def_method
|
||||||
|
if (method == self.fhdhr.config.dict["main"]["dictpopname"] or
|
||||||
|
method not in self.fhdhr.config.dict["main"]["valid_epg_methods"]):
|
||||||
|
method = "origin"
|
||||||
|
|
||||||
|
epgtypename = method
|
||||||
|
if method in [self.fhdhr.config.dict["main"]["dictpopname"], "origin"]:
|
||||||
|
epgtypename = self.fhdhr.config.dict["main"]["dictpopname"]
|
||||||
|
|
||||||
|
self.fhdhr.logger.info("Clearing " + epgtypename + " EPG cache.")
|
||||||
|
|
||||||
|
method_to_call = getattr(self, method)
|
||||||
|
if hasattr(method_to_call, 'clear_cache'):
|
||||||
|
func_to_call = getattr(method_to_call, 'clear_cache')
|
||||||
|
func_to_call()
|
||||||
|
|
||||||
|
if method in list(self.epgdict.keys()):
|
||||||
|
del self.epgdict[method]
|
||||||
|
|
||||||
|
self.fhdhr.db.delete_fhdhr_value("epg_dict", method)
|
||||||
|
|
||||||
|
def whats_on_now(self, channel, method=None):
|
||||||
|
epgdict = self.get_epg(method)
|
||||||
|
listings = epgdict[channel]["listing"]
|
||||||
|
for listing in listings:
|
||||||
|
nowtime = datetime.datetime.utcnow()
|
||||||
|
start_time = datetime.datetime.strptime(listing["time_start"], '%Y%m%d%H%M%S +0000')
|
||||||
|
end_time = datetime.datetime.strptime(listing["time_end"], '%Y%m%d%H%M%S +0000')
|
||||||
|
if start_time <= nowtime <= end_time:
|
||||||
|
epgitem = epgdict[channel].copy()
|
||||||
|
epgitem["listing"] = [listing]
|
||||||
|
return epgitem
|
||||||
|
return None
|
||||||
|
|
||||||
|
def whats_on_allchans(self, method=None):
|
||||||
|
|
||||||
|
if not method:
|
||||||
|
method = self.def_method
|
||||||
|
if (method == self.fhdhr.config.dict["main"]["dictpopname"] or
|
||||||
|
method not in self.fhdhr.config.dict["main"]["valid_epg_methods"]):
|
||||||
|
method = "origin"
|
||||||
|
|
||||||
|
channel_guide_list = []
|
||||||
|
epgdict = self.get_epg(method)
|
||||||
|
channels = list(epgdict.keys())
|
||||||
|
for channel in channels:
|
||||||
|
whatson = self.whats_on_now(epgdict[channel]["number"], method)
|
||||||
|
if whatson:
|
||||||
|
channel_guide_list.append(whatson)
|
||||||
|
return channel_guide_list
|
||||||
|
|
||||||
|
def get_epg(self, method=None):
|
||||||
|
|
||||||
|
if not method:
|
||||||
|
method = self.def_method
|
||||||
|
if (method == self.fhdhr.config.dict["main"]["dictpopname"] or
|
||||||
|
method not in self.fhdhr.config.dict["main"]["valid_epg_methods"]):
|
||||||
|
method = "origin"
|
||||||
|
|
||||||
|
if method not in list(self.epgdict.keys()):
|
||||||
|
|
||||||
|
epgdict = self.fhdhr.db.get_fhdhr_value("epg_dict", method) or None
|
||||||
|
if not epgdict:
|
||||||
|
self.update(method)
|
||||||
|
self.epgdict[method] = self.fhdhr.db.get_fhdhr_value("epg_dict", method) or {}
|
||||||
|
else:
|
||||||
|
self.epgdict[method] = epgdict
|
||||||
|
return self.epgdict[method]
|
||||||
|
else:
|
||||||
|
return self.epgdict[method]
|
||||||
|
|
||||||
|
def get_thumbnail(self, itemtype, itemid):
|
||||||
|
if itemtype == "channel":
|
||||||
|
chandict = self.find_channel_dict(itemid)
|
||||||
|
return chandict["thumbnail"]
|
||||||
|
elif itemtype == "content":
|
||||||
|
progdict = self.find_program_dict(itemid)
|
||||||
|
return progdict["thumbnail"]
|
||||||
|
return None
|
||||||
|
|
||||||
|
def find_channel_dict(self, channel_id):
|
||||||
|
epgdict = self.get_epg()
|
||||||
|
channel_list = []
|
||||||
|
for channel in list(epgdict.keys()):
|
||||||
|
channel_list.append(epgdict[channel])
|
||||||
|
return next(item for item in channel_list if item["id"] == channel_id)
|
||||||
|
|
||||||
|
def find_program_dict(self, event_id):
|
||||||
|
epgdict = self.get_epg()
|
||||||
|
event_list = []
|
||||||
|
for channel in list(epgdict.keys()):
|
||||||
|
event_list.extend(epgdict[channel]["listing"])
|
||||||
|
return next(item for item in event_list if item["id"] == event_id)
|
||||||
|
|
||||||
|
def epg_method_selfadd(self):
|
||||||
|
self.fhdhr.logger.info("Checking for Optional EPG methods.")
|
||||||
|
for method in epgtype_list:
|
||||||
|
self.fhdhr.logger.info("Found %s EPG method." % method)
|
||||||
|
exec("%s = %s" % ("self." + str(method), str(method) + "." + str(method) + "EPG(self.fhdhr, self.channels)"))
|
||||||
|
|
||||||
|
def update(self, method=None):
|
||||||
|
|
||||||
|
if not method:
|
||||||
|
method = self.def_method
|
||||||
|
if (method == self.fhdhr.config.dict["main"]["dictpopname"] or
|
||||||
|
method not in self.fhdhr.config.dict["main"]["valid_epg_methods"]):
|
||||||
|
method = "origin"
|
||||||
|
|
||||||
|
epgtypename = method
|
||||||
|
if method in [self.fhdhr.config.dict["main"]["dictpopname"], "origin"]:
|
||||||
|
epgtypename = self.fhdhr.config.dict["main"]["dictpopname"]
|
||||||
|
|
||||||
|
self.fhdhr.logger.info("Updating " + epgtypename + " EPG cache.")
|
||||||
|
method_to_call = getattr(self, method)
|
||||||
|
func_to_call = getattr(method_to_call, 'update_epg')
|
||||||
|
if method == 'origin':
|
||||||
|
programguide = func_to_call(self.channels)
|
||||||
|
else:
|
||||||
|
programguide = func_to_call()
|
||||||
|
|
||||||
|
for chan in list(programguide.keys()):
|
||||||
|
floatnum = str(float(chan))
|
||||||
|
programguide[floatnum] = programguide.pop(chan)
|
||||||
|
programguide[floatnum]["number"] = floatnum
|
||||||
|
|
||||||
|
programguide = OrderedDict(sorted(programguide.items()))
|
||||||
|
|
||||||
|
for cnum in programguide:
|
||||||
|
programguide[cnum]["listing"] = sorted(programguide[cnum]["listing"], key=lambda i: i['time_start'])
|
||||||
|
|
||||||
|
self.epgdict = programguide
|
||||||
|
self.fhdhr.db.set_fhdhr_value("epg_dict", method, programguide)
|
||||||
|
self.fhdhr.db.set_fhdhr_value("update_time", method, time.time())
|
||||||
|
self.fhdhr.logger.info("Wrote " + epgtypename + " EPG cache.")
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
for epg_method in self.epg_methods:
|
||||||
|
self.update(epg_method)
|
||||||
|
try:
|
||||||
|
while True:
|
||||||
|
for epg_method in self.epg_methods:
|
||||||
|
if time.time() >= (self.fhdhr.db.get_fhdhr_value("update_time", epg_method) + self.sleeptime[epg_method]):
|
||||||
|
self.update(epg_method)
|
||||||
|
time.sleep(3600)
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
pass
|
||||||
@ -1,318 +0,0 @@
|
|||||||
import time
|
|
||||||
import datetime
|
|
||||||
import threading
|
|
||||||
|
|
||||||
from fHDHR.tools import channel_sort
|
|
||||||
|
|
||||||
from .blocks import blocksEPG
|
|
||||||
|
|
||||||
|
|
||||||
class EPG():
|
|
||||||
|
|
||||||
def __init__(self, fhdhr, channels, origins):
|
|
||||||
self.fhdhr = fhdhr
|
|
||||||
|
|
||||||
self.origins = origins
|
|
||||||
self.channels = channels
|
|
||||||
|
|
||||||
self.epgdict = {}
|
|
||||||
|
|
||||||
self.epg_methods = self.fhdhr.config.dict["epg"]["method"] or []
|
|
||||||
self.valid_epg_methods = [x for x in list(self.fhdhr.config.dict["epg"]["valid_methods"].keys()) if x and x not in [None, "None"]]
|
|
||||||
|
|
||||||
self.blocks = blocksEPG(self.fhdhr, self.channels, self.origins, None)
|
|
||||||
self.epg_handling = {}
|
|
||||||
self.epg_method_selfadd()
|
|
||||||
|
|
||||||
self.def_method = self.fhdhr.config.dict["epg"]["def_method"]
|
|
||||||
self.sleeptime = {}
|
|
||||||
for epg_method in self.epg_methods:
|
|
||||||
if epg_method in list(self.fhdhr.config.dict.keys()):
|
|
||||||
if "update_frequency" in list(self.fhdhr.config.dict[epg_method].keys()):
|
|
||||||
self.sleeptime[epg_method] = self.fhdhr.config.dict[epg_method]["update_frequency"]
|
|
||||||
if epg_method not in list(self.sleeptime.keys()):
|
|
||||||
self.sleeptime[epg_method] = self.fhdhr.config.dict["epg"]["update_frequency"]
|
|
||||||
|
|
||||||
self.epg_update_url = "/api/epg?method=update"
|
|
||||||
|
|
||||||
self.fhdhr.threads["epg"] = threading.Thread(target=self.run)
|
|
||||||
|
|
||||||
def clear_epg_cache(self, method=None):
|
|
||||||
|
|
||||||
if not method:
|
|
||||||
if not self.def_method:
|
|
||||||
return
|
|
||||||
if method not in self.valid_epg_methods:
|
|
||||||
if not self.def_method:
|
|
||||||
return
|
|
||||||
method = self.def_method
|
|
||||||
|
|
||||||
self.fhdhr.logger.info("Clearing %s EPG cache." % method)
|
|
||||||
|
|
||||||
if hasattr(self.epg_handling[method], 'clear_cache'):
|
|
||||||
self.epg_handling[method].clear_cache()
|
|
||||||
|
|
||||||
if method in list(self.epgdict.keys()):
|
|
||||||
del self.epgdict[method]
|
|
||||||
|
|
||||||
self.fhdhr.db.delete_fhdhr_value("epg_dict", method)
|
|
||||||
|
|
||||||
def whats_on_now(self, channel_number, method=None, chan_obj=None, chan_dict=None):
|
|
||||||
nowtime = time.time()
|
|
||||||
epgdict = self.get_epg(method)
|
|
||||||
if channel_number not in list(epgdict.keys()):
|
|
||||||
epgdict[channel_number] = {
|
|
||||||
"callsign": "",
|
|
||||||
"name": "",
|
|
||||||
"number": str(channel_number),
|
|
||||||
"id": "",
|
|
||||||
"thumbnail": "",
|
|
||||||
"listing": []
|
|
||||||
}
|
|
||||||
|
|
||||||
for listing in epgdict[channel_number]["listing"]:
|
|
||||||
for time_item in ["time_start", "time_end"]:
|
|
||||||
time_value = listing[time_item]
|
|
||||||
if str(time_value).endswith("+00:00"):
|
|
||||||
listing[time_item] = datetime.datetime.strptime(time_value, '%Y%m%d%H%M%S +00:00').timestamp()
|
|
||||||
elif str(time_value).endswith("+0000"):
|
|
||||||
listing[time_item] = datetime.datetime.strptime(time_value, '%Y%m%d%H%M%S +0000').timestamp()
|
|
||||||
else:
|
|
||||||
listing[time_item] = int(time_value)
|
|
||||||
if int(listing["time_start"]) <= nowtime <= int(listing["time_end"]):
|
|
||||||
epgitem = epgdict[channel_number].copy()
|
|
||||||
epgitem["listing"] = [listing]
|
|
||||||
return epgitem
|
|
||||||
epgitem = epgdict[channel_number].copy()
|
|
||||||
epgitem["listing"] = [self.blocks.empty_listing(chan_obj=None, chan_dict=None)]
|
|
||||||
return epgitem
|
|
||||||
|
|
||||||
def whats_on_allchans(self, method=None):
|
|
||||||
|
|
||||||
if not method:
|
|
||||||
if not self.def_method:
|
|
||||||
return
|
|
||||||
method = self.def_method
|
|
||||||
if method not in self.valid_epg_methods:
|
|
||||||
if not self.def_method:
|
|
||||||
return
|
|
||||||
method = self.def_method
|
|
||||||
|
|
||||||
channel_guide_dict = {}
|
|
||||||
epgdict = self.get_epg(method)
|
|
||||||
epgdict = epgdict.copy()
|
|
||||||
for c in list(epgdict.keys()):
|
|
||||||
if method in [origin for origin in list(self.origins.origins_dict.keys())]:
|
|
||||||
chan_obj = self.channels.get_channel_obj("origin_id", epgdict[c]["id"])
|
|
||||||
channel_number = chan_obj.number
|
|
||||||
epgdict[channel_number] = epgdict.pop(c)
|
|
||||||
epgdict[channel_number]["name"] = chan_obj.dict["name"]
|
|
||||||
epgdict[channel_number]["callsign"] = chan_obj.dict["callsign"]
|
|
||||||
epgdict[channel_number]["number"] = chan_obj.number
|
|
||||||
epgdict[channel_number]["id"] = chan_obj.dict["origin_id"]
|
|
||||||
epgdict[channel_number]["thumbnail"] = chan_obj.thumbnail
|
|
||||||
else:
|
|
||||||
chan_obj = None
|
|
||||||
channel_number = c
|
|
||||||
whatson = self.whats_on_now(channel_number, method, chan_dict=epgdict, chan_obj=chan_obj)
|
|
||||||
if whatson:
|
|
||||||
channel_guide_dict[channel_number] = whatson
|
|
||||||
return channel_guide_dict
|
|
||||||
|
|
||||||
def get_epg(self, method=None):
|
|
||||||
|
|
||||||
if not method:
|
|
||||||
if not self.def_method:
|
|
||||||
return
|
|
||||||
method = self.def_method
|
|
||||||
if method not in self.valid_epg_methods:
|
|
||||||
if not self.def_method:
|
|
||||||
return
|
|
||||||
method = self.def_method
|
|
||||||
|
|
||||||
if method in list(self.epgdict.keys()):
|
|
||||||
return self.epgdict[method]
|
|
||||||
|
|
||||||
self.update(method)
|
|
||||||
self.epgdict[method] = self.fhdhr.db.get_fhdhr_value("epg_dict", method) or {}
|
|
||||||
return self.epgdict[method]
|
|
||||||
|
|
||||||
def get_thumbnail(self, itemtype, itemid):
|
|
||||||
if itemtype == "channel":
|
|
||||||
chandict = self.find_channel_dict(itemid)
|
|
||||||
return chandict["thumbnail"]
|
|
||||||
elif itemtype == "content":
|
|
||||||
progdict = self.find_program_dict(itemid)
|
|
||||||
return progdict["thumbnail"]
|
|
||||||
return None
|
|
||||||
|
|
||||||
def find_channel_dict(self, channel_id):
|
|
||||||
epgdict = self.get_epg()
|
|
||||||
channel_list = [epgdict[x] for x in list(epgdict.keys())]
|
|
||||||
return next(item for item in channel_list if item["id"] == channel_id) or None
|
|
||||||
|
|
||||||
def find_program_dict(self, event_id):
|
|
||||||
epgdict = self.get_epg()
|
|
||||||
event_list = []
|
|
||||||
for channel in list(epgdict.keys()):
|
|
||||||
event_list.extend(epgdict[channel]["listing"])
|
|
||||||
return next(item for item in event_list if item["id"] == event_id) or None
|
|
||||||
|
|
||||||
def epg_method_selfadd(self):
|
|
||||||
for plugin_name in list(self.fhdhr.plugins.plugins.keys()):
|
|
||||||
if self.fhdhr.plugins.plugins[plugin_name].type == "alt_epg":
|
|
||||||
method = self.fhdhr.plugins.plugins[plugin_name].name.lower()
|
|
||||||
self.epg_handling[method] = self.fhdhr.plugins.plugins[plugin_name].Plugin_OBJ(self.channels, self.fhdhr.plugins.plugins[plugin_name].plugin_utils)
|
|
||||||
for origin in list(self.origins.origins_dict.keys()):
|
|
||||||
if origin.lower() not in list(self.epg_handling.keys()):
|
|
||||||
self.epg_handling[origin.lower()] = blocksEPG(self.fhdhr, self.channels, self.origins, origin)
|
|
||||||
self.fhdhr.config.register_valid_epg_method(origin, "Blocks")
|
|
||||||
self.valid_epg_methods.append(origin.lower())
|
|
||||||
|
|
||||||
def update(self, method=None):
|
|
||||||
|
|
||||||
if not method:
|
|
||||||
if not self.def_method:
|
|
||||||
return
|
|
||||||
method = self.def_method
|
|
||||||
if method not in self.valid_epg_methods:
|
|
||||||
if not self.def_method:
|
|
||||||
return
|
|
||||||
method = self.def_method
|
|
||||||
|
|
||||||
self.fhdhr.logger.info("Updating %s EPG cache." % method)
|
|
||||||
programguide = self.epg_handling[method].update_epg()
|
|
||||||
|
|
||||||
# sort the channel listings by time stamp
|
|
||||||
for cnum in list(programguide.keys()):
|
|
||||||
programguide[cnum]["listing"] = sorted(programguide[cnum]["listing"], key=lambda i: i['time_start'])
|
|
||||||
|
|
||||||
# Gernate Block periods for between EPG data, if missing
|
|
||||||
clean_prog_guide = {}
|
|
||||||
desired_start_time = (datetime.datetime.today() + datetime.timedelta(days=self.fhdhr.config.dict["epg"]["reverse_days"])).timestamp()
|
|
||||||
desired_end_time = (datetime.datetime.today() + datetime.timedelta(days=self.fhdhr.config.dict["epg"]["forward_days"])).timestamp()
|
|
||||||
for cnum in list(programguide.keys()):
|
|
||||||
|
|
||||||
if cnum not in list(clean_prog_guide.keys()):
|
|
||||||
clean_prog_guide[cnum] = programguide[cnum].copy()
|
|
||||||
clean_prog_guide[cnum]["listing"] = []
|
|
||||||
|
|
||||||
if method in [origin for origin in list(self.origins.origins_dict.keys())]:
|
|
||||||
chan_obj = self.channels.get_channel_obj("origin_id", programguide[cnum]["id"])
|
|
||||||
else:
|
|
||||||
chan_obj = None
|
|
||||||
|
|
||||||
# Generate Blocks for Channels containing No Lisiings
|
|
||||||
if not len(programguide[cnum]["listing"]):
|
|
||||||
timestamps = self.blocks.timestamps_between(desired_start_time, desired_end_time)
|
|
||||||
clean_prog_dicts = self.blocks.empty_channel_epg(timestamps, chan_dict=programguide[cnum], chan_obj=chan_obj)
|
|
||||||
clean_prog_guide[cnum]["listing"].extend(clean_prog_dicts)
|
|
||||||
|
|
||||||
else:
|
|
||||||
|
|
||||||
# Clean Timetamps from old xmltv method to timestamps
|
|
||||||
progindex = 0
|
|
||||||
for program_item in programguide[cnum]["listing"]:
|
|
||||||
for time_item in ["time_start", "time_end"]:
|
|
||||||
time_value = programguide[cnum]["listing"][progindex][time_item]
|
|
||||||
if str(time_value).endswith("+00:00"):
|
|
||||||
programguide[cnum]["listing"][progindex][time_item] = datetime.datetime.strptime(time_value, '%Y%m%d%H%M%S +00:00').timestamp()
|
|
||||||
elif str(time_value).endswith("+0000"):
|
|
||||||
programguide[cnum]["listing"][progindex][time_item] = datetime.datetime.strptime(time_value, '%Y%m%d%H%M%S +0000').timestamp()
|
|
||||||
else:
|
|
||||||
programguide[cnum]["listing"][progindex][time_item] = int(time_value)
|
|
||||||
progindex += 1
|
|
||||||
|
|
||||||
# Generate time before the listing actually starts
|
|
||||||
first_prog_time = programguide[cnum]["listing"][0]['time_start']
|
|
||||||
if desired_start_time < first_prog_time:
|
|
||||||
timestamps = self.blocks.timestamps_between(desired_start_time, first_prog_time)
|
|
||||||
clean_prog_dicts = self.blocks.empty_channel_epg(timestamps, chan_dict=programguide[cnum], chan_obj=chan_obj)
|
|
||||||
clean_prog_guide[cnum]["listing"].extend(clean_prog_dicts)
|
|
||||||
|
|
||||||
# Generate time blocks between events if chunks of time are missing
|
|
||||||
progindex = 0
|
|
||||||
for program_item in programguide[cnum]["listing"]:
|
|
||||||
try:
|
|
||||||
nextprog_dict = programguide[cnum]["listing"][progindex + 1]
|
|
||||||
except IndexError:
|
|
||||||
nextprog_dict = None
|
|
||||||
if not nextprog_dict:
|
|
||||||
clean_prog_guide[cnum]["listing"].append(program_item)
|
|
||||||
else:
|
|
||||||
if nextprog_dict['time_start'] > program_item['time_end']:
|
|
||||||
timestamps = self.blocks.timestamps_between(program_item['time_end'], nextprog_dict['time_start'])
|
|
||||||
clean_prog_dicts = self.blocks.empty_channel_epg(timestamps, chan_dict=programguide[cnum], chan_obj=chan_obj)
|
|
||||||
clean_prog_guide[cnum]["listing"].extend(clean_prog_dicts)
|
|
||||||
else:
|
|
||||||
clean_prog_guide[cnum]["listing"].append(program_item)
|
|
||||||
progindex += 1
|
|
||||||
|
|
||||||
# Generate time after the listing actually ends
|
|
||||||
end_prog_time = programguide[cnum]["listing"][progindex]['time_end']
|
|
||||||
if desired_end_time > end_prog_time:
|
|
||||||
timestamps = self.blocks.timestamps_between(end_prog_time, desired_end_time)
|
|
||||||
clean_prog_dicts = self.blocks.empty_channel_epg(timestamps, chan_dict=programguide[cnum], chan_obj=chan_obj)
|
|
||||||
clean_prog_guide[cnum]["listing"].extend(clean_prog_dicts)
|
|
||||||
|
|
||||||
programguide = clean_prog_guide.copy()
|
|
||||||
|
|
||||||
# if a stock method, generate Blocks EPG for missing channels
|
|
||||||
if method in [origin for origin in list(self.origins.origins_dict.keys())]:
|
|
||||||
timestamps = self.blocks.timestamps
|
|
||||||
for fhdhr_id in [x["id"] for x in self.channels.get_channels(method)]:
|
|
||||||
chan_obj = self.channels.get_channel_obj("id", fhdhr_id, method)
|
|
||||||
if str(chan_obj.number) not in list(programguide.keys()):
|
|
||||||
programguide[str(chan_obj.number)] = chan_obj.epgdict
|
|
||||||
clean_prog_dicts = self.blocks.empty_channel_epg(timestamps, chan_obj=chan_obj)
|
|
||||||
programguide[str(chan_obj.number)]["listing"].extend(clean_prog_dicts)
|
|
||||||
|
|
||||||
# Make Thumbnails for missing thumbnails
|
|
||||||
for cnum in list(programguide.keys()):
|
|
||||||
if not programguide[cnum]["thumbnail"]:
|
|
||||||
programguide[cnum]["thumbnail"] = "/api/images?method=generate&type=channel&message=%s" % programguide[cnum]["number"]
|
|
||||||
programguide[cnum]["listing"] = sorted(programguide[cnum]["listing"], key=lambda i: i['time_start'])
|
|
||||||
prog_index = 0
|
|
||||||
for program_item in programguide[cnum]["listing"]:
|
|
||||||
if not programguide[cnum]["listing"][prog_index]["thumbnail"]:
|
|
||||||
programguide[cnum]["listing"][prog_index]["thumbnail"] = programguide[cnum]["thumbnail"]
|
|
||||||
prog_index += 1
|
|
||||||
|
|
||||||
# Get Totals
|
|
||||||
total_channels = len(list(programguide.keys()))
|
|
||||||
total_programs = 0
|
|
||||||
|
|
||||||
# Sort the channels
|
|
||||||
sorted_channel_list = channel_sort(list(programguide.keys()))
|
|
||||||
sorted_chan_guide = {}
|
|
||||||
for channel in sorted_channel_list:
|
|
||||||
total_programs += len(programguide[cnum]["listing"])
|
|
||||||
sorted_chan_guide[channel] = programguide[channel]
|
|
||||||
|
|
||||||
self.epgdict[method] = sorted_chan_guide
|
|
||||||
self.fhdhr.db.set_fhdhr_value("epg_dict", method, programguide)
|
|
||||||
self.fhdhr.db.set_fhdhr_value("update_time", method, time.time())
|
|
||||||
self.fhdhr.logger.info("Wrote %s EPG cache. %s Programs for %s Channels" % (method, total_programs, total_channels))
|
|
||||||
|
|
||||||
def start(self):
|
|
||||||
self.fhdhr.logger.info("EPG Update Thread Starting")
|
|
||||||
self.fhdhr.threads["epg"].start()
|
|
||||||
|
|
||||||
def stop(self):
|
|
||||||
self.fhdhr.logger.info("EPG Update Thread Stopping")
|
|
||||||
|
|
||||||
def run(self):
|
|
||||||
time.sleep(1800)
|
|
||||||
while True:
|
|
||||||
for epg_method in self.epg_methods:
|
|
||||||
last_update_time = self.fhdhr.db.get_fhdhr_value("update_time", epg_method)
|
|
||||||
updatetheepg = False
|
|
||||||
if not last_update_time:
|
|
||||||
updatetheepg = True
|
|
||||||
elif time.time() >= (last_update_time + self.sleeptime[epg_method]):
|
|
||||||
updatetheepg = True
|
|
||||||
if updatetheepg:
|
|
||||||
self.fhdhr.api.get("%s&source=%s" % (self.epg_update_url, epg_method))
|
|
||||||
time.sleep(1800)
|
|
||||||
|
|
||||||
self.stop()
|
|
||||||
@ -1,120 +0,0 @@
|
|||||||
import datetime
|
|
||||||
|
|
||||||
|
|
||||||
class blocksEPG():
|
|
||||||
|
|
||||||
def __init__(self, fhdhr, channels, origins, origin):
|
|
||||||
self.fhdhr = fhdhr
|
|
||||||
self.channels = channels
|
|
||||||
self.origins = origins
|
|
||||||
self.origin = origin
|
|
||||||
|
|
||||||
def update_epg(self):
|
|
||||||
programguide = {}
|
|
||||||
|
|
||||||
timestamps = self.timestamps
|
|
||||||
|
|
||||||
for fhdhr_id in [x["id"] for x in self.channels.get_channels(self.origin)]:
|
|
||||||
chan_obj = self.channels.get_channel_obj("id", fhdhr_id, self.origin)
|
|
||||||
|
|
||||||
if str(chan_obj.number) not in list(programguide.keys()):
|
|
||||||
programguide[str(chan_obj.number)] = chan_obj.epgdict
|
|
||||||
|
|
||||||
clean_prog_dicts = self.empty_channel_epg(timestamps, chan_obj=chan_obj)
|
|
||||||
for clean_prog_dict in clean_prog_dicts:
|
|
||||||
programguide[str(chan_obj.number)]["listing"].append(clean_prog_dict)
|
|
||||||
|
|
||||||
return programguide
|
|
||||||
|
|
||||||
@property
|
|
||||||
def timestamps(self):
|
|
||||||
desired_start_time = (datetime.datetime.today() + datetime.timedelta(days=self.fhdhr.config.dict["epg"]["reverse_days"])).timestamp()
|
|
||||||
desired_end_time = (datetime.datetime.today() + datetime.timedelta(days=self.fhdhr.config.dict["epg"]["forward_days"])).timestamp()
|
|
||||||
return self.timestamps_between(desired_start_time, desired_end_time)
|
|
||||||
|
|
||||||
def timestamps_between(self, starttime, endtime):
|
|
||||||
timestamps = []
|
|
||||||
desired_blocksize = self.fhdhr.config.dict["epg"]["block_size"]
|
|
||||||
current_time = starttime
|
|
||||||
while (current_time + desired_blocksize) <= endtime:
|
|
||||||
timestampdict = {
|
|
||||||
"time_start": current_time,
|
|
||||||
"time_end": current_time + desired_blocksize,
|
|
||||||
}
|
|
||||||
timestamps.append(timestampdict)
|
|
||||||
current_time += desired_blocksize
|
|
||||||
if current_time < endtime:
|
|
||||||
timestampdict = {
|
|
||||||
"time_start": current_time,
|
|
||||||
"time_end": endtime
|
|
||||||
}
|
|
||||||
timestamps.append(timestampdict)
|
|
||||||
return timestamps
|
|
||||||
|
|
||||||
def single_channel_epg(self, timestampdict, chan_obj=None, chan_dict=None):
|
|
||||||
|
|
||||||
if chan_obj:
|
|
||||||
content_id = "%s_%s" % (chan_obj.dict["origin_id"], timestampdict['time_start'])
|
|
||||||
elif chan_dict:
|
|
||||||
content_id = "%s_%s" % (chan_dict["id"], timestampdict['time_start'])
|
|
||||||
|
|
||||||
clean_prog_dict = {
|
|
||||||
"time_start": timestampdict['time_start'],
|
|
||||||
"time_end": timestampdict['time_end'],
|
|
||||||
"duration_minutes": (timestampdict['time_end'] - timestampdict['time_start']) / 60,
|
|
||||||
"title": "Unavailable",
|
|
||||||
"sub-title": "Unavailable",
|
|
||||||
"description": "Unavailable",
|
|
||||||
"rating": "N/A",
|
|
||||||
"episodetitle": None,
|
|
||||||
"releaseyear": None,
|
|
||||||
"genres": [],
|
|
||||||
"seasonnumber": None,
|
|
||||||
"episodenumber": None,
|
|
||||||
"isnew": False,
|
|
||||||
"id": content_id,
|
|
||||||
}
|
|
||||||
if chan_obj:
|
|
||||||
clean_prog_dict["thumbnail"] = chan_obj.thumbnail
|
|
||||||
elif chan_dict:
|
|
||||||
clean_prog_dict["thumbnail"] = chan_dict["thumbnail"]
|
|
||||||
if not clean_prog_dict["thumbnail"]:
|
|
||||||
clean_prog_dict["thumbnail"] = "/api/images?method=generate&type=content&message=%s" % content_id
|
|
||||||
|
|
||||||
return clean_prog_dict
|
|
||||||
|
|
||||||
def empty_channel_epg(self, timestamps, chan_obj=None, chan_dict=None):
|
|
||||||
clean_prog_dicts = []
|
|
||||||
for timestampdict in timestamps:
|
|
||||||
clean_prog_dict = self.single_channel_epg(timestampdict, chan_obj=chan_obj, chan_dict=chan_dict)
|
|
||||||
clean_prog_dicts.append(clean_prog_dict)
|
|
||||||
return clean_prog_dicts
|
|
||||||
|
|
||||||
def empty_listing(self, chan_obj=None, chan_dict=None):
|
|
||||||
clean_prog_dict = {
|
|
||||||
"time_start": None,
|
|
||||||
"time_end": None,
|
|
||||||
"duration_minutes": None,
|
|
||||||
"title": "Unavailable",
|
|
||||||
"sub-title": "Unavailable",
|
|
||||||
"description": "Unavailable",
|
|
||||||
"rating": "N/A",
|
|
||||||
"episodetitle": None,
|
|
||||||
"releaseyear": None,
|
|
||||||
"genres": [],
|
|
||||||
"seasonnumber": None,
|
|
||||||
"episodenumber": None,
|
|
||||||
"isnew": False,
|
|
||||||
"id": "Unavailable",
|
|
||||||
}
|
|
||||||
|
|
||||||
if chan_obj:
|
|
||||||
clean_prog_dict["thumbnail"] = chan_obj.thumbnail
|
|
||||||
elif chan_dict:
|
|
||||||
clean_prog_dict["thumbnail"] = chan_dict["thumbnail"]
|
|
||||||
else:
|
|
||||||
clean_prog_dict["thumbnail"] = None
|
|
||||||
if not clean_prog_dict["thumbnail"]:
|
|
||||||
clean_prog_dict["thumbnail"] = "/api/images?method=generate&type=content&message=Unavailable"
|
|
||||||
|
|
||||||
return clean_prog_dict
|
|
||||||
0
fHDHR/device/epgtypes/__init__.py
Normal file
0
fHDHR/device/epgtypes/__init__.py
Normal file
66
fHDHR/device/epgtypes/blocks.py
Normal file
66
fHDHR/device/epgtypes/blocks.py
Normal file
@ -0,0 +1,66 @@
|
|||||||
|
import datetime
|
||||||
|
|
||||||
|
|
||||||
|
class blocksEPG():
|
||||||
|
|
||||||
|
def __init__(self, fhdhr, channels):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
self.channels = channels
|
||||||
|
|
||||||
|
def update_epg(self):
|
||||||
|
programguide = {}
|
||||||
|
|
||||||
|
timestamps = []
|
||||||
|
todaydate = datetime.date.today()
|
||||||
|
for x in range(0, 6):
|
||||||
|
xdate = todaydate + datetime.timedelta(days=x)
|
||||||
|
xtdate = xdate + datetime.timedelta(days=1)
|
||||||
|
|
||||||
|
for hour in range(0, 24):
|
||||||
|
time_start = datetime.datetime.combine(xdate, datetime.time(hour, 0))
|
||||||
|
if hour + 1 < 24:
|
||||||
|
time_end = datetime.datetime.combine(xdate, datetime.time(hour + 1, 0))
|
||||||
|
else:
|
||||||
|
time_end = datetime.datetime.combine(xtdate, datetime.time(0, 0))
|
||||||
|
timestampdict = {
|
||||||
|
"time_start": str(time_start.strftime('%Y%m%d%H%M%S')) + " +0000",
|
||||||
|
"time_end": str(time_end.strftime('%Y%m%d%H%M%S')) + " +0000",
|
||||||
|
}
|
||||||
|
timestamps.append(timestampdict)
|
||||||
|
|
||||||
|
for fhdhr_id in list(self.channels.list.keys()):
|
||||||
|
c = self.channels.list[fhdhr_id].dict
|
||||||
|
|
||||||
|
if str(c["number"]) not in list(programguide.keys()):
|
||||||
|
programguide[str(c["number"])] = {
|
||||||
|
"callsign": c["callsign"],
|
||||||
|
"name": c["name"],
|
||||||
|
"number": c["number"],
|
||||||
|
"id": c["origin_id"],
|
||||||
|
"thumbnail": ("/api/images?method=generate&type=channel&message=%s" % (str(c['number']))),
|
||||||
|
"listing": [],
|
||||||
|
}
|
||||||
|
|
||||||
|
for timestamp in timestamps:
|
||||||
|
clean_prog_dict = {
|
||||||
|
"time_start": timestamp['time_start'],
|
||||||
|
"time_end": timestamp['time_end'],
|
||||||
|
"duration_minutes": 60,
|
||||||
|
"thumbnail": ("/api/images?method=generate&type=content&message=%s" % (str(c["origin_id"]) + "_" + str(timestamp['time_start']).split(" ")[0])),
|
||||||
|
"title": "Unavailable",
|
||||||
|
"sub-title": "Unavailable",
|
||||||
|
"description": "Unavailable",
|
||||||
|
"rating": "N/A",
|
||||||
|
"episodetitle": None,
|
||||||
|
"releaseyear": None,
|
||||||
|
"genres": [],
|
||||||
|
"seasonnumber": None,
|
||||||
|
"episodenumber": None,
|
||||||
|
"isnew": False,
|
||||||
|
"id": str(c["origin_id"]) + "_" + str(timestamp['time_start']).split(" ")[0],
|
||||||
|
}
|
||||||
|
|
||||||
|
programguide[str(c["number"])]["listing"].append(clean_prog_dict)
|
||||||
|
|
||||||
|
return programguide
|
||||||
@ -5,26 +5,26 @@ from fHDHR.tools import xmldictmaker
|
|||||||
from fHDHR.exceptions import EPGSetupError
|
from fHDHR.exceptions import EPGSetupError
|
||||||
|
|
||||||
|
|
||||||
class Plugin_OBJ():
|
class zap2itEPG():
|
||||||
|
|
||||||
def __init__(self, channels, plugin_utils):
|
def __init__(self, fhdhr, channels):
|
||||||
self.plugin_utils = plugin_utils
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
self.channels = channels
|
self.channels = channels
|
||||||
|
|
||||||
@property
|
self.postalcode = self.fhdhr.config.dict["zap2it"]["postalcode"]
|
||||||
def postalcode(self):
|
|
||||||
if self.plugin_utils.config.dict["zap2it"]["postalcode"]:
|
def get_location(self):
|
||||||
return self.plugin_utils.config.dict["zap2it"]["postalcode"]
|
self.fhdhr.logger.warning("Zap2it postalcode not set, attempting to retrieve.")
|
||||||
try:
|
if not self.postalcode:
|
||||||
postalcode_url = 'http://ipinfo.io/json'
|
try:
|
||||||
postalcode_req = self.plugin_utils.web.session.get(postalcode_url)
|
postalcode_url = 'http://ipinfo.io/json'
|
||||||
data = postalcode_req.json()
|
postalcode_req = self.fhdhr.web.session.get(postalcode_url)
|
||||||
postalcode = data["postal"]
|
data = postalcode_req.json()
|
||||||
except Exception as e:
|
self.postalcode = data["postal"]
|
||||||
raise EPGSetupError("Unable to automatically optain postalcode: %s" % e)
|
except Exception as e:
|
||||||
postalcode = None
|
raise EPGSetupError("Unable to automatically optain zap2it postalcode: " + str(e))
|
||||||
return postalcode
|
return self.postalcode
|
||||||
|
|
||||||
def update_epg(self):
|
def update_epg(self):
|
||||||
programguide = {}
|
programguide = {}
|
||||||
@ -32,12 +32,12 @@ class Plugin_OBJ():
|
|||||||
# Start time parameter is now rounded down to nearest `zap_timespan`, in s.
|
# Start time parameter is now rounded down to nearest `zap_timespan`, in s.
|
||||||
zap_time = datetime.datetime.utcnow().timestamp()
|
zap_time = datetime.datetime.utcnow().timestamp()
|
||||||
self.remove_stale_cache(zap_time)
|
self.remove_stale_cache(zap_time)
|
||||||
zap_time_window = int(self.plugin_utils.config.dict["zap2it"]["timespan"]) * 3600
|
zap_time_window = int(self.fhdhr.config.dict["zap2it"]["timespan"]) * 3600
|
||||||
zap_time = int(zap_time - (zap_time % zap_time_window))
|
zap_time = int(zap_time - (zap_time % zap_time_window))
|
||||||
|
|
||||||
# Fetch data in `zap_timespan` chunks.
|
# Fetch data in `zap_timespan` chunks.
|
||||||
i_times = []
|
i_times = []
|
||||||
for i in range(int(7 * 24 / int(self.plugin_utils.config.dict["zap2it"]["timespan"]))):
|
for i in range(int(7 * 24 / int(self.fhdhr.config.dict["zap2it"]["timespan"]))):
|
||||||
i_times.append(zap_time + (i * zap_time_window))
|
i_times.append(zap_time + (i * zap_time_window))
|
||||||
|
|
||||||
cached_items = self.get_cached(i_times)
|
cached_items = self.get_cached(i_times)
|
||||||
@ -63,13 +63,11 @@ class Plugin_OBJ():
|
|||||||
eventdict = xmldictmaker(event, ["startTime", "endTime", "duration", "rating", "flag"], list_items=["filter", "flag"])
|
eventdict = xmldictmaker(event, ["startTime", "endTime", "duration", "rating", "flag"], list_items=["filter", "flag"])
|
||||||
progdict = xmldictmaker(event['program'], ["title", "sub-title", "releaseYear", "episodeTitle", "shortDesc", "season", "episode", "id"])
|
progdict = xmldictmaker(event['program'], ["title", "sub-title", "releaseYear", "episodeTitle", "shortDesc", "season", "episode", "id"])
|
||||||
|
|
||||||
timestamp = self.zap2it_timestamps(eventdict['startTime'], eventdict['endTime'])
|
|
||||||
|
|
||||||
clean_prog_dict = {
|
clean_prog_dict = {
|
||||||
"time_start": timestamp['time_start'],
|
"time_start": self.xmltimestamp_zap(eventdict['startTime']),
|
||||||
"time_end": timestamp['time_end'],
|
"time_end": self.xmltimestamp_zap(eventdict['endTime']),
|
||||||
"duration_minutes": eventdict['duration'],
|
"duration_minutes": eventdict['duration'],
|
||||||
"thumbnail": "https://zap2it.tmsimg.com/assets/%s.jpg" % eventdict['thumbnail'],
|
"thumbnail": str("https://zap2it.tmsimg.com/assets/" + str(eventdict['thumbnail']) + ".jpg"),
|
||||||
"title": progdict['title'] or "Unavailable",
|
"title": progdict['title'] or "Unavailable",
|
||||||
"sub-title": progdict['sub-title'] or "Unavailable",
|
"sub-title": progdict['sub-title'] or "Unavailable",
|
||||||
"description": progdict['shortDesc'] or "Unavailable",
|
"description": progdict['shortDesc'] or "Unavailable",
|
||||||
@ -80,30 +78,30 @@ class Plugin_OBJ():
|
|||||||
"seasonnumber": progdict['season'],
|
"seasonnumber": progdict['season'],
|
||||||
"episodenumber": progdict['episode'],
|
"episodenumber": progdict['episode'],
|
||||||
"isnew": False,
|
"isnew": False,
|
||||||
"id": str(progdict['id'] or "%s_%s" % (cdict["channelId"], timestamp['time_start'])),
|
"id": str(progdict['id'] or self.xmltimestamp_zap(eventdict['startTime'])),
|
||||||
}
|
}
|
||||||
|
|
||||||
for f in eventdict['filter']:
|
for f in eventdict['filter']:
|
||||||
clean_prog_dict["genres"].append(f.replace('filter-', ''))
|
clean_prog_dict["genres"].append(f.replace('filter-', ''))
|
||||||
|
|
||||||
if 'movie' in clean_prog_dict['genres'] and clean_prog_dict['releaseyear']:
|
if 'movie' in clean_prog_dict['genres'] and clean_prog_dict['releaseyear']:
|
||||||
clean_prog_dict["sub-title"] = 'Movie: %s' % clean_prog_dict['releaseyear']
|
clean_prog_dict["sub-title"] = 'Movie: ' + clean_prog_dict['releaseyear']
|
||||||
elif clean_prog_dict['episodetitle']:
|
elif clean_prog_dict['episodetitle']:
|
||||||
clean_prog_dict["sub-title"] = clean_prog_dict['episodetitle']
|
clean_prog_dict["sub-title"] = clean_prog_dict['episodetitle']
|
||||||
|
|
||||||
if 'New' in eventdict['flag'] and 'live' not in eventdict['flag']:
|
if 'New' in eventdict['flag'] and 'live' not in eventdict['flag']:
|
||||||
clean_prog_dict["isnew"] = True
|
clean_prog_dict["isnew"] = True
|
||||||
|
|
||||||
if not any((d['time_start'] == clean_prog_dict['time_start'] and d['id'] == clean_prog_dict['id']) for d in programguide[str(cdict["channelNo"])]["listing"]):
|
if not any(d['id'] == clean_prog_dict['id'] for d in programguide[str(cdict["channelNo"])]["listing"]):
|
||||||
programguide[str(cdict["channelNo"])]["listing"].append(clean_prog_dict)
|
programguide[str(cdict["channelNo"])]["listing"].append(clean_prog_dict)
|
||||||
|
|
||||||
return programguide
|
return programguide
|
||||||
|
|
||||||
def zap2it_timestamps(self, starttime, endtime):
|
def xmltimestamp_zap(self, inputtime):
|
||||||
timestamp = {}
|
xmltime = inputtime.replace('Z', '+00:00')
|
||||||
for time_item, time_value in zip(["time_start", "time_end"], [starttime, endtime]):
|
xmltime = datetime.datetime.fromisoformat(xmltime)
|
||||||
timestamp[time_item] = datetime.datetime.fromisoformat(time_value.replace('Z', '+00:00')).timestamp()
|
xmltime = xmltime.strftime('%Y%m%d%H%M%S %z')
|
||||||
return timestamp
|
return xmltime
|
||||||
|
|
||||||
def get_cached(self, i_times):
|
def get_cached(self, i_times):
|
||||||
|
|
||||||
@ -111,18 +109,18 @@ class Plugin_OBJ():
|
|||||||
for i_time in i_times:
|
for i_time in i_times:
|
||||||
|
|
||||||
parameters = {
|
parameters = {
|
||||||
'aid': self.plugin_utils.config.dict["zap2it"]['affiliate_id'],
|
'aid': self.fhdhr.config.dict["zap2it"]['affiliate_id'],
|
||||||
'country': self.plugin_utils.config.dict["zap2it"]['country'],
|
'country': self.fhdhr.config.dict["zap2it"]['country'],
|
||||||
'device': self.plugin_utils.config.dict["zap2it"]['device'],
|
'device': self.fhdhr.config.dict["zap2it"]['device'],
|
||||||
'headendId': self.plugin_utils.config.dict["zap2it"]['headendid'],
|
'headendId': self.fhdhr.config.dict["zap2it"]['headendid'],
|
||||||
'isoverride': "true",
|
'isoverride': "true",
|
||||||
'languagecode': self.plugin_utils.config.dict["zap2it"]['languagecode'],
|
'languagecode': self.fhdhr.config.dict["zap2it"]['languagecode'],
|
||||||
'pref': 'm,p',
|
'pref': 'm,p',
|
||||||
'timespan': self.plugin_utils.config.dict["zap2it"]['timespan'],
|
'timespan': self.fhdhr.config.dict["zap2it"]['timespan'],
|
||||||
'timezone': self.plugin_utils.config.dict["zap2it"]['timezone'],
|
'timezone': self.fhdhr.config.dict["zap2it"]['timezone'],
|
||||||
'userId': self.plugin_utils.config.dict["zap2it"]['userid'],
|
'userId': self.fhdhr.config.dict["zap2it"]['userid'],
|
||||||
'postalCode': str(self.postalcode),
|
'postalCode': str(self.postalcode or self.get_location()),
|
||||||
'lineupId': '%s-%s-DEFAULT' % (self.plugin_utils.config.dict["zap2it"]['country'], self.plugin_utils.config.dict["zap2it"]['device']),
|
'lineupId': '%s-%s-DEFAULT' % (self.fhdhr.config.dict["zap2it"]['country'], self.fhdhr.config.dict["zap2it"]['device']),
|
||||||
'time': i_time,
|
'time': i_time,
|
||||||
'Activity_ID': 1,
|
'Activity_ID': 1,
|
||||||
'FromPage': "TV%20Guide",
|
'FromPage': "TV%20Guide",
|
||||||
@ -131,43 +129,43 @@ class Plugin_OBJ():
|
|||||||
url = 'https://tvlistings.zap2it.com/api/grid?'
|
url = 'https://tvlistings.zap2it.com/api/grid?'
|
||||||
url += urllib.parse.urlencode(parameters)
|
url += urllib.parse.urlencode(parameters)
|
||||||
self.get_cached_item(str(i_time), url)
|
self.get_cached_item(str(i_time), url)
|
||||||
cache_list = self.plugin_utils.db.get_plugin_value("cache_list", "epg_cache", "zap2it") or []
|
cache_list = self.fhdhr.db.get_cacheitem_value("cache_list", "offline_cache", "zap2it") or []
|
||||||
return [self.plugin_utils.db.get_plugin_value(x, "epg_cache", "zap2it") for x in cache_list]
|
return [self.fhdhr.db.get_cacheitem_value(x, "offline_cache", "zap2it") for x in cache_list]
|
||||||
|
|
||||||
def get_cached_item(self, cache_key, url):
|
def get_cached_item(self, cache_key, url):
|
||||||
cacheitem = self.plugin_utils.db.get_plugin_value(cache_key, "epg_cache", "zap2it")
|
cacheitem = self.fhdhr.db.get_cacheitem_value(cache_key, "offline_cache", "zap2it")
|
||||||
if cacheitem:
|
if cacheitem:
|
||||||
self.plugin_utils.logger.info("FROM CACHE: %s" % cache_key)
|
self.fhdhr.logger.info('FROM CACHE: ' + str(cache_key))
|
||||||
return cacheitem
|
return cacheitem
|
||||||
else:
|
else:
|
||||||
self.plugin_utils.logger.info("Fetching: %s" % url)
|
self.fhdhr.logger.info('Fetching: ' + url)
|
||||||
try:
|
try:
|
||||||
resp = self.plugin_utils.web.session.get(url)
|
resp = self.fhdhr.web.session.get(url)
|
||||||
except self.plugin_utils.web.exceptions.HTTPError:
|
except self.fhdhr.web.exceptions.HTTPError:
|
||||||
self.plugin_utils.logger.info('Got an error! Ignoring it.')
|
self.fhdhr.logger.info('Got an error! Ignoring it.')
|
||||||
return
|
return
|
||||||
result = resp.json()
|
result = resp.json()
|
||||||
|
|
||||||
self.plugin_utils.db.set_plugin_value(cache_key, "epg_cache", result, "zap2it")
|
self.fhdhr.db.set_cacheitem_value(cache_key, "offline_cache", result, "zap2it")
|
||||||
cache_list = self.plugin_utils.db.get_plugin_value("cache_list", "epg_cache", "zap2it") or []
|
cache_list = self.fhdhr.db.get_cacheitem_value("cache_list", "offline_cache", "zap2it") or []
|
||||||
cache_list.append(cache_key)
|
cache_list.append(cache_key)
|
||||||
self.plugin_utils.db.set_plugin_value("cache_list", "epg_cache", cache_list, "zap2it")
|
self.fhdhr.db.set_cacheitem_value("cache_list", "offline_cache", cache_list, "zap2it")
|
||||||
|
|
||||||
def remove_stale_cache(self, zap_time):
|
def remove_stale_cache(self, zap_time):
|
||||||
|
|
||||||
cache_list = self.plugin_utils.db.get_plugin_value("cache_list", "epg_cache", "zap2it") or []
|
cache_list = self.fhdhr.db.get_cacheitem_value("cache_list", "offline_cache", "zap2it") or []
|
||||||
cache_to_kill = []
|
cache_to_kill = []
|
||||||
for cacheitem in cache_list:
|
for cacheitem in cache_list:
|
||||||
cachedate = int(cacheitem)
|
cachedate = int(cacheitem)
|
||||||
if cachedate < zap_time:
|
if cachedate < zap_time:
|
||||||
cache_to_kill.append(cacheitem)
|
cache_to_kill.append(cacheitem)
|
||||||
self.plugin_utils.db.delete_plugin_value(cacheitem, "epg_cache", "zap2it")
|
self.fhdhr.db.delete_cacheitem_value(cacheitem, "offline_cache", "zap2it")
|
||||||
self.plugin_utils.logger.info("Removing stale cache: %s" % cacheitem)
|
self.fhdhr.logger.info('Removing stale cache: ' + str(cacheitem))
|
||||||
self.plugin_utils.db.set_plugin_value("cache_list", "epg_cache", [x for x in cache_list if x not in cache_to_kill], "zap2it")
|
self.fhdhr.db.set_cacheitem_value("cache_list", "offline_cache", [x for x in cache_list if x not in cache_to_kill], "zap2it")
|
||||||
|
|
||||||
def clear_cache(self):
|
def clear_cache(self):
|
||||||
cache_list = self.plugin_utils.db.get_plugin_value("cache_list", "epg_cache", "zap2it") or []
|
cache_list = self.fhdhr.db.get_cacheitem_value("cache_list", "offline_cache", "zap2it") or []
|
||||||
for cacheitem in cache_list:
|
for cacheitem in cache_list:
|
||||||
self.plugin_utils.db.delete_plugin_value(cacheitem, "epg_cache", "zap2it")
|
self.fhdhr.db.delete_cacheitem_value(cacheitem, "offline_cache", "zap2it")
|
||||||
self.plugin_utils.logger.info("Removing cache: %s" % cacheitem)
|
self.fhdhr.logger.info('Removing cache: ' + str(cacheitem))
|
||||||
self.plugin_utils.db.delete_plugin_value("cache_list", "epg_cache", "zap2it")
|
self.fhdhr.db.delete_cacheitem_value("cache_list", "offline_cache", "zap2it")
|
||||||
196
fHDHR/device/ssdp.py
Normal file
196
fHDHR/device/ssdp.py
Normal file
@ -0,0 +1,196 @@
|
|||||||
|
# Adapted from https://github.com/MoshiBin/ssdpy and https://github.com/ZeWaren/python-upnp-ssdp-example
|
||||||
|
import socket
|
||||||
|
import struct
|
||||||
|
|
||||||
|
|
||||||
|
class fHDHR_Detect():
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
self.fhdhr.db.delete_fhdhr_value("ssdp_detect", "list")
|
||||||
|
|
||||||
|
def set(self, location):
|
||||||
|
detect_list = self.fhdhr.db.get_fhdhr_value("ssdp_detect", "list") or []
|
||||||
|
if location not in detect_list:
|
||||||
|
detect_list.append(location)
|
||||||
|
self.fhdhr.db.set_fhdhr_value("ssdp_detect", "list", detect_list)
|
||||||
|
|
||||||
|
def get(self):
|
||||||
|
return self.fhdhr.db.get_fhdhr_value("ssdp_detect", "list") or []
|
||||||
|
|
||||||
|
|
||||||
|
class SSDPServer():
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
self.detect_method = fHDHR_Detect(fhdhr)
|
||||||
|
|
||||||
|
if fhdhr.config.dict["fhdhr"]["discovery_address"]:
|
||||||
|
|
||||||
|
self.sock = None
|
||||||
|
self.proto = "ipv4"
|
||||||
|
self.port = 1900
|
||||||
|
self.iface = None
|
||||||
|
self.address = None
|
||||||
|
self.server = 'fHDHR/%s UPnP/1.0' % fhdhr.version
|
||||||
|
|
||||||
|
allowed_protos = ("ipv4", "ipv6")
|
||||||
|
if self.proto not in allowed_protos:
|
||||||
|
raise ValueError("Invalid proto - expected one of {}".format(allowed_protos))
|
||||||
|
|
||||||
|
self.nt = 'urn:schemas-upnp-org:device:MediaServer:1'
|
||||||
|
self.usn = 'uuid:' + fhdhr.config.dict["main"]["uuid"] + '::' + self.nt
|
||||||
|
self.location = ('http://' + fhdhr.config.dict["fhdhr"]["discovery_address"] + ':' +
|
||||||
|
str(fhdhr.config.dict["fhdhr"]["port"]) + '/device.xml')
|
||||||
|
self.al = self.location
|
||||||
|
self.max_age = 1800
|
||||||
|
self._iface = None
|
||||||
|
|
||||||
|
if self.proto == "ipv4":
|
||||||
|
self._af_type = socket.AF_INET
|
||||||
|
self._broadcast_ip = "239.255.255.250"
|
||||||
|
self._address = (self._broadcast_ip, self.port)
|
||||||
|
self.bind_address = "0.0.0.0"
|
||||||
|
elif self.proto == "ipv6":
|
||||||
|
self._af_type = socket.AF_INET6
|
||||||
|
self._broadcast_ip = "ff02::c"
|
||||||
|
self._address = (self._broadcast_ip, self.port, 0, 0)
|
||||||
|
self.bind_address = "::"
|
||||||
|
|
||||||
|
self.broadcast_addy = "{}:{}".format(self._broadcast_ip, self.port)
|
||||||
|
|
||||||
|
self.sock = socket.socket(self._af_type, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
|
||||||
|
self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
||||||
|
|
||||||
|
# Bind to specific interface
|
||||||
|
if self.iface is not None:
|
||||||
|
self.sock.setsockopt(socket.SOL_SOCKET, getattr(socket, "SO_BINDTODEVICE", 25), self.iface)
|
||||||
|
|
||||||
|
# Subscribe to multicast address
|
||||||
|
if self.proto == "ipv4":
|
||||||
|
mreq = socket.inet_aton(self._broadcast_ip)
|
||||||
|
if self.address is not None:
|
||||||
|
mreq += socket.inet_aton(self.address)
|
||||||
|
else:
|
||||||
|
mreq += struct.pack(b"@I", socket.INADDR_ANY)
|
||||||
|
self.sock.setsockopt(
|
||||||
|
socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq,
|
||||||
|
)
|
||||||
|
# Allow multicasts on loopback devices (necessary for testing)
|
||||||
|
self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, 1)
|
||||||
|
elif self.proto == "ipv6":
|
||||||
|
# In IPv6 we use the interface index, not the address when subscribing to the group
|
||||||
|
mreq = socket.inet_pton(socket.AF_INET6, self._broadcast_ip)
|
||||||
|
if self.iface is not None:
|
||||||
|
iface_index = socket.if_nametoindex(self.iface)
|
||||||
|
# Send outgoing packets from the same interface
|
||||||
|
self.sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_IF, iface_index)
|
||||||
|
mreq += struct.pack(b"@I", iface_index)
|
||||||
|
else:
|
||||||
|
mreq += socket.inet_pton(socket.AF_INET6, "::")
|
||||||
|
self.sock.setsockopt(
|
||||||
|
socket.IPPROTO_IPV6, socket.IPV6_JOIN_GROUP, mreq,
|
||||||
|
)
|
||||||
|
self.sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_LOOP, 1)
|
||||||
|
self.sock.bind((self.bind_address, self.port))
|
||||||
|
|
||||||
|
self.notify_payload = self.create_notify_payload()
|
||||||
|
self.msearch_payload = self.create_msearch_payload()
|
||||||
|
|
||||||
|
self.m_search()
|
||||||
|
|
||||||
|
def on_recv(self, data, address):
|
||||||
|
self.fhdhr.logger.debug("Received packet from {}: {}".format(address, data))
|
||||||
|
|
||||||
|
(host, port) = address
|
||||||
|
|
||||||
|
try:
|
||||||
|
header, payload = data.decode().split('\r\n\r\n')[:2]
|
||||||
|
except ValueError:
|
||||||
|
self.fhdhr.logger.error("Error with Received packet from {}: {}".format(address, data))
|
||||||
|
return
|
||||||
|
|
||||||
|
lines = header.split('\r\n')
|
||||||
|
cmd = lines[0].split(' ')
|
||||||
|
lines = map(lambda x: x.replace(': ', ':', 1), lines[1:])
|
||||||
|
lines = filter(lambda x: len(x) > 0, lines)
|
||||||
|
|
||||||
|
headers = [x.split(':', 1) for x in lines]
|
||||||
|
headers = dict(map(lambda x: (x[0].lower(), x[1]), headers))
|
||||||
|
|
||||||
|
if cmd[0] == 'M-SEARCH' and cmd[1] == '*':
|
||||||
|
# SSDP discovery
|
||||||
|
self.fhdhr.logger.debug("Received qualifying M-SEARCH from {}".format(address))
|
||||||
|
self.fhdhr.logger.debug("M-SEARCH data: {}".format(headers))
|
||||||
|
notify = self.notify_payload
|
||||||
|
self.fhdhr.logger.debug("Created NOTIFY: {}".format(notify))
|
||||||
|
try:
|
||||||
|
self.sock.sendto(notify, address)
|
||||||
|
except OSError as e:
|
||||||
|
# Most commonly: We received a multicast from an IP not in our subnet
|
||||||
|
self.fhdhr.logger.debug("Unable to send NOTIFY to {}: {}".format(address, e))
|
||||||
|
pass
|
||||||
|
elif cmd[0] == 'NOTIFY' and cmd[1] == '*':
|
||||||
|
# SSDP presence
|
||||||
|
self.fhdhr.logger.debug("NOTIFY data: {}".format(headers))
|
||||||
|
try:
|
||||||
|
if headers["server"].startswith("fHDHR"):
|
||||||
|
if headers["location"] != self.location:
|
||||||
|
self.detect_method.set(headers["location"].split("/device.xml")[0])
|
||||||
|
except KeyError:
|
||||||
|
return
|
||||||
|
else:
|
||||||
|
self.fhdhr.logger.debug('Unknown SSDP command %s %s' % (cmd[0], cmd[1]))
|
||||||
|
|
||||||
|
def m_search(self):
|
||||||
|
data = self.msearch_payload
|
||||||
|
self.sock.sendto(data, self._address)
|
||||||
|
|
||||||
|
def create_notify_payload(self):
|
||||||
|
if self.max_age is not None and not isinstance(self.max_age, int):
|
||||||
|
raise ValueError("max_age must by of type: int")
|
||||||
|
data = (
|
||||||
|
"NOTIFY * HTTP/1.1\r\n"
|
||||||
|
"HOST:{}\r\n"
|
||||||
|
"NT:{}\r\n"
|
||||||
|
"NTS:ssdp:alive\r\n"
|
||||||
|
"USN:{}\r\n"
|
||||||
|
"SERVER:{}\r\n"
|
||||||
|
).format(
|
||||||
|
self._broadcast_ip,
|
||||||
|
self.nt,
|
||||||
|
self.usn,
|
||||||
|
self.server
|
||||||
|
)
|
||||||
|
if self.location is not None:
|
||||||
|
data += "LOCATION:{}\r\n".format(self.location)
|
||||||
|
if self.al is not None:
|
||||||
|
data += "AL:{}\r\n".format(self.al)
|
||||||
|
if self.max_age is not None:
|
||||||
|
data += "Cache-Control:max-age={}\r\n".format(self.max_age)
|
||||||
|
data += "\r\n"
|
||||||
|
return data.encode("utf-8")
|
||||||
|
|
||||||
|
def create_msearch_payload(self):
|
||||||
|
data = (
|
||||||
|
"M-SEARCH * HTTP/1.1\r\n"
|
||||||
|
"HOST:{}\r\n"
|
||||||
|
'MAN: "ssdp:discover"\r\n'
|
||||||
|
"ST:{}\r\n"
|
||||||
|
"MX:{}\r\n"
|
||||||
|
).format(
|
||||||
|
self.broadcast_addy,
|
||||||
|
"ssdp:all",
|
||||||
|
1
|
||||||
|
)
|
||||||
|
data += "\r\n"
|
||||||
|
return data.encode("utf-8")
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
try:
|
||||||
|
while True:
|
||||||
|
data, address = self.sock.recvfrom(1024)
|
||||||
|
self.on_recv(data, address)
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
self.sock.close()
|
||||||
@ -1,214 +0,0 @@
|
|||||||
# Adapted from https://github.com/MoshiBin/ssdpy and https://github.com/ZeWaren/python-upnp-ssdp-example
|
|
||||||
import socket
|
|
||||||
import struct
|
|
||||||
import time
|
|
||||||
import threading
|
|
||||||
|
|
||||||
|
|
||||||
class SSDPServer():
|
|
||||||
|
|
||||||
def __init__(self, fhdhr):
|
|
||||||
self.fhdhr = fhdhr
|
|
||||||
|
|
||||||
self.ssdp_handling = {}
|
|
||||||
self.methods = [x for x in list(self.fhdhr.plugins.plugins.keys()) if self.fhdhr.plugins.plugins[x].type == "ssdp"]
|
|
||||||
|
|
||||||
if (self.fhdhr.config.dict["fhdhr"]["discovery_address"] and
|
|
||||||
self.fhdhr.config.dict["ssdp"]["enabled"] and
|
|
||||||
len(self.methods)):
|
|
||||||
|
|
||||||
self.fhdhr.threads["ssdp"] = threading.Thread(target=self.run)
|
|
||||||
self.setup_ssdp()
|
|
||||||
|
|
||||||
self.sock.bind((self.bind_address, 1900))
|
|
||||||
|
|
||||||
self.msearch_payload = self.create_msearch_payload()
|
|
||||||
|
|
||||||
self.max_age = int(fhdhr.config.dict["ssdp"]["max_age"])
|
|
||||||
self.age_time = None
|
|
||||||
|
|
||||||
self.ssdp_method_selfadd()
|
|
||||||
|
|
||||||
self.do_alive()
|
|
||||||
self.m_search()
|
|
||||||
|
|
||||||
def ssdp_method_selfadd(self):
|
|
||||||
for plugin_name in list(self.fhdhr.plugins.plugins.keys()):
|
|
||||||
if self.fhdhr.plugins.plugins[plugin_name].type == "ssdp":
|
|
||||||
method = self.fhdhr.plugins.plugins[plugin_name].name.lower()
|
|
||||||
plugin_utils = self.fhdhr.plugins.plugins[plugin_name].plugin_utils
|
|
||||||
self.ssdp_handling[method] = self.fhdhr.plugins.plugins[plugin_name].Plugin_OBJ(self.fhdhr, plugin_utils, self.broadcast_ip, self.max_age)
|
|
||||||
|
|
||||||
def start(self):
|
|
||||||
self.fhdhr.logger.info("SSDP Server Starting")
|
|
||||||
self.fhdhr.threads["ssdp"].start()
|
|
||||||
|
|
||||||
def stop(self):
|
|
||||||
self.fhdhr.logger.info("SSDP Server Stopping")
|
|
||||||
self.sock.close()
|
|
||||||
|
|
||||||
def run(self):
|
|
||||||
while True:
|
|
||||||
data, address = self.sock.recvfrom(1024)
|
|
||||||
self.on_recv(data, address)
|
|
||||||
self.do_alive()
|
|
||||||
self.stop()
|
|
||||||
|
|
||||||
def do_alive(self, forcealive=False):
|
|
||||||
|
|
||||||
send_alive = False
|
|
||||||
if not self.age_time:
|
|
||||||
send_alive = True
|
|
||||||
elif forcealive:
|
|
||||||
send_alive = True
|
|
||||||
elif time.time() >= (self.age_time + self.max_age):
|
|
||||||
send_alive = True
|
|
||||||
|
|
||||||
if send_alive:
|
|
||||||
self.fhdhr.logger.info("Sending Alive message to network.")
|
|
||||||
self.do_notify(self.broadcast_address_tuple)
|
|
||||||
self.age_time = time.time()
|
|
||||||
|
|
||||||
def do_notify(self, address):
|
|
||||||
|
|
||||||
notify_list = []
|
|
||||||
for ssdp_handler in list(self.ssdp_handling.keys()):
|
|
||||||
if self.ssdp_handling[ssdp_handler].enabled and hasattr(self.ssdp_handling[ssdp_handler], 'notify'):
|
|
||||||
notify_data = self.ssdp_handling[ssdp_handler].notify
|
|
||||||
if isinstance(notify_data, list):
|
|
||||||
notify_list.extend(notify_data)
|
|
||||||
else:
|
|
||||||
notify_list.append(notify_data)
|
|
||||||
|
|
||||||
for notifydata in notify_list:
|
|
||||||
notifydata = notifydata.encode("utf-8")
|
|
||||||
|
|
||||||
self.fhdhr.logger.debug("Created {}".format(notifydata))
|
|
||||||
try:
|
|
||||||
self.sock.sendto(notifydata, address)
|
|
||||||
except OSError as e:
|
|
||||||
# Most commonly: We received a multicast from an IP not in our subnet
|
|
||||||
self.fhdhr.logger.debug("Unable to send NOTIFY: %s" % e)
|
|
||||||
pass
|
|
||||||
|
|
||||||
def on_recv(self, data, address):
|
|
||||||
self.fhdhr.logger.debug("Received packet from {}: {}".format(address, data))
|
|
||||||
|
|
||||||
try:
|
|
||||||
header, payload = data.decode().split('\r\n\r\n')[:2]
|
|
||||||
except ValueError:
|
|
||||||
self.fhdhr.logger.error("Error with Received packet from {}: {}".format(address, data))
|
|
||||||
return
|
|
||||||
|
|
||||||
lines = header.split('\r\n')
|
|
||||||
cmd = lines[0].split(' ')
|
|
||||||
lines = map(lambda x: x.replace(': ', ':', 1), lines[1:])
|
|
||||||
lines = filter(lambda x: len(x) > 0, lines)
|
|
||||||
|
|
||||||
headers = [x.split(':', 1) for x in lines]
|
|
||||||
headers = dict(map(lambda x: (x[0].lower(), x[1]), headers))
|
|
||||||
|
|
||||||
for ssdp_handler in list(self.ssdp_handling.keys()):
|
|
||||||
if self.ssdp_handling[ssdp_handler].enabled and hasattr(self.ssdp_handling[ssdp_handler], 'on_recv'):
|
|
||||||
self.ssdp_handling[ssdp_handler].on_recv(headers, cmd, list(self.ssdp_handling.keys()))
|
|
||||||
|
|
||||||
if cmd[0] == 'M-SEARCH' and cmd[1] == '*':
|
|
||||||
# SSDP discovery
|
|
||||||
self.fhdhr.logger.debug("Received qualifying M-SEARCH from {}".format(address))
|
|
||||||
self.fhdhr.logger.debug("M-SEARCH data: {}".format(headers))
|
|
||||||
|
|
||||||
self.do_notify(address)
|
|
||||||
|
|
||||||
if cmd[0] == 'NOTIFY' and cmd[1] == '*':
|
|
||||||
self.fhdhr.logger.debug("NOTIFY data: {}".format(headers))
|
|
||||||
else:
|
|
||||||
self.fhdhr.logger.debug('Unknown SSDP command %s %s' % (cmd[0], cmd[1]))
|
|
||||||
|
|
||||||
def m_search(self):
|
|
||||||
data = self.msearch_payload
|
|
||||||
self.sock.sendto(data, self.broadcast_address_tuple)
|
|
||||||
|
|
||||||
def create_msearch_payload(self):
|
|
||||||
|
|
||||||
data = ''
|
|
||||||
data_command = "M-SEARCH * HTTP/1.1"
|
|
||||||
|
|
||||||
data_dict = {
|
|
||||||
"HOST": "%s:%s" % (self.broadcast_ip, 1900),
|
|
||||||
"MAN": "ssdp:discover",
|
|
||||||
"ST": "ssdp:all",
|
|
||||||
"MX": 1,
|
|
||||||
}
|
|
||||||
|
|
||||||
data += "%s\r\n" % data_command
|
|
||||||
for data_key in list(data_dict.keys()):
|
|
||||||
data += "%s:%s\r\n" % (data_key, data_dict[data_key])
|
|
||||||
data += "\r\n"
|
|
||||||
|
|
||||||
return data.encode("utf-8")
|
|
||||||
|
|
||||||
def setup_ssdp(self):
|
|
||||||
self.sock = None
|
|
||||||
|
|
||||||
self.proto = self.setup_proto()
|
|
||||||
self.iface = self.fhdhr.config.dict["ssdp"]["iface"]
|
|
||||||
self.address = self.fhdhr.config.dict["ssdp"]["multicast_address"]
|
|
||||||
self.setup_addressing()
|
|
||||||
|
|
||||||
self.sock = socket.socket(self.af_type, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
|
|
||||||
self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
|
|
||||||
|
|
||||||
self.setup_interface()
|
|
||||||
|
|
||||||
self.setup_multicasting()
|
|
||||||
|
|
||||||
def setup_proto(self):
|
|
||||||
proto = self.fhdhr.config.dict["ssdp"]["proto"]
|
|
||||||
allowed_protos = ("ipv4", "ipv6")
|
|
||||||
if proto not in allowed_protos:
|
|
||||||
raise ValueError("Invalid proto - expected one of {}".format(allowed_protos))
|
|
||||||
return proto
|
|
||||||
|
|
||||||
def setup_addressing(self):
|
|
||||||
if self.proto == "ipv4":
|
|
||||||
self.af_type = socket.AF_INET
|
|
||||||
self.broadcast_ip = "239.255.255.250"
|
|
||||||
self.broadcast_address_tuple = (self.broadcast_ip, 1900)
|
|
||||||
self.bind_address = "0.0.0.0"
|
|
||||||
elif self.proto == "ipv6":
|
|
||||||
self.af_type = socket.AF_INET6
|
|
||||||
self.broadcast_ip = "ff02::c"
|
|
||||||
self.broadcast_address_tuple = (self.broadcast_ip, 1900, 0, 0)
|
|
||||||
self.bind_address = "::"
|
|
||||||
|
|
||||||
def setup_interface(self):
|
|
||||||
# Bind to specific interface
|
|
||||||
if self.iface is not None:
|
|
||||||
self.sock.setsockopt(socket.SOL_SOCKET, getattr(socket, "SO_BINDTODEVICE", 25), self.iface)
|
|
||||||
|
|
||||||
def setup_multicasting(self):
|
|
||||||
# Subscribe to multicast address
|
|
||||||
if self.proto == "ipv4":
|
|
||||||
mreq = socket.inet_aton(self.broadcast_ip)
|
|
||||||
if self.address is not None:
|
|
||||||
mreq += socket.inet_aton(self.address)
|
|
||||||
else:
|
|
||||||
mreq += struct.pack(b"@I", socket.INADDR_ANY)
|
|
||||||
self.sock.setsockopt(
|
|
||||||
socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
|
|
||||||
# Allow multicasts on loopback devices (necessary for testing)
|
|
||||||
self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, 1)
|
|
||||||
elif self.proto == "ipv6":
|
|
||||||
# In IPv6 we use the interface index, not the address when subscribing to the group
|
|
||||||
mreq = socket.inet_pton(socket.AF_INET6, self.broadcast_ip)
|
|
||||||
if self.iface is not None:
|
|
||||||
iface_index = socket.if_nametoindex(self.iface)
|
|
||||||
# Send outgoing packets from the same interface
|
|
||||||
self.sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_IF, iface_index)
|
|
||||||
mreq += struct.pack(b"@I", iface_index)
|
|
||||||
else:
|
|
||||||
mreq += socket.inet_pton(socket.AF_INET6, "::")
|
|
||||||
self.sock.setsockopt(
|
|
||||||
socket.IPPROTO_IPV6, socket.IPV6_JOIN_GROUP, mreq,
|
|
||||||
)
|
|
||||||
self.sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_LOOP, 1)
|
|
||||||
43
fHDHR/device/station_scan.py
Normal file
43
fHDHR/device/station_scan.py
Normal file
@ -0,0 +1,43 @@
|
|||||||
|
import multiprocessing
|
||||||
|
import threading
|
||||||
|
|
||||||
|
|
||||||
|
class Station_Scan():
|
||||||
|
|
||||||
|
def __init__(self, fhdhr, channels):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
self.channels = channels
|
||||||
|
|
||||||
|
self.fhdhr.db.delete_fhdhr_value("station_scan", "scanning")
|
||||||
|
|
||||||
|
def scan(self, waitfordone=False):
|
||||||
|
self.fhdhr.logger.info("Channel Scan Requested by Client.")
|
||||||
|
|
||||||
|
scan_status = self.fhdhr.db.get_fhdhr_value("station_scan", "scanning")
|
||||||
|
if scan_status:
|
||||||
|
self.fhdhr.logger.info("Channel Scan Already In Progress!")
|
||||||
|
else:
|
||||||
|
self.fhdhr.db.set_fhdhr_value("station_scan", "scanning", 1)
|
||||||
|
|
||||||
|
if waitfordone:
|
||||||
|
self.runscan()
|
||||||
|
else:
|
||||||
|
if self.fhdhr.config.dict["main"]["thread_method"] in ["multiprocessing"]:
|
||||||
|
chanscan = multiprocessing.Process(target=self.runscan)
|
||||||
|
elif self.fhdhr.config.dict["main"]["thread_method"] in ["threading"]:
|
||||||
|
chanscan = threading.Thread(target=self.runscan)
|
||||||
|
if self.fhdhr.config.dict["main"]["thread_method"] in ["multiprocessing", "threading"]:
|
||||||
|
chanscan.start()
|
||||||
|
|
||||||
|
def runscan(self):
|
||||||
|
self.channels.get_channels(forceupdate=True)
|
||||||
|
self.fhdhr.logger.info("Requested Channel Scan Complete.")
|
||||||
|
self.fhdhr.db.delete_fhdhr_value("station_scan", "scanning")
|
||||||
|
|
||||||
|
def scanning(self):
|
||||||
|
scan_status = self.fhdhr.db.get_fhdhr_value("station_scan", "scanning")
|
||||||
|
if not scan_status:
|
||||||
|
return False
|
||||||
|
else:
|
||||||
|
return True
|
||||||
@ -1,4 +1,3 @@
|
|||||||
import m3u8
|
|
||||||
|
|
||||||
from fHDHR.exceptions import TunerError
|
from fHDHR.exceptions import TunerError
|
||||||
|
|
||||||
@ -12,218 +11,78 @@ class Tuners():
|
|||||||
self.channels = channels
|
self.channels = channels
|
||||||
|
|
||||||
self.epg = epg
|
self.epg = epg
|
||||||
|
self.max_tuners = int(self.fhdhr.config.dict["fhdhr"]["tuner_count"])
|
||||||
|
|
||||||
self.tuners = {}
|
self.tuners = {}
|
||||||
for origin in list(self.fhdhr.origins.origins_dict.keys()):
|
|
||||||
self.tuners[origin] = {}
|
|
||||||
|
|
||||||
max_tuners = int(self.fhdhr.origins.origins_dict[origin].tuners)
|
self.fhdhr.logger.info("Creating %s tuners." % str(self.max_tuners))
|
||||||
|
|
||||||
self.fhdhr.logger.info("Creating %s tuners for %s." % (max_tuners, origin))
|
for i in range(0, self.max_tuners):
|
||||||
|
self.tuners[str(i)] = Tuner(fhdhr, i, epg)
|
||||||
|
|
||||||
for i in range(0, max_tuners):
|
def tuner_grab(self, tuner_number):
|
||||||
self.tuners[origin][str(i)] = Tuner(fhdhr, i, epg, origin)
|
|
||||||
|
|
||||||
self.alt_stream_handlers = {}
|
if str(tuner_number) not in list(self.tuners.keys()):
|
||||||
|
self.fhdhr.logger.error("Tuner %s does not exist." % str(tuner_number))
|
||||||
def alt_stream_methods_selfadd(self):
|
|
||||||
for plugin_name in list(self.fhdhr.plugins.plugins.keys()):
|
|
||||||
if self.fhdhr.plugins.plugins[plugin_name].type == "alt_stream":
|
|
||||||
method = self.fhdhr.plugins.plugins[plugin_name].name
|
|
||||||
self.alt_stream_handlers[method] = self.fhdhr.plugins.plugins[plugin_name]
|
|
||||||
|
|
||||||
def get_available_tuner(self, origin):
|
|
||||||
return next(tunernum for tunernum in list(self.tuners[origin].keys()) if not self.tuners[origin][tunernum].tuner_lock.locked()) or None
|
|
||||||
|
|
||||||
def get_scanning_tuner(self, origin):
|
|
||||||
return next(tunernum for tunernum in list(self.tuners[origin].keys()) if self.tuners[origin][tunernum].status["status"] == "Scanning") or None
|
|
||||||
|
|
||||||
def stop_tuner_scan(self, origin):
|
|
||||||
tunernum = self.get_scanning_tuner(origin)
|
|
||||||
if tunernum:
|
|
||||||
self.tuners[origin][str(tunernum)].close()
|
|
||||||
|
|
||||||
def tuner_scan(self, origin="all"):
|
|
||||||
"""Temporarily use a tuner for a scan"""
|
|
||||||
|
|
||||||
if origin == "all":
|
|
||||||
origins = list(self.tuners.keys())
|
|
||||||
else:
|
|
||||||
origins = [origin]
|
|
||||||
|
|
||||||
for origin in origins:
|
|
||||||
|
|
||||||
if not self.available_tuner_count(origin):
|
|
||||||
raise TunerError("805 - All Tuners In Use")
|
|
||||||
|
|
||||||
tunernumber = self.get_available_tuner(origin)
|
|
||||||
self.tuners[origin][str(tunernumber)].channel_scan(origin)
|
|
||||||
|
|
||||||
if not tunernumber:
|
|
||||||
raise TunerError("805 - All Tuners In Use")
|
|
||||||
|
|
||||||
def tuner_grab(self, tuner_number, origin, channel_number):
|
|
||||||
|
|
||||||
if str(tuner_number) not in list(self.tuners[origin].keys()):
|
|
||||||
self.fhdhr.logger.error("Tuner %s does not exist for %s." % (tuner_number, origin))
|
|
||||||
raise TunerError("806 - Tune Failed")
|
raise TunerError("806 - Tune Failed")
|
||||||
|
|
||||||
# TunerError will raise if unavailable
|
# TunerError will raise if unavailable
|
||||||
self.tuners[origin][str(tuner_number)].grab(origin, channel_number)
|
self.tuners[str(tuner_number)].grab()
|
||||||
|
|
||||||
return tuner_number
|
return tuner_number
|
||||||
|
|
||||||
def first_available(self, origin, channel_number, dograb=True):
|
def first_available(self):
|
||||||
|
|
||||||
if not self.available_tuner_count(origin):
|
if not self.available_tuner_count():
|
||||||
raise TunerError("805 - All Tuners In Use")
|
raise TunerError("805 - All Tuners In Use")
|
||||||
|
|
||||||
tunernumber = self.get_available_tuner(origin)
|
for tunernum in list(self.tuners.keys()):
|
||||||
|
try:
|
||||||
|
self.tuners[str(tunernum)].grab()
|
||||||
|
except TunerError:
|
||||||
|
continue
|
||||||
|
else:
|
||||||
|
return tunernum
|
||||||
|
|
||||||
if not tunernumber:
|
raise TunerError("805 - All Tuners In Use")
|
||||||
raise TunerError("805 - All Tuners In Use")
|
|
||||||
else:
|
|
||||||
self.tuners[origin][str(tunernumber)].grab(origin, channel_number)
|
|
||||||
return tunernumber
|
|
||||||
|
|
||||||
def tuner_close(self, tunernum, origin):
|
def tuner_close(self, tunernum):
|
||||||
self.tuners[origin][str(tunernum)].close()
|
self.tuners[str(tunernum)].close()
|
||||||
|
|
||||||
def status(self, origin=None):
|
def status(self):
|
||||||
all_status = {}
|
all_status = {}
|
||||||
if origin:
|
for tunernum in list(self.tuners.keys()):
|
||||||
for tunernum in list(self.tuners[origin].keys()):
|
all_status[tunernum] = self.tuners[str(tunernum)].get_status()
|
||||||
all_status[tunernum] = self.tuners[origin][str(tunernum)].get_status()
|
|
||||||
else:
|
|
||||||
for origin in list(self.tuners.keys()):
|
|
||||||
all_status[origin] = {}
|
|
||||||
for tunernum in list(self.tuners[origin].keys()):
|
|
||||||
all_status[origin][tunernum] = self.tuners[origin][str(tunernum)].get_status()
|
|
||||||
return all_status
|
return all_status
|
||||||
|
|
||||||
def available_tuner_count(self, origin):
|
def available_tuner_count(self):
|
||||||
available_tuners = 0
|
available_tuners = 0
|
||||||
for tunernum in list(self.tuners[origin].keys()):
|
for tunernum in list(self.tuners.keys()):
|
||||||
if not self.tuners[origin][str(tunernum)].tuner_lock.locked():
|
tuner_status = self.tuners[str(tunernum)].get_status()
|
||||||
|
if tuner_status["status"] == "Inactive":
|
||||||
available_tuners += 1
|
available_tuners += 1
|
||||||
return available_tuners
|
return available_tuners
|
||||||
|
|
||||||
def inuse_tuner_count(self, origin):
|
def inuse_tuner_count(self):
|
||||||
inuse_tuners = 0
|
inuse_tuners = 0
|
||||||
for tunernum in list(self.tuners[origin].keys()):
|
for tunernum in list(self.tuners.keys()):
|
||||||
if self.tuners[origin][str(tunernum)].tuner_lock.locked():
|
tuner_status = self.tuners[str(tunernum)].get_status()
|
||||||
|
if tuner_status["status"] == "Active":
|
||||||
inuse_tuners += 1
|
inuse_tuners += 1
|
||||||
return inuse_tuners
|
return inuse_tuners
|
||||||
|
|
||||||
def get_stream_info(self, stream_args):
|
def get_stream_info(self, stream_args):
|
||||||
|
|
||||||
stream_info = self.channels.get_channel_stream(stream_args, stream_args["origin"])
|
stream_args["channelUri"] = self.channels.get_channel_stream(str(stream_args["channel"]))
|
||||||
if not stream_info:
|
if not stream_args["channelUri"]:
|
||||||
raise TunerError("806 - Tune Failed")
|
raise TunerError("806 - Tune Failed")
|
||||||
|
|
||||||
if isinstance(stream_info, str):
|
channelUri_headers = self.fhdhr.web.session.head(stream_args["channelUri"]).headers
|
||||||
stream_info = {"url": stream_info, "headers": None}
|
stream_args["true_content_type"] = channelUri_headers['Content-Type']
|
||||||
stream_args["stream_info"] = stream_info
|
|
||||||
|
|
||||||
if not stream_args["stream_info"]["url"]:
|
if stream_args["true_content_type"].startswith(tuple(["application/", "text/"])):
|
||||||
raise TunerError("806 - Tune Failed")
|
|
||||||
|
|
||||||
if "headers" not in list(stream_args["stream_info"].keys()):
|
|
||||||
stream_args["stream_info"]["headers"] = None
|
|
||||||
|
|
||||||
if stream_args["stream_info"]["url"].startswith("udp://"):
|
|
||||||
stream_args["true_content_type"] = "video/mpeg"
|
|
||||||
stream_args["content_type"] = "video/mpeg"
|
stream_args["content_type"] = "video/mpeg"
|
||||||
else:
|
else:
|
||||||
|
stream_args["content_type"] = stream_args["true_content_type"]
|
||||||
channel_stream_url_headers = self.fhdhr.web.session.head(stream_args["stream_info"]["url"]).headers
|
|
||||||
stream_args["true_content_type"] = channel_stream_url_headers['Content-Type']
|
|
||||||
|
|
||||||
if stream_args["true_content_type"].startswith(tuple(["application/", "text/"])):
|
|
||||||
stream_args["content_type"] = "video/mpeg"
|
|
||||||
if stream_args["origin_quality"] != -1:
|
|
||||||
stream_args["stream_info"]["url"] = self.m3u8_quality(stream_args)
|
|
||||||
else:
|
|
||||||
stream_args["content_type"] = stream_args["true_content_type"]
|
|
||||||
|
|
||||||
return stream_args
|
return stream_args
|
||||||
|
|
||||||
def m3u8_quality(self, stream_args):
|
|
||||||
|
|
||||||
m3u8_url = stream_args["stream_info"]["url"]
|
|
||||||
quality_profile = stream_args["origin_quality"]
|
|
||||||
|
|
||||||
if not quality_profile:
|
|
||||||
if stream_args["method"] == "direct":
|
|
||||||
quality_profile = "high"
|
|
||||||
self.fhdhr.logger.info("Origin Quality not set in config. Direct Method set and will default to Highest Quality")
|
|
||||||
else:
|
|
||||||
self.fhdhr.logger.info("Origin Quality not set in config. %s Method will select the Quality Automatically" % stream_args["method"])
|
|
||||||
return m3u8_url
|
|
||||||
else:
|
|
||||||
quality_profile = quality_profile.lower()
|
|
||||||
self.fhdhr.logger.info("Origin Quality set in config to %s" % (quality_profile))
|
|
||||||
|
|
||||||
while True:
|
|
||||||
self.fhdhr.logger.info("Opening m3u8 for reading %s" % m3u8_url)
|
|
||||||
|
|
||||||
try:
|
|
||||||
if stream_args["stream_info"]["headers"]:
|
|
||||||
videoUrlM3u = m3u8.load(m3u8_url, headers=stream_args["stream_info"]["headers"])
|
|
||||||
else:
|
|
||||||
videoUrlM3u = m3u8.load(m3u8_url)
|
|
||||||
except Exception as e:
|
|
||||||
self.fhdhr.logger.info("m3u8 load error: %s" % e)
|
|
||||||
return m3u8_url
|
|
||||||
|
|
||||||
if len(videoUrlM3u.playlists):
|
|
||||||
self.fhdhr.logger.info("%s m3u8 varients found" % len(videoUrlM3u.playlists))
|
|
||||||
|
|
||||||
# Create list of dicts
|
|
||||||
playlists, playlist_index = {}, 0
|
|
||||||
for playlist_item in videoUrlM3u.playlists:
|
|
||||||
playlist_index += 1
|
|
||||||
playlist_dict = {
|
|
||||||
"url": playlist_item.absolute_uri,
|
|
||||||
"bandwidth": playlist_item.stream_info.bandwidth,
|
|
||||||
}
|
|
||||||
|
|
||||||
if not playlist_item.stream_info.resolution:
|
|
||||||
playlist_dict["width"] = None
|
|
||||||
playlist_dict["height"] = None
|
|
||||||
else:
|
|
||||||
try:
|
|
||||||
playlist_dict["width"] = playlist_item.stream_info.resolution[0]
|
|
||||||
playlist_dict["height"] = playlist_item.stream_info.resolution[1]
|
|
||||||
except TypeError:
|
|
||||||
playlist_dict["width"] = None
|
|
||||||
playlist_dict["height"] = None
|
|
||||||
|
|
||||||
playlists[playlist_index] = playlist_dict
|
|
||||||
|
|
||||||
sorted_playlists = sorted(playlists, key=lambda i: (
|
|
||||||
int(playlists[i]['bandwidth']),
|
|
||||||
int(playlists[i]['width'] or 0),
|
|
||||||
int(playlists[i]['height'] or 0)
|
|
||||||
))
|
|
||||||
sorted_playlists = [playlists[x] for x in sorted_playlists]
|
|
||||||
|
|
||||||
if not quality_profile or quality_profile == "high":
|
|
||||||
selected_index = -1
|
|
||||||
elif quality_profile == "medium":
|
|
||||||
selected_index = int((len(sorted_playlists) - 1)/2)
|
|
||||||
elif quality_profile == "low":
|
|
||||||
selected_index = 0
|
|
||||||
|
|
||||||
m3u8_stats = ",".join(
|
|
||||||
["%s %s" % (x, sorted_playlists[selected_index][x])
|
|
||||||
for x in list(sorted_playlists[selected_index].keys())
|
|
||||||
if x != "url" and sorted_playlists[selected_index][x]])
|
|
||||||
self.fhdhr.logger.info("Selected m3u8 details: %s" % m3u8_stats)
|
|
||||||
m3u8_url = sorted_playlists[selected_index]["url"]
|
|
||||||
|
|
||||||
else:
|
|
||||||
self.fhdhr.logger.info("No m3u8 varients found")
|
|
||||||
break
|
|
||||||
|
|
||||||
return m3u8_url
|
|
||||||
|
|||||||
@ -2,6 +2,8 @@
|
|||||||
|
|
||||||
from .direct_stream import Direct_Stream
|
from .direct_stream import Direct_Stream
|
||||||
from .direct_m3u8_stream import Direct_M3U8_Stream
|
from .direct_m3u8_stream import Direct_M3U8_Stream
|
||||||
|
from .ffmpeg_stream import FFMPEG_Stream
|
||||||
|
from .vlc_stream import VLC_Stream
|
||||||
|
|
||||||
|
|
||||||
class Stream():
|
class Stream():
|
||||||
@ -10,14 +12,16 @@ class Stream():
|
|||||||
self.fhdhr = fhdhr
|
self.fhdhr = fhdhr
|
||||||
self.stream_args = stream_args
|
self.stream_args = stream_args
|
||||||
|
|
||||||
if stream_args["method"] == "direct":
|
if stream_args["method"] == "ffmpeg":
|
||||||
if self.stream_args["true_content_type"].startswith(tuple(["application/", "text/"])):
|
self.method = FFMPEG_Stream(fhdhr, stream_args, tuner)
|
||||||
self.method = Direct_M3U8_Stream(fhdhr, stream_args, tuner)
|
if stream_args["method"] == "vlc":
|
||||||
else:
|
self.method = VLC_Stream(fhdhr, stream_args, tuner)
|
||||||
self.method = Direct_Stream(fhdhr, stream_args, tuner)
|
elif (stream_args["method"] == "direct" and
|
||||||
else:
|
not self.stream_args["true_content_type"].startswith(tuple(["application/", "text/"]))):
|
||||||
plugin_name = self.fhdhr.config.dict["streaming"]["valid_methods"][stream_args["method"]]["plugin"]
|
self.method = Direct_Stream(fhdhr, stream_args, tuner)
|
||||||
self.method = self.fhdhr.plugins.plugins[plugin_name].Plugin_OBJ(fhdhr, self.fhdhr.plugins.plugins[plugin_name].plugin_utils, stream_args, tuner)
|
elif (stream_args["method"] == "direct" and
|
||||||
|
self.stream_args["true_content_type"].startswith(tuple(["application/", "text/"]))):
|
||||||
|
self.method = Direct_M3U8_Stream(fhdhr, stream_args, tuner)
|
||||||
|
|
||||||
def get(self):
|
def get(self):
|
||||||
return self.method.get()
|
return self.method.get()
|
||||||
|
|||||||
@ -14,17 +14,25 @@ class Direct_M3U8_Stream():
|
|||||||
self.stream_args = stream_args
|
self.stream_args = stream_args
|
||||||
self.tuner = tuner
|
self.tuner = tuner
|
||||||
|
|
||||||
self.bytes_per_read = int(self.fhdhr.config.dict["streaming"]["bytes_per_read"])
|
self.chunksize = int(self.fhdhr.config.dict["direct_stream"]['chunksize'])
|
||||||
|
|
||||||
def get(self):
|
def get(self):
|
||||||
|
|
||||||
if not self.stream_args["duration"] == 0:
|
if not self.stream_args["duration"] == 0:
|
||||||
self.stream_args["time_end"] = self.stream_args["duration"] + time.time()
|
self.stream_args["time_end"] = self.stream_args["duration"] + time.time()
|
||||||
|
|
||||||
self.fhdhr.logger.info("Detected stream of m3u8 URL: %s" % self.stream_args["stream_info"]["url"])
|
self.fhdhr.logger.info("Detected stream URL is m3u8: %s" % self.stream_args["true_content_type"])
|
||||||
|
|
||||||
if self.stream_args["transcode_quality"]:
|
channelUri = self.stream_args["channelUri"]
|
||||||
self.fhdhr.logger.info("Client requested a %s transcode for stream. Direct Method cannot transcode." % self.stream_args["transcode_quality"])
|
while True:
|
||||||
|
|
||||||
|
self.fhdhr.logger.info("Opening m3u8 for reading %s" % channelUri)
|
||||||
|
videoUrlM3u = m3u8.load(channelUri)
|
||||||
|
if len(videoUrlM3u.playlists):
|
||||||
|
self.fhdhr.logger.info("%s m3u8 varients found" % len(videoUrlM3u.playlists))
|
||||||
|
channelUri = videoUrlM3u.playlists[0].absolute_uri
|
||||||
|
else:
|
||||||
|
break
|
||||||
|
|
||||||
def generate():
|
def generate():
|
||||||
|
|
||||||
@ -34,16 +42,7 @@ class Direct_M3U8_Stream():
|
|||||||
|
|
||||||
while self.tuner.tuner_lock.locked():
|
while self.tuner.tuner_lock.locked():
|
||||||
|
|
||||||
try:
|
playlist = m3u8.load(channelUri)
|
||||||
if self.stream_args["stream_info"]["headers"]:
|
|
||||||
playlist = m3u8.load(self.stream_args["stream_info"]["url"], headers=self.stream_args["stream_info"]["headers"])
|
|
||||||
else:
|
|
||||||
playlist = m3u8.load(self.stream_args["stream_info"]["url"])
|
|
||||||
except Exception as e:
|
|
||||||
self.fhdhr.logger.info("Connection Closed: %s" % e)
|
|
||||||
self.tuner.close()
|
|
||||||
return None
|
|
||||||
|
|
||||||
segments = playlist.segments
|
segments = playlist.segments
|
||||||
|
|
||||||
if len(played_chunk_urls):
|
if len(played_chunk_urls):
|
||||||
@ -71,19 +70,13 @@ class Direct_M3U8_Stream():
|
|||||||
self.fhdhr.logger.info("Requested Duration Expired.")
|
self.fhdhr.logger.info("Requested Duration Expired.")
|
||||||
self.tuner.close()
|
self.tuner.close()
|
||||||
|
|
||||||
if self.stream_args["stream_info"]["headers"]:
|
chunk = self.fhdhr.web.session.get(chunkurl).content
|
||||||
chunk = self.fhdhr.web.session.get(chunkurl, headers=self.stream_args["stream_info"]["headers"]).content
|
|
||||||
else:
|
|
||||||
chunk = self.fhdhr.web.session.get(chunkurl).content
|
|
||||||
if not chunk:
|
if not chunk:
|
||||||
break
|
break
|
||||||
# raise TunerError("807 - No Video Data")
|
# raise TunerError("807 - No Video Data")
|
||||||
if key:
|
if key:
|
||||||
if key["url"]:
|
if key["url"]:
|
||||||
if self.stream_args["stream_info"]["headers"]:
|
keyfile = self.fhdhr.web.session.get(key["url"]).content
|
||||||
keyfile = self.fhdhr.web.session.get(key["url"], headers=self.stream_args["stream_info"]["headers"]).content
|
|
||||||
else:
|
|
||||||
keyfile = self.fhdhr.web.session.get(key["url"]).content
|
|
||||||
cryptor = AES.new(keyfile, AES.MODE_CBC, keyfile)
|
cryptor = AES.new(keyfile, AES.MODE_CBC, keyfile)
|
||||||
self.fhdhr.logger.info("Decrypting Chunk #%s with key: %s" % (len(played_chunk_urls), key["url"]))
|
self.fhdhr.logger.info("Decrypting Chunk #%s with key: %s" % (len(played_chunk_urls), key["url"]))
|
||||||
chunk = cryptor.decrypt(chunk)
|
chunk = cryptor.decrypt(chunk)
|
||||||
@ -93,16 +86,16 @@ class Direct_M3U8_Stream():
|
|||||||
yield chunk
|
yield chunk
|
||||||
self.tuner.add_downloaded_size(chunk_size)
|
self.tuner.add_downloaded_size(chunk_size)
|
||||||
|
|
||||||
|
if playlist.target_duration:
|
||||||
|
time.sleep(int(playlist.target_duration))
|
||||||
|
|
||||||
self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")
|
self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")
|
||||||
|
|
||||||
except GeneratorExit:
|
except GeneratorExit:
|
||||||
self.fhdhr.logger.info("Connection Closed.")
|
self.fhdhr.logger.info("Connection Closed.")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.fhdhr.logger.info("Connection Closed: %s" % e)
|
self.fhdhr.logger.info("Connection Closed: " + str(e))
|
||||||
finally:
|
finally:
|
||||||
self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")
|
|
||||||
if hasattr(self.fhdhr.origins.origins_dict[self.tuner.origin], "close_stream"):
|
|
||||||
self.fhdhr.origins.origins_dict[self.tuner.origin].close_stream(self.tuner.number, self.stream_args)
|
|
||||||
self.tuner.close()
|
self.tuner.close()
|
||||||
# raise TunerError("806 - Tune Failed")
|
# raise TunerError("806 - Tune Failed")
|
||||||
|
|
||||||
|
|||||||
@ -11,22 +11,16 @@ class Direct_Stream():
|
|||||||
self.stream_args = stream_args
|
self.stream_args = stream_args
|
||||||
self.tuner = tuner
|
self.tuner = tuner
|
||||||
|
|
||||||
self.bytes_per_read = int(self.fhdhr.config.dict["streaming"]["bytes_per_read"])
|
self.chunksize = int(self.fhdhr.config.dict["direct_stream"]['chunksize'])
|
||||||
|
|
||||||
def get(self):
|
def get(self):
|
||||||
|
|
||||||
if not self.stream_args["duration"] == 0:
|
if not self.stream_args["duration"] == 0:
|
||||||
self.stream_args["time_end"] = self.stream_args["duration"] + time.time()
|
self.stream_args["time_end"] = self.stream_args["duration"] + time.time()
|
||||||
|
|
||||||
self.fhdhr.logger.info("Direct Stream of %s URL: %s" % (self.stream_args["true_content_type"], self.stream_args["stream_info"]["url"]))
|
self.fhdhr.logger.info("Direct Stream of %s URL: %s" % (self.stream_args["true_content_type"], self.stream_args["channelUri"]))
|
||||||
|
|
||||||
if self.stream_args["transcode_quality"]:
|
req = self.fhdhr.web.session.get(self.stream_args["channelUri"], stream=True)
|
||||||
self.fhdhr.logger.info("Client requested a %s transcode for stream. Direct Method cannot transcode." % self.stream_args["transcode_quality"])
|
|
||||||
|
|
||||||
if self.stream_args["stream_info"]["headers"]:
|
|
||||||
req = self.fhdhr.web.session.get(self.stream_args["stream_info"]["url"], stream=True, headers=self.stream_args["stream_info"]["headers"])
|
|
||||||
else:
|
|
||||||
req = self.fhdhr.web.session.get(self.stream_args["stream_info"]["url"], stream=True)
|
|
||||||
|
|
||||||
def generate():
|
def generate():
|
||||||
|
|
||||||
@ -36,7 +30,7 @@ class Direct_Stream():
|
|||||||
|
|
||||||
while self.tuner.tuner_lock.locked():
|
while self.tuner.tuner_lock.locked():
|
||||||
|
|
||||||
for chunk in req.iter_content(chunk_size=self.bytes_per_read):
|
for chunk in req.iter_content(chunk_size=self.chunksize):
|
||||||
|
|
||||||
if (not self.stream_args["duration"] == 0 and
|
if (not self.stream_args["duration"] == 0 and
|
||||||
not time.time() < self.stream_args["time_end"]):
|
not time.time() < self.stream_args["time_end"]):
|
||||||
@ -60,12 +54,9 @@ class Direct_Stream():
|
|||||||
except GeneratorExit:
|
except GeneratorExit:
|
||||||
self.fhdhr.logger.info("Connection Closed.")
|
self.fhdhr.logger.info("Connection Closed.")
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.fhdhr.logger.info("Connection Closed: %s" % e)
|
self.fhdhr.logger.info("Connection Closed: " + str(e))
|
||||||
finally:
|
finally:
|
||||||
req.close()
|
req.close()
|
||||||
self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")
|
|
||||||
if hasattr(self.fhdhr.origins.origins_dict[self.tuner.origin], "close_stream"):
|
|
||||||
self.fhdhr.origins.origins_dict[self.tuner.origin].close_stream(self.tuner.number, self.stream_args)
|
|
||||||
self.tuner.close()
|
self.tuner.close()
|
||||||
# raise TunerError("806 - Tune Failed")
|
# raise TunerError("806 - Tune Failed")
|
||||||
|
|
||||||
|
|||||||
130
fHDHR/device/tuners/stream/ffmpeg_stream.py
Normal file
130
fHDHR/device/tuners/stream/ffmpeg_stream.py
Normal file
@ -0,0 +1,130 @@
|
|||||||
|
import sys
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
# from fHDHR.exceptions import TunerError
|
||||||
|
|
||||||
|
|
||||||
|
class FFMPEG_Stream():
|
||||||
|
|
||||||
|
def __init__(self, fhdhr, stream_args, tuner):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
self.stream_args = stream_args
|
||||||
|
self.tuner = tuner
|
||||||
|
|
||||||
|
self.bytes_per_read = int(self.fhdhr.config.dict["ffmpeg"]["bytes_per_read"])
|
||||||
|
self.ffmpeg_command = self.ffmpeg_command_assemble(stream_args)
|
||||||
|
|
||||||
|
def get(self):
|
||||||
|
|
||||||
|
ffmpeg_proc = subprocess.Popen(self.ffmpeg_command, stdout=subprocess.PIPE)
|
||||||
|
|
||||||
|
def generate():
|
||||||
|
try:
|
||||||
|
while self.tuner.tuner_lock.locked():
|
||||||
|
|
||||||
|
chunk = ffmpeg_proc.stdout.read(self.bytes_per_read)
|
||||||
|
if not chunk:
|
||||||
|
break
|
||||||
|
# raise TunerError("807 - No Video Data")
|
||||||
|
yield chunk
|
||||||
|
chunk_size = int(sys.getsizeof(chunk))
|
||||||
|
self.tuner.add_downloaded_size(chunk_size)
|
||||||
|
self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")
|
||||||
|
|
||||||
|
except GeneratorExit:
|
||||||
|
self.fhdhr.logger.info("Connection Closed.")
|
||||||
|
except Exception as e:
|
||||||
|
self.fhdhr.logger.info("Connection Closed: " + str(e))
|
||||||
|
finally:
|
||||||
|
ffmpeg_proc.terminate()
|
||||||
|
ffmpeg_proc.communicate()
|
||||||
|
self.tuner.close()
|
||||||
|
# raise TunerError("806 - Tune Failed")
|
||||||
|
|
||||||
|
return generate()
|
||||||
|
|
||||||
|
def ffmpeg_command_assemble(self, stream_args):
|
||||||
|
ffmpeg_command = [
|
||||||
|
self.fhdhr.config.dict["ffmpeg"]["path"],
|
||||||
|
"-i", stream_args["channelUri"],
|
||||||
|
]
|
||||||
|
ffmpeg_command.extend(self.ffmpeg_duration(stream_args))
|
||||||
|
ffmpeg_command.extend(self.transcode_profiles(stream_args))
|
||||||
|
ffmpeg_command.extend(self.ffmpeg_loglevel())
|
||||||
|
ffmpeg_command.extend(["pipe:stdout"])
|
||||||
|
return ffmpeg_command
|
||||||
|
|
||||||
|
def ffmpeg_duration(self, stream_args):
|
||||||
|
ffmpeg_command = []
|
||||||
|
if stream_args["duration"]:
|
||||||
|
ffmpeg_command.extend(["-t", str(stream_args["duration"])])
|
||||||
|
else:
|
||||||
|
ffmpeg_command.extend(
|
||||||
|
[
|
||||||
|
"-reconnect", "1",
|
||||||
|
"-reconnect_at_eof", "1",
|
||||||
|
"-reconnect_streamed", "1",
|
||||||
|
"-reconnect_delay_max", "2",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
return ffmpeg_command
|
||||||
|
|
||||||
|
def ffmpeg_loglevel(self):
|
||||||
|
ffmpeg_command = []
|
||||||
|
log_level = self.fhdhr.config.dict["logging"]["level"].lower()
|
||||||
|
|
||||||
|
loglevel_dict = {
|
||||||
|
"debug": "debug",
|
||||||
|
"info": "info",
|
||||||
|
"error": "error",
|
||||||
|
"warning": "warning",
|
||||||
|
"critical": "fatal",
|
||||||
|
}
|
||||||
|
if log_level not in ["info", "debug"]:
|
||||||
|
ffmpeg_command.extend(["-nostats", "-hide_banner"])
|
||||||
|
ffmpeg_command.extend(["-loglevel", loglevel_dict[log_level]])
|
||||||
|
return ffmpeg_command
|
||||||
|
|
||||||
|
def transcode_profiles(self, stream_args):
|
||||||
|
# TODO implement actual profiles here
|
||||||
|
"""
|
||||||
|
• heavy: transcode to AVC with the same resolution, frame-rate, and interlacing as the
|
||||||
|
original stream. For example 1080i60 AVC 1080i60, 720p60 AVC 720p60. → →
|
||||||
|
• mobile: trancode to AVC progressive not exceeding 1280x720 30fps.
|
||||||
|
• internet720: transcode to low bitrate AVC progressive not exceeding 1280x720 30fps.
|
||||||
|
• internet480: transcode to low bitrate AVC progressive not exceeding 848x480 30fps for
|
||||||
|
16:9 content, not exceeding 640x480 30fps for 4:3 content.
|
||||||
|
• internet360: transcode to low bitrate AVC progressive not exceeding 640x360 30fps for
|
||||||
|
16:9 content, not exceeding 480x360 30fps for 4:3 content.
|
||||||
|
• internet240: transcode to low bitrate AVC progressive not exceeding 432x240 30fps for
|
||||||
|
16:9 content, not exceeding 320x240 30fps for 4:3 content
|
||||||
|
"""
|
||||||
|
|
||||||
|
if stream_args["transcode"]:
|
||||||
|
self.fhdhr.logger.info("Client requested a " + stream_args["transcode"] + " transcode for stream.")
|
||||||
|
stream_args["transcode"] = None
|
||||||
|
|
||||||
|
ffmpeg_command = []
|
||||||
|
|
||||||
|
if not stream_args["transcode"]:
|
||||||
|
ffmpeg_command.extend(
|
||||||
|
[
|
||||||
|
"-c", "copy",
|
||||||
|
"-f", "mpegts",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
elif stream_args["transcode"] == "heavy":
|
||||||
|
ffmpeg_command.extend([])
|
||||||
|
elif stream_args["transcode"] == "mobile":
|
||||||
|
ffmpeg_command.extend([])
|
||||||
|
elif stream_args["transcode"] == "internet720":
|
||||||
|
ffmpeg_command.extend([])
|
||||||
|
elif stream_args["transcode"] == "internet480":
|
||||||
|
ffmpeg_command.extend([])
|
||||||
|
elif stream_args["transcode"] == "internet360":
|
||||||
|
ffmpeg_command.extend([])
|
||||||
|
elif stream_args["transcode"] == "internet240":
|
||||||
|
ffmpeg_command.extend([])
|
||||||
|
|
||||||
|
return ffmpeg_command
|
||||||
121
fHDHR/device/tuners/stream/vlc_stream.py
Normal file
121
fHDHR/device/tuners/stream/vlc_stream.py
Normal file
@ -0,0 +1,121 @@
|
|||||||
|
import sys
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
# from fHDHR.exceptions import TunerError
|
||||||
|
|
||||||
|
|
||||||
|
class VLC_Stream():
|
||||||
|
|
||||||
|
def __init__(self, fhdhr, stream_args, tuner):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
self.stream_args = stream_args
|
||||||
|
self.tuner = tuner
|
||||||
|
|
||||||
|
self.bytes_per_read = int(self.fhdhr.config.dict["vlc"]["bytes_per_read"])
|
||||||
|
self.vlc_command = self.vlc_command_assemble(stream_args)
|
||||||
|
|
||||||
|
def get(self):
|
||||||
|
|
||||||
|
vlc_proc = subprocess.Popen(self.vlc_command, stdout=subprocess.PIPE)
|
||||||
|
|
||||||
|
def generate():
|
||||||
|
try:
|
||||||
|
|
||||||
|
while self.tuner.tuner_lock.locked():
|
||||||
|
|
||||||
|
chunk = vlc_proc.stdout.read(self.bytes_per_read)
|
||||||
|
if not chunk:
|
||||||
|
break
|
||||||
|
# raise TunerError("807 - No Video Data")
|
||||||
|
yield chunk
|
||||||
|
chunk_size = int(sys.getsizeof(chunk))
|
||||||
|
self.tuner.add_downloaded_size(chunk_size)
|
||||||
|
self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")
|
||||||
|
|
||||||
|
except GeneratorExit:
|
||||||
|
self.fhdhr.logger.info("Connection Closed.")
|
||||||
|
except Exception as e:
|
||||||
|
self.fhdhr.logger.info("Connection Closed: " + str(e))
|
||||||
|
finally:
|
||||||
|
vlc_proc.terminate()
|
||||||
|
vlc_proc.communicate()
|
||||||
|
self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")
|
||||||
|
self.tuner.close()
|
||||||
|
# raise TunerError("806 - Tune Failed")
|
||||||
|
|
||||||
|
return generate()
|
||||||
|
|
||||||
|
def vlc_command_assemble(self, stream_args):
|
||||||
|
vlc_command = [
|
||||||
|
self.fhdhr.config.dict["vlc"]["path"],
|
||||||
|
"-I", "dummy", stream_args["channelUri"],
|
||||||
|
]
|
||||||
|
vlc_command.extend(self.vlc_duration(stream_args))
|
||||||
|
vlc_command.extend(self.vlc_loglevel())
|
||||||
|
vlc_command.extend(["--sout"])
|
||||||
|
vlc_command.extend(self.transcode_profiles(stream_args))
|
||||||
|
return vlc_command
|
||||||
|
|
||||||
|
def vlc_duration(self, stream_args):
|
||||||
|
vlc_command = []
|
||||||
|
if stream_args["duration"]:
|
||||||
|
vlc_command.extend(["--run-time=%s" % str(stream_args["duration"])])
|
||||||
|
return vlc_command
|
||||||
|
|
||||||
|
def vlc_loglevel(self):
|
||||||
|
vlc_command = []
|
||||||
|
log_level = self.fhdhr.config.dict["logging"]["level"].lower()
|
||||||
|
|
||||||
|
loglevel_dict = {
|
||||||
|
"debug": "3",
|
||||||
|
"info": "0",
|
||||||
|
"error": "1",
|
||||||
|
"warning": "2",
|
||||||
|
"critical": "1",
|
||||||
|
}
|
||||||
|
vlc_command.extend(["--log-verbose=", loglevel_dict[log_level]])
|
||||||
|
if log_level not in ["info", "debug"]:
|
||||||
|
vlc_command.extend(["--quiet"])
|
||||||
|
return vlc_command
|
||||||
|
|
||||||
|
def transcode_profiles(self, stream_args):
|
||||||
|
# TODO implement actual profiles here
|
||||||
|
"""
|
||||||
|
• heavy: transcode to AVC with the same resolution, frame-rate, and interlacing as the
|
||||||
|
original stream. For example 1080i60 AVC 1080i60, 720p60 AVC 720p60. → →
|
||||||
|
• mobile: trancode to AVC progressive not exceeding 1280x720 30fps.
|
||||||
|
• internet720: transcode to low bitrate AVC progressive not exceeding 1280x720 30fps.
|
||||||
|
• internet480: transcode to low bitrate AVC progressive not exceeding 848x480 30fps for
|
||||||
|
16:9 content, not exceeding 640x480 30fps for 4:3 content.
|
||||||
|
• internet360: transcode to low bitrate AVC progressive not exceeding 640x360 30fps for
|
||||||
|
16:9 content, not exceeding 480x360 30fps for 4:3 content.
|
||||||
|
• internet240: transcode to low bitrate AVC progressive not exceeding 432x240 30fps for
|
||||||
|
16:9 content, not exceeding 320x240 30fps for 4:3 content
|
||||||
|
"""
|
||||||
|
vlc_command = []
|
||||||
|
|
||||||
|
if stream_args["transcode"]:
|
||||||
|
self.fhdhr.logger.info("Client requested a " + stream_args["transcode"] + " transcode for stream.")
|
||||||
|
stream_args["transcode"] = None
|
||||||
|
|
||||||
|
vlc_transcode_string = "#std{mux=ts,access=file,dst=-}"
|
||||||
|
return [vlc_transcode_string]
|
||||||
|
|
||||||
|
'#transcode{vcodec=mp2v,vb=4096,acodec=mp2a,ab=192,scale=1,channels=2,deinterlace}:std{access=file,mux=ts,dst=-"}'
|
||||||
|
|
||||||
|
if not stream_args["transcode"]:
|
||||||
|
vlc_command.extend([])
|
||||||
|
elif stream_args["transcode"] == "heavy":
|
||||||
|
vlc_command.extend([])
|
||||||
|
elif stream_args["transcode"] == "mobile":
|
||||||
|
vlc_command.extend([])
|
||||||
|
elif stream_args["transcode"] == "internet720":
|
||||||
|
vlc_command.extend([])
|
||||||
|
elif stream_args["transcode"] == "internet480":
|
||||||
|
vlc_command.extend([])
|
||||||
|
elif stream_args["transcode"] == "internet360":
|
||||||
|
vlc_command.extend([])
|
||||||
|
elif stream_args["transcode"] == "internet240":
|
||||||
|
vlc_command.extend([])
|
||||||
|
|
||||||
|
return vlc_command
|
||||||
@ -8,76 +8,41 @@ from .stream import Stream
|
|||||||
|
|
||||||
|
|
||||||
class Tuner():
|
class Tuner():
|
||||||
def __init__(self, fhdhr, inum, epg, origin):
|
def __init__(self, fhdhr, inum, epg):
|
||||||
self.fhdhr = fhdhr
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
self.number = inum
|
self.number = inum
|
||||||
self.origin = origin
|
|
||||||
self.epg = epg
|
self.epg = epg
|
||||||
|
|
||||||
self.tuner_lock = threading.Lock()
|
self.tuner_lock = threading.Lock()
|
||||||
self.set_off_status()
|
self.set_off_status()
|
||||||
|
|
||||||
self.chanscan_url = "/api/channels?method=scan"
|
|
||||||
self.close_url = "/api/tuners?method=close&tuner=%s&origin=%s" % (self.number, self.origin)
|
|
||||||
|
|
||||||
def channel_scan(self, origin, grabbed=False):
|
|
||||||
if self.tuner_lock.locked() and not grabbed:
|
|
||||||
self.fhdhr.logger.error("%s Tuner #%s is not available." % (self.origin, self.number))
|
|
||||||
raise TunerError("804 - Tuner In Use")
|
|
||||||
|
|
||||||
if self.status["status"] == "Scanning":
|
|
||||||
self.fhdhr.logger.info("Channel Scan Already In Progress!")
|
|
||||||
else:
|
|
||||||
|
|
||||||
if not grabbed:
|
|
||||||
self.tuner_lock.acquire()
|
|
||||||
self.status["status"] = "Scanning"
|
|
||||||
self.status["origin"] = origin
|
|
||||||
self.status["time_start"] = datetime.datetime.utcnow()
|
|
||||||
self.fhdhr.logger.info("Tuner #%s Performing Channel Scan for %s origin." % (self.number, origin))
|
|
||||||
|
|
||||||
chanscan = threading.Thread(target=self.runscan, args=(origin,))
|
|
||||||
chanscan.start()
|
|
||||||
|
|
||||||
def runscan(self, origin):
|
|
||||||
self.fhdhr.api.get("%s&origin=%s" % (self.chanscan_url, origin))
|
|
||||||
self.fhdhr.logger.info("Requested Channel Scan for %s origin Complete." % origin)
|
|
||||||
self.close()
|
|
||||||
self.fhdhr.api.get(self.close_url)
|
|
||||||
|
|
||||||
def add_downloaded_size(self, bytes_count):
|
def add_downloaded_size(self, bytes_count):
|
||||||
if "downloaded" in list(self.status.keys()):
|
if "downloaded" in list(self.status.keys()):
|
||||||
self.status["downloaded"] += bytes_count
|
self.status["downloaded"] += bytes_count
|
||||||
|
|
||||||
def grab(self, origin, channel_number):
|
def grab(self):
|
||||||
if self.tuner_lock.locked():
|
if self.tuner_lock.locked():
|
||||||
self.fhdhr.logger.error("Tuner #%s is not available." % self.number)
|
self.fhdhr.logger.error("Tuner #" + str(self.number) + " is not available.")
|
||||||
raise TunerError("804 - Tuner In Use")
|
raise TunerError("804 - Tuner In Use")
|
||||||
self.tuner_lock.acquire()
|
self.tuner_lock.acquire()
|
||||||
self.status["status"] = "Acquired"
|
self.status["status"] = "Acquired"
|
||||||
self.status["origin"] = origin
|
self.fhdhr.logger.info("Tuner #" + str(self.number) + " Acquired.")
|
||||||
self.status["channel"] = channel_number
|
|
||||||
self.status["time_start"] = datetime.datetime.utcnow()
|
|
||||||
self.fhdhr.logger.info("Tuner #%s Acquired." % str(self.number))
|
|
||||||
|
|
||||||
def close(self):
|
def close(self):
|
||||||
self.set_off_status()
|
self.set_off_status()
|
||||||
if self.tuner_lock.locked():
|
if self.tuner_lock.locked():
|
||||||
self.tuner_lock.release()
|
self.tuner_lock.release()
|
||||||
self.fhdhr.logger.info("Tuner #%s Released." % self.number)
|
self.fhdhr.logger.info("Tuner #" + str(self.number) + " Released.")
|
||||||
|
|
||||||
def get_status(self):
|
def get_status(self):
|
||||||
current_status = self.status.copy()
|
current_status = self.status.copy()
|
||||||
current_status["epg"] = {}
|
if current_status["status"] == "Active":
|
||||||
if current_status["status"] in ["Acquired", "Active", "Scanning"]:
|
current_status["Play Time"] = str(
|
||||||
current_status["running_time"] = str(
|
|
||||||
humanized_time(
|
humanized_time(
|
||||||
int((datetime.datetime.utcnow() - current_status["time_start"]).total_seconds())))
|
int((datetime.datetime.utcnow() - current_status["time_start"]).total_seconds())))
|
||||||
current_status["time_start"] = str(current_status["time_start"])
|
current_status["time_start"] = str(current_status["time_start"])
|
||||||
if current_status["status"] in ["Active"]:
|
current_status["epg"] = self.epg.whats_on_now(current_status["channel"])
|
||||||
if current_status["origin"] in self.epg.epg_methods:
|
|
||||||
current_status["epg"] = self.epg.whats_on_now(current_status["channel"], method=current_status["origin"])
|
|
||||||
return current_status
|
return current_status
|
||||||
|
|
||||||
def set_off_status(self):
|
def set_off_status(self):
|
||||||
@ -85,7 +50,7 @@ class Tuner():
|
|||||||
|
|
||||||
def get_stream(self, stream_args, tuner):
|
def get_stream(self, stream_args, tuner):
|
||||||
stream = Stream(self.fhdhr, stream_args, tuner)
|
stream = Stream(self.fhdhr, stream_args, tuner)
|
||||||
return stream
|
return stream.get()
|
||||||
|
|
||||||
def set_status(self, stream_args):
|
def set_status(self, stream_args):
|
||||||
if self.status["status"] != "Active":
|
if self.status["status"] != "Active":
|
||||||
@ -95,9 +60,8 @@ class Tuner():
|
|||||||
"clients_id": [],
|
"clients_id": [],
|
||||||
"method": stream_args["method"],
|
"method": stream_args["method"],
|
||||||
"accessed": [stream_args["accessed"]],
|
"accessed": [stream_args["accessed"]],
|
||||||
"origin": stream_args["origin"],
|
|
||||||
"channel": stream_args["channel"],
|
"channel": stream_args["channel"],
|
||||||
"proxied_url": stream_args["stream_info"]["url"],
|
"proxied_url": stream_args["channelUri"],
|
||||||
"time_start": datetime.datetime.utcnow(),
|
"time_start": datetime.datetime.utcnow(),
|
||||||
"downloaded": 0
|
"downloaded": 0
|
||||||
}
|
}
|
||||||
|
|||||||
77
fHDHR/http/__init__.py
Normal file
77
fHDHR/http/__init__.py
Normal file
@ -0,0 +1,77 @@
|
|||||||
|
from gevent.pywsgi import WSGIServer
|
||||||
|
from flask import Flask
|
||||||
|
|
||||||
|
from .pages import fHDHR_Pages
|
||||||
|
from .files import fHDHR_Files
|
||||||
|
from .api import fHDHR_API
|
||||||
|
from .watch import fHDHR_WATCH
|
||||||
|
|
||||||
|
|
||||||
|
class fHDHR_HTTP_Server():
|
||||||
|
app = None
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
self.template_folder = fhdhr.config.internal["paths"]["www_templates_dir"]
|
||||||
|
|
||||||
|
self.fhdhr.logger.info("Loading Flask.")
|
||||||
|
|
||||||
|
self.app = Flask("fHDHR", template_folder=self.template_folder)
|
||||||
|
|
||||||
|
self.fhdhr.logger.info("Loading HTTP Pages Endpoints.")
|
||||||
|
self.pages = fHDHR_Pages(fhdhr)
|
||||||
|
self.add_endpoints(self.pages, "pages")
|
||||||
|
|
||||||
|
self.fhdhr.logger.info("Loading HTTP Files Endpoints.")
|
||||||
|
self.files = fHDHR_Files(fhdhr)
|
||||||
|
self.add_endpoints(self.files, "files")
|
||||||
|
|
||||||
|
self.fhdhr.logger.info("Loading HTTP API Endpoints.")
|
||||||
|
self.api = fHDHR_API(fhdhr)
|
||||||
|
self.add_endpoints(self.api, "api")
|
||||||
|
|
||||||
|
self.fhdhr.logger.info("Loading HTTP Stream Endpoints.")
|
||||||
|
self.watch = fHDHR_WATCH(fhdhr)
|
||||||
|
self.add_endpoints(self.watch, "watch")
|
||||||
|
|
||||||
|
def add_endpoints(self, index_list, index_name):
|
||||||
|
item_list = [x for x in dir(index_list) if self.isapath(x)]
|
||||||
|
for item in item_list:
|
||||||
|
endpoints = eval("self." + str(index_name) + "." + str(item) + ".endpoints")
|
||||||
|
if isinstance(endpoints, str):
|
||||||
|
endpoints = [endpoints]
|
||||||
|
handler = eval("self." + str(index_name) + "." + str(item))
|
||||||
|
endpoint_name = eval("self." + str(index_name) + "." + str(item) + ".endpoint_name")
|
||||||
|
try:
|
||||||
|
endpoint_methods = eval("self." + str(index_name) + "." + str(item) + ".endpoint_methods")
|
||||||
|
except AttributeError:
|
||||||
|
endpoint_methods = ['GET']
|
||||||
|
self.fhdhr.logger.info("Adding endpoint %s available at %s with %s methods." % (endpoint_name, ",".join(endpoints), ",".join(endpoint_methods)))
|
||||||
|
for endpoint in endpoints:
|
||||||
|
self.add_endpoint(endpoint=endpoint,
|
||||||
|
endpoint_name=endpoint_name,
|
||||||
|
handler=handler,
|
||||||
|
methods=endpoint_methods)
|
||||||
|
|
||||||
|
def isapath(self, item):
|
||||||
|
not_a_page_list = ["fhdhr", "htmlerror", "page_elements"]
|
||||||
|
if item in not_a_page_list:
|
||||||
|
return False
|
||||||
|
elif item.startswith("__") and item.endswith("__"):
|
||||||
|
return False
|
||||||
|
else:
|
||||||
|
return True
|
||||||
|
|
||||||
|
def add_endpoint(self, endpoint=None, endpoint_name=None, handler=None, methods=['GET']):
|
||||||
|
self.app.add_url_rule(endpoint, endpoint_name, handler, methods=methods)
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
self.http = WSGIServer((
|
||||||
|
self.fhdhr.config.dict["fhdhr"]["address"],
|
||||||
|
int(self.fhdhr.config.dict["fhdhr"]["port"])
|
||||||
|
), self.app.wsgi_app)
|
||||||
|
try:
|
||||||
|
self.http.serve_forever()
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
self.http.stop()
|
||||||
@ -1,18 +1,13 @@
|
|||||||
|
|
||||||
from .root_url import Root_URL
|
from .cluster import Cluster
|
||||||
from .startup_tasks import Startup_Tasks
|
|
||||||
|
|
||||||
from .settings import Settings
|
from .settings import Settings
|
||||||
from .channels import Channels
|
from .channels import Channels
|
||||||
|
from .lineup_post import Lineup_Post
|
||||||
from .xmltv import xmlTV
|
from .xmltv import xmlTV
|
||||||
from .m3u import M3U
|
from .m3u import M3U
|
||||||
from .w3u import W3U
|
|
||||||
from .epg import EPG
|
from .epg import EPG
|
||||||
from .tuners import Tuners
|
from .watch import Watch
|
||||||
from .debug import Debug_JSON
|
from .debug import Debug_JSON
|
||||||
from .plugins import Plugins_JSON
|
|
||||||
|
|
||||||
from .route_list import Route_List
|
|
||||||
|
|
||||||
from .images import Images
|
from .images import Images
|
||||||
|
|
||||||
@ -22,19 +17,14 @@ class fHDHR_API():
|
|||||||
def __init__(self, fhdhr):
|
def __init__(self, fhdhr):
|
||||||
self.fhdhr = fhdhr
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
self.root_url = Root_URL(fhdhr)
|
self.cluster = Cluster(fhdhr)
|
||||||
self.startup_tasks = Startup_Tasks(fhdhr)
|
|
||||||
|
|
||||||
self.settings = Settings(fhdhr)
|
self.settings = Settings(fhdhr)
|
||||||
self.channels = Channels(fhdhr)
|
self.channels = Channels(fhdhr)
|
||||||
self.xmltv = xmlTV(fhdhr)
|
self.xmltv = xmlTV(fhdhr)
|
||||||
self.m3u = M3U(fhdhr)
|
self.m3u = M3U(fhdhr)
|
||||||
self.w3u = W3U(fhdhr)
|
|
||||||
self.epg = EPG(fhdhr)
|
self.epg = EPG(fhdhr)
|
||||||
self.tuners = Tuners(fhdhr)
|
self.watch = Watch(fhdhr)
|
||||||
self.debug = Debug_JSON(fhdhr)
|
self.debug = Debug_JSON(fhdhr)
|
||||||
self.plugins = Plugins_JSON(fhdhr)
|
self.lineup_post = Lineup_Post(fhdhr)
|
||||||
|
|
||||||
self.route_list = Route_List(fhdhr)
|
|
||||||
|
|
||||||
self.images = Images(fhdhr)
|
self.images = Images(fhdhr)
|
||||||
108
fHDHR/http/api/channels.py
Normal file
108
fHDHR/http/api/channels.py
Normal file
@ -0,0 +1,108 @@
|
|||||||
|
from flask import request, redirect, Response, abort
|
||||||
|
import urllib.parse
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
|
class Channels():
|
||||||
|
endpoints = ["/api/channels"]
|
||||||
|
endpoint_name = "api_channels"
|
||||||
|
endpoint_methods = ["GET", "POST"]
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
def __call__(self, *args):
|
||||||
|
return self.get(*args)
|
||||||
|
|
||||||
|
def get(self, *args):
|
||||||
|
|
||||||
|
method = request.args.get('method', default=None, type=str)
|
||||||
|
redirect_url = request.args.get('redirect', default=None, type=str)
|
||||||
|
|
||||||
|
if method == "get":
|
||||||
|
channels_info = []
|
||||||
|
for fhdhr_id in list(self.fhdhr.device.channels.list.keys()):
|
||||||
|
channel_obj = self.fhdhr.device.channels.list[fhdhr_id]
|
||||||
|
channel_dict = channel_obj.dict.copy()
|
||||||
|
channel_dict["play_url"] = channel_obj.play_url()
|
||||||
|
channel_dict["stream_url"] = channel_obj.stream_url()
|
||||||
|
channels_info.append(channel_dict)
|
||||||
|
channels_info_json = json.dumps(channels_info, indent=4)
|
||||||
|
|
||||||
|
return Response(status=200,
|
||||||
|
response=channels_info_json,
|
||||||
|
mimetype='application/json')
|
||||||
|
|
||||||
|
elif method == "favorite":
|
||||||
|
|
||||||
|
channel = request.args.get('channel', default=None, type=str)
|
||||||
|
if not channel:
|
||||||
|
if redirect_url:
|
||||||
|
return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Failed" % method))
|
||||||
|
else:
|
||||||
|
return "%s Falied" % method
|
||||||
|
|
||||||
|
if channel.startstwith(tuple(["+", "-", "x"])):
|
||||||
|
|
||||||
|
channel_method = channel[0]
|
||||||
|
channel_number = channel[1:]
|
||||||
|
|
||||||
|
if str(channel_number) not in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number")]:
|
||||||
|
response = Response("Not Found", status=404)
|
||||||
|
response.headers["X-fHDHR-Error"] = "801 - Unknown Channel"
|
||||||
|
self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
|
||||||
|
abort(response)
|
||||||
|
|
||||||
|
if channel_method == "+":
|
||||||
|
self.fhdhr.device.channels.set_channel_enablement("number", channel_number, channel_method)
|
||||||
|
elif channel_method == "-":
|
||||||
|
self.fhdhr.device.channels.set_channel_enablement("number", channel_number, channel_method)
|
||||||
|
elif channel_method == "x":
|
||||||
|
self.fhdhr.device.channels.set_channel_enablement("number", channel_number, "toggle")
|
||||||
|
|
||||||
|
else:
|
||||||
|
self.fhdhr.logger.warning("Unknown favorite command " + request.args['favorite'])
|
||||||
|
return abort(200, "Not a valid favorite command")
|
||||||
|
|
||||||
|
elif method in ["enable", "disable"]:
|
||||||
|
channel = request.args.get('channel', default=None, type=str)
|
||||||
|
if not channel or str(channel) not in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number")]:
|
||||||
|
if redirect_url:
|
||||||
|
return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Failed" % method))
|
||||||
|
else:
|
||||||
|
return "%s Falied" % method
|
||||||
|
self.fhdhr.device.channels.set_channel_enablement("number", channel, method)
|
||||||
|
|
||||||
|
elif method == "update":
|
||||||
|
channel_id = request.form.get('id', None)
|
||||||
|
updatedict = {}
|
||||||
|
for key in list(request.form.keys()):
|
||||||
|
if key != "id":
|
||||||
|
if key in ["name", "callsign", "thumbnail"]:
|
||||||
|
updatedict[key] = str(request.form.get(key))
|
||||||
|
elif key in ["number"]:
|
||||||
|
updatedict[key] = float(request.form.get(key))
|
||||||
|
elif key in ["enabled"]:
|
||||||
|
confvalue = request.form.get(key)
|
||||||
|
if str(confvalue).lower() in ["false"]:
|
||||||
|
confvalue = False
|
||||||
|
elif str(confvalue).lower() in ["true"]:
|
||||||
|
confvalue = True
|
||||||
|
updatedict[key] = confvalue
|
||||||
|
elif key in ["favorite", "HD"]:
|
||||||
|
updatedict[key] = int(request.form.get(key))
|
||||||
|
self.fhdhr.device.channels.set_channel_status("id", channel_id, updatedict)
|
||||||
|
|
||||||
|
elif method == "scan":
|
||||||
|
self.fhdhr.device.station_scan.scan(waitfordone=True)
|
||||||
|
|
||||||
|
else:
|
||||||
|
return "Invalid Method"
|
||||||
|
|
||||||
|
if redirect_url:
|
||||||
|
return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Success" % method))
|
||||||
|
else:
|
||||||
|
if method == "scan":
|
||||||
|
return redirect('/lineup_status.json')
|
||||||
|
else:
|
||||||
|
return "%s Success" % method
|
||||||
53
fHDHR/http/api/cluster.py
Normal file
53
fHDHR/http/api/cluster.py
Normal file
@ -0,0 +1,53 @@
|
|||||||
|
from flask import request, redirect, Response
|
||||||
|
import urllib.parse
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
|
class Cluster():
|
||||||
|
endpoints = ["/api/cluster"]
|
||||||
|
endpoint_name = "api_cluster"
|
||||||
|
endpoint_methods = ["GET", "POST"]
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
def __call__(self, *args):
|
||||||
|
return self.get(*args)
|
||||||
|
|
||||||
|
def get(self, *args):
|
||||||
|
|
||||||
|
method = request.args.get('method', default="get", type=str)
|
||||||
|
location = request.args.get("location", default=None, type=str)
|
||||||
|
redirect_url = request.args.get('redirect', default=None, type=str)
|
||||||
|
|
||||||
|
if method == "get":
|
||||||
|
jsoncluster = self.fhdhr.device.cluster.cluster()
|
||||||
|
cluster_json = json.dumps(jsoncluster, indent=4)
|
||||||
|
|
||||||
|
return Response(status=200,
|
||||||
|
response=cluster_json,
|
||||||
|
mimetype='application/json')
|
||||||
|
|
||||||
|
elif method == "scan":
|
||||||
|
self.fhdhr.device.ssdp.m_search()
|
||||||
|
|
||||||
|
elif method == 'add':
|
||||||
|
self.fhdhr.device.cluster.add(location)
|
||||||
|
elif method == 'del':
|
||||||
|
self.fhdhr.device.cluster.remove(location)
|
||||||
|
|
||||||
|
elif method == 'sync':
|
||||||
|
self.fhdhr.device.cluster.sync(location)
|
||||||
|
|
||||||
|
elif method == 'leave':
|
||||||
|
self.fhdhr.device.cluster.leave()
|
||||||
|
elif method == 'disconnect':
|
||||||
|
self.fhdhr.device.cluster.disconnect()
|
||||||
|
|
||||||
|
else:
|
||||||
|
return "Invalid Method"
|
||||||
|
|
||||||
|
if redirect_url:
|
||||||
|
return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Success" % method))
|
||||||
|
else:
|
||||||
|
return "%s Success" % method
|
||||||
@ -19,16 +19,11 @@ class Debug_JSON():
|
|||||||
|
|
||||||
debugjson = {
|
debugjson = {
|
||||||
"base_url": base_url,
|
"base_url": base_url,
|
||||||
|
"total channels": len(self.fhdhr.device.channels.list),
|
||||||
|
"tuner status": self.fhdhr.device.tuners.status(),
|
||||||
}
|
}
|
||||||
|
cluster_json = json.dumps(debugjson, indent=4)
|
||||||
for origin in list(self.fhdhr.origins.origins_dict.keys()):
|
|
||||||
debugjson[origin] = {
|
|
||||||
"tuner status": self.fhdhr.device.tuners.status(origin),
|
|
||||||
"total channels": len(list(self.fhdhr.device.channels.list[origin].keys()))
|
|
||||||
}
|
|
||||||
|
|
||||||
debug_json = json.dumps(debugjson, indent=4)
|
|
||||||
|
|
||||||
return Response(status=200,
|
return Response(status=200,
|
||||||
response=debug_json,
|
response=cluster_json,
|
||||||
mimetype='application/json')
|
mimetype='application/json')
|
||||||
49
fHDHR/http/api/epg.py
Normal file
49
fHDHR/http/api/epg.py
Normal file
@ -0,0 +1,49 @@
|
|||||||
|
from flask import Response, request, redirect
|
||||||
|
import urllib.parse
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
|
class EPG():
|
||||||
|
"""Methods to create xmltv.xml"""
|
||||||
|
endpoints = ["/api/epg"]
|
||||||
|
endpoint_name = "api_epg"
|
||||||
|
endpoint_methods = ["GET", "POST"]
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
def __call__(self, *args):
|
||||||
|
return self.get(*args)
|
||||||
|
|
||||||
|
def get(self, *args):
|
||||||
|
|
||||||
|
method = request.args.get('method', default="get", type=str)
|
||||||
|
|
||||||
|
source = request.args.get('source', default=self.fhdhr.config.dict["epg"]["def_method"], type=str)
|
||||||
|
if source not in self.fhdhr.config.dict["main"]["valid_epg_methods"]:
|
||||||
|
return "%s Invalid xmltv method" % source
|
||||||
|
|
||||||
|
redirect_url = request.args.get('redirect', default=None, type=str)
|
||||||
|
|
||||||
|
if method == "get":
|
||||||
|
|
||||||
|
epgdict = self.fhdhr.device.epg.get_epg(source)
|
||||||
|
epg_json = json.dumps(epgdict, indent=4)
|
||||||
|
|
||||||
|
return Response(status=200,
|
||||||
|
response=epg_json,
|
||||||
|
mimetype='application/json')
|
||||||
|
|
||||||
|
elif method == "update":
|
||||||
|
self.fhdhr.device.epg.update(source)
|
||||||
|
|
||||||
|
elif method == "clearcache":
|
||||||
|
self.fhdhr.device.epg.clear_epg_cache(source)
|
||||||
|
|
||||||
|
else:
|
||||||
|
return "%s Invalid Method" % method
|
||||||
|
|
||||||
|
if redirect_url:
|
||||||
|
return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Success" % method))
|
||||||
|
else:
|
||||||
|
return "%s Success" % method
|
||||||
@ -5,11 +5,6 @@ class Images():
|
|||||||
endpoints = ["/api/images"]
|
endpoints = ["/api/images"]
|
||||||
endpoint_name = "api_images"
|
endpoint_name = "api_images"
|
||||||
endpoint_methods = ["GET", "POST"]
|
endpoint_methods = ["GET", "POST"]
|
||||||
endpoint_default_parameters = {
|
|
||||||
"method": "generate",
|
|
||||||
"type": "content",
|
|
||||||
"message": "Internal Image Handling"
|
|
||||||
}
|
|
||||||
|
|
||||||
def __init__(self, fhdhr):
|
def __init__(self, fhdhr):
|
||||||
self.fhdhr = fhdhr
|
self.fhdhr = fhdhr
|
||||||
@ -31,7 +26,7 @@ class Images():
|
|||||||
|
|
||||||
elif method == "get":
|
elif method == "get":
|
||||||
source = request.args.get('source', default=self.fhdhr.config.dict["epg"]["method"], type=str)
|
source = request.args.get('source', default=self.fhdhr.config.dict["epg"]["method"], type=str)
|
||||||
if source in list(self.fhdhr.config.dict["epg"]["valid_methods"].keys()):
|
if source in self.fhdhr.config.dict["main"]["valid_epg_methods"]:
|
||||||
image_type = request.args.get('type', default="content", type=str)
|
image_type = request.args.get('type', default="content", type=str)
|
||||||
if image_type in ["content", "channel"]:
|
if image_type in ["content", "channel"]:
|
||||||
image_id = request.args.get('id', default=None, type=str)
|
image_id = request.args.get('id', default=None, type=str)
|
||||||
@ -1,42 +1,30 @@
|
|||||||
from flask import request, abort, Response
|
from flask import request, abort, Response
|
||||||
|
|
||||||
from fHDHR.exceptions import TunerError
|
|
||||||
|
|
||||||
|
|
||||||
class Lineup_Post():
|
class Lineup_Post():
|
||||||
endpoints = ["/lineup.post", "/hdhr/lineup.post"]
|
endpoints = ["/lineup.post"]
|
||||||
endpoint_name = "hdhr_lineup_post"
|
endpoint_name = "api_lineup_post"
|
||||||
endpoint_methods = ["POST"]
|
endpoint_methods = ["POST"]
|
||||||
|
|
||||||
def __init__(self, fhdhr):
|
def __init__(self, fhdhr):
|
||||||
self.fhdhr = fhdhr
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
@property
|
|
||||||
def source(self):
|
|
||||||
return self.fhdhr.config.dict["hdhr"]["source"] or self.fhdhr.origins.valid_origins[0]
|
|
||||||
|
|
||||||
def __call__(self, *args):
|
def __call__(self, *args):
|
||||||
return self.get(*args)
|
return self.get(*args)
|
||||||
|
|
||||||
def get(self, *args):
|
def get(self, *args):
|
||||||
|
|
||||||
origin = self.source
|
|
||||||
|
|
||||||
if 'scan' in list(request.args.keys()):
|
if 'scan' in list(request.args.keys()):
|
||||||
|
|
||||||
if request.args['scan'] == 'start':
|
if request.args['scan'] == 'start':
|
||||||
try:
|
self.fhdhr.device.station_scan.scan(waitfordone=False)
|
||||||
self.fhdhr.device.tuners.tuner_scan(origin)
|
|
||||||
except TunerError as e:
|
|
||||||
self.fhdhr.logger.info(str(e))
|
|
||||||
return Response(status=200, mimetype='text/html')
|
return Response(status=200, mimetype='text/html')
|
||||||
|
|
||||||
elif request.args['scan'] == 'abort':
|
elif request.args['scan'] == 'abort':
|
||||||
self.fhdhr.device.tuners.stop_tuner_scan(origin)
|
|
||||||
return Response(status=200, mimetype='text/html')
|
return Response(status=200, mimetype='text/html')
|
||||||
|
|
||||||
else:
|
else:
|
||||||
self.fhdhr.logger.warning("Unknown scan command %s" % request.args['scan'])
|
self.fhdhr.logger.warning("Unknown scan command " + request.args['scan'])
|
||||||
return abort(200, "Not a valid scan command")
|
return abort(200, "Not a valid scan command")
|
||||||
|
|
||||||
elif 'favorite' in list(request.args.keys()):
|
elif 'favorite' in list(request.args.keys()):
|
||||||
@ -45,21 +33,21 @@ class Lineup_Post():
|
|||||||
channel_method = request.args['favorite'][0]
|
channel_method = request.args['favorite'][0]
|
||||||
channel_number = request.args['favorite'][1:]
|
channel_number = request.args['favorite'][1:]
|
||||||
|
|
||||||
if str(channel_number) not in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number", origin)]:
|
if str(channel_number) not in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number")]:
|
||||||
response = Response("Not Found", status=404)
|
response = Response("Not Found", status=404)
|
||||||
response.headers["X-fHDHR-Error"] = "801 - Unknown Channel"
|
response.headers["X-fHDHR-Error"] = "801 - Unknown Channel"
|
||||||
self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
|
self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
|
||||||
abort(response)
|
abort(response)
|
||||||
|
|
||||||
if channel_method == "+":
|
if channel_method == "+":
|
||||||
self.fhdhr.device.channels.set_channel_enablement("number", channel_number, channel_method, origin)
|
self.fhdhr.device.channels.set_channel_enablement("number", channel_number, channel_method)
|
||||||
elif channel_method == "-":
|
elif channel_method == "-":
|
||||||
self.fhdhr.device.channels.set_channel_enablement("number", channel_number, channel_method, origin)
|
self.fhdhr.device.channels.set_channel_enablement("number", channel_number, channel_method)
|
||||||
elif channel_method == "x":
|
elif channel_method == "x":
|
||||||
self.fhdhr.device.channels.set_channel_enablement("number", channel_number, "toggle", origin)
|
self.fhdhr.device.channels.set_channel_enablement("number", channel_number, "toggle")
|
||||||
|
|
||||||
else:
|
else:
|
||||||
self.fhdhr.logger.warning("Unknown favorite command %s" % request.args['favorite'])
|
self.fhdhr.logger.warning("Unknown favorite command " + request.args['favorite'])
|
||||||
return abort(200, "Not a valid favorite command")
|
return abort(200, "Not a valid favorite command")
|
||||||
|
|
||||||
else:
|
else:
|
||||||
89
fHDHR/http/api/m3u.py
Normal file
89
fHDHR/http/api/m3u.py
Normal file
@ -0,0 +1,89 @@
|
|||||||
|
from flask import Response, request, redirect
|
||||||
|
import urllib.parse
|
||||||
|
from io import StringIO
|
||||||
|
|
||||||
|
|
||||||
|
class M3U():
|
||||||
|
endpoints = ["/api/m3u", "/api/channels.m3u"]
|
||||||
|
endpoint_name = "api_m3u"
|
||||||
|
endpoint_methods = ["GET", "POST"]
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
def __call__(self, *args):
|
||||||
|
return self.get(*args)
|
||||||
|
|
||||||
|
def get(self, *args):
|
||||||
|
|
||||||
|
base_url = request.url_root[:-1]
|
||||||
|
|
||||||
|
method = request.args.get('method', default="get", type=str)
|
||||||
|
channel = request.args.get('channel', default="all", type=str)
|
||||||
|
redirect_url = request.args.get('redirect', default=None, type=str)
|
||||||
|
|
||||||
|
if method == "get":
|
||||||
|
|
||||||
|
FORMAT_DESCRIPTOR = "#EXTM3U"
|
||||||
|
RECORD_MARKER = "#EXTINF"
|
||||||
|
|
||||||
|
fakefile = StringIO()
|
||||||
|
|
||||||
|
xmltvurl = ('%s/api/xmltv' % base_url)
|
||||||
|
|
||||||
|
fakefile.write(
|
||||||
|
"%s\n" % (
|
||||||
|
FORMAT_DESCRIPTOR + " " +
|
||||||
|
"url-tvg=\"" + xmltvurl + "\"" + " " +
|
||||||
|
"x-tvg-url=\"" + xmltvurl + "\"")
|
||||||
|
)
|
||||||
|
|
||||||
|
channel_items = []
|
||||||
|
|
||||||
|
if channel == "all":
|
||||||
|
fileName = "channels.m3u"
|
||||||
|
for fhdhr_id in list(self.fhdhr.device.channels.list.keys()):
|
||||||
|
channel_obj = self.fhdhr.device.channels.list[fhdhr_id]
|
||||||
|
if channel_obj.enabled:
|
||||||
|
channel_items.append(channel_obj)
|
||||||
|
elif str(channel) in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number")]:
|
||||||
|
channel_obj = self.fhdhr.device.channels.get_channel_obj("number", channel)
|
||||||
|
fileName = str(channel_obj.number) + ".m3u"
|
||||||
|
if channel_obj.enabled:
|
||||||
|
channel_items.append(channel_obj)
|
||||||
|
else:
|
||||||
|
return "Channel Disabled"
|
||||||
|
else:
|
||||||
|
return "Invalid Channel"
|
||||||
|
|
||||||
|
for channel_obj in channel_items:
|
||||||
|
|
||||||
|
if self.fhdhr.config.dict["epg"]["images"] == "proxy" or not channel_obj.thumbnail:
|
||||||
|
logourl = ('%s/api/images?method=get&type=channel&id=%s' %
|
||||||
|
(base_url, str(channel_obj.dict['origin_id'])))
|
||||||
|
else:
|
||||||
|
logourl = channel_obj.thumbnail
|
||||||
|
|
||||||
|
fakefile.write(
|
||||||
|
"%s\n" % (
|
||||||
|
RECORD_MARKER + ":0" + " " +
|
||||||
|
"channelID=\"" + str(channel_obj.dict['origin_id']) + "\" " +
|
||||||
|
"tvg-chno=\"" + str(channel_obj.dict['number']) + "\" " +
|
||||||
|
"tvg-name=\"" + str(channel_obj.dict['name']) + "\" " +
|
||||||
|
"tvg-id=\"" + str(channel_obj.dict['number']) + "\" " +
|
||||||
|
"tvg-logo=\"" + logourl + "\" " +
|
||||||
|
"group-title=\"" + self.fhdhr.config.dict["fhdhr"]["friendlyname"] + "\"," + str(channel_obj.dict['name']))
|
||||||
|
)
|
||||||
|
|
||||||
|
fakefile.write("%s\n" % (base_url + channel_obj.stream_url()))
|
||||||
|
|
||||||
|
channels_m3u = fakefile.getvalue()
|
||||||
|
|
||||||
|
resp = Response(status=200, response=channels_m3u, mimetype='audio/x-mpegurl')
|
||||||
|
resp.headers["content-disposition"] = "attachment; filename=" + fileName
|
||||||
|
return resp
|
||||||
|
|
||||||
|
if redirect_url:
|
||||||
|
return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Success" % method))
|
||||||
|
else:
|
||||||
|
return "%s Success" % method
|
||||||
40
fHDHR/http/api/settings.py
Normal file
40
fHDHR/http/api/settings.py
Normal file
@ -0,0 +1,40 @@
|
|||||||
|
from flask import request, redirect
|
||||||
|
import urllib.parse
|
||||||
|
|
||||||
|
|
||||||
|
class Settings():
|
||||||
|
endpoints = ["/api/settings"]
|
||||||
|
endpoint_name = "api_settings"
|
||||||
|
endpoint_methods = ["GET", "POST"]
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
def __call__(self, *args):
|
||||||
|
return self.get(*args)
|
||||||
|
|
||||||
|
def get(self, *args):
|
||||||
|
|
||||||
|
method = request.args.get('method', default="get", type=str)
|
||||||
|
redirect_url = request.args.get('redirect', default=None, type=str)
|
||||||
|
|
||||||
|
if method == "update":
|
||||||
|
config_section = request.form.get('config_section', None)
|
||||||
|
config_name = request.form.get('config_name', None)
|
||||||
|
config_value = request.form.get('config_value', None)
|
||||||
|
|
||||||
|
if not config_section or not config_name or not config_value:
|
||||||
|
if redirect_url:
|
||||||
|
return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Failed" % method))
|
||||||
|
else:
|
||||||
|
return "%s Falied" % method
|
||||||
|
|
||||||
|
if config_section == "origin":
|
||||||
|
config_section = self.fhdhr.config.dict["main"]["dictpopname"]
|
||||||
|
|
||||||
|
self.fhdhr.config.write(config_section, config_name, config_value)
|
||||||
|
|
||||||
|
if redirect_url:
|
||||||
|
return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Success" % method))
|
||||||
|
else:
|
||||||
|
return "%s Success" % method
|
||||||
118
fHDHR/http/api/watch.py
Normal file
118
fHDHR/http/api/watch.py
Normal file
@ -0,0 +1,118 @@
|
|||||||
|
from flask import Response, request, redirect, abort, stream_with_context
|
||||||
|
import urllib.parse
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
from fHDHR.exceptions import TunerError
|
||||||
|
|
||||||
|
|
||||||
|
class Watch():
|
||||||
|
"""Methods to create xmltv.xml"""
|
||||||
|
endpoints = ["/api/watch"]
|
||||||
|
endpoint_name = "api_watch"
|
||||||
|
endpoint_methods = ["GET", "POST"]
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
def __call__(self, *args):
|
||||||
|
return self.get(*args)
|
||||||
|
|
||||||
|
def get(self, *args):
|
||||||
|
|
||||||
|
client_address = request.remote_addr
|
||||||
|
|
||||||
|
accessed_url = request.args.get('accessed', default=request.url, type=str)
|
||||||
|
|
||||||
|
method = request.args.get('method', default=self.fhdhr.config.dict["fhdhr"]["stream_type"], type=str)
|
||||||
|
|
||||||
|
tuner_number = request.args.get('tuner', None, type=str)
|
||||||
|
|
||||||
|
redirect_url = request.args.get('redirect', default=None, type=str)
|
||||||
|
|
||||||
|
if method in ["direct", "ffmpeg", "vlc"]:
|
||||||
|
|
||||||
|
channel_number = request.args.get('channel', None, type=str)
|
||||||
|
if not channel_number:
|
||||||
|
return "Missing Channel"
|
||||||
|
|
||||||
|
if str(channel_number) not in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number")]:
|
||||||
|
response = Response("Not Found", status=404)
|
||||||
|
response.headers["X-fHDHR-Error"] = "801 - Unknown Channel"
|
||||||
|
self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
|
||||||
|
abort(response)
|
||||||
|
|
||||||
|
channel_dict = self.fhdhr.device.channels.get_channel_dict("number", channel_number)
|
||||||
|
if not channel_dict["enabled"]:
|
||||||
|
response = Response("Service Unavailable", status=503)
|
||||||
|
response.headers["X-fHDHR-Error"] = str("806 - Tune Failed")
|
||||||
|
self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
|
||||||
|
abort(response)
|
||||||
|
|
||||||
|
duration = request.args.get('duration', default=0, type=int)
|
||||||
|
|
||||||
|
transcode = request.args.get('transcode', default=None, type=str)
|
||||||
|
valid_transcode_types = [None, "heavy", "mobile", "internet720", "internet480", "internet360", "internet240"]
|
||||||
|
if transcode not in valid_transcode_types:
|
||||||
|
response = Response("Service Unavailable", status=503)
|
||||||
|
response.headers["X-fHDHR-Error"] = "802 - Unknown Transcode Profile"
|
||||||
|
self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
|
||||||
|
abort(response)
|
||||||
|
|
||||||
|
stream_args = {
|
||||||
|
"channel": channel_number,
|
||||||
|
"method": method,
|
||||||
|
"duration": duration,
|
||||||
|
"transcode": transcode,
|
||||||
|
"accessed": accessed_url,
|
||||||
|
"client": client_address,
|
||||||
|
"client_id": str(client_address) + "_" + str(uuid.uuid4())
|
||||||
|
}
|
||||||
|
|
||||||
|
try:
|
||||||
|
if not tuner_number:
|
||||||
|
tunernum = self.fhdhr.device.tuners.first_available()
|
||||||
|
else:
|
||||||
|
tunernum = self.fhdhr.device.tuners.tuner_grab(tuner_number)
|
||||||
|
except TunerError as e:
|
||||||
|
self.fhdhr.logger.info("A %s stream request for channel %s was rejected due to %s"
|
||||||
|
% (stream_args["method"], str(stream_args["channel"]), str(e)))
|
||||||
|
response = Response("Service Unavailable", status=503)
|
||||||
|
response.headers["X-fHDHR-Error"] = str(e)
|
||||||
|
self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
|
||||||
|
abort(response)
|
||||||
|
tuner = self.fhdhr.device.tuners.tuners[str(tunernum)]
|
||||||
|
|
||||||
|
try:
|
||||||
|
stream_args = self.fhdhr.device.tuners.get_stream_info(stream_args)
|
||||||
|
except TunerError as e:
|
||||||
|
self.fhdhr.logger.info("A %s stream request for channel %s was rejected due to %s"
|
||||||
|
% (stream_args["method"], str(stream_args["channel"]), str(e)))
|
||||||
|
response = Response("Service Unavailable", status=503)
|
||||||
|
response.headers["X-fHDHR-Error"] = str(e)
|
||||||
|
self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
|
||||||
|
tuner.close()
|
||||||
|
abort(response)
|
||||||
|
|
||||||
|
self.fhdhr.logger.info("Tuner #" + str(tunernum) + " to be used for stream.")
|
||||||
|
tuner.set_status(stream_args)
|
||||||
|
|
||||||
|
if stream_args["method"] == "direct":
|
||||||
|
return Response(tuner.get_stream(stream_args, tuner), content_type=stream_args["content_type"], direct_passthrough=True)
|
||||||
|
elif stream_args["method"] in ["ffmpeg", "vlc"]:
|
||||||
|
return Response(stream_with_context(tuner.get_stream(stream_args, tuner)), mimetype=stream_args["content_type"])
|
||||||
|
|
||||||
|
elif method == "close":
|
||||||
|
|
||||||
|
if not tuner_number or str(tuner_number) not in list(self.fhdhr.device.tuners.tuners.keys()):
|
||||||
|
return "%s Invalid tuner" % str(tuner_number)
|
||||||
|
|
||||||
|
tuner = self.fhdhr.device.tuners.tuners[str(tuner_number)]
|
||||||
|
tuner.close()
|
||||||
|
|
||||||
|
else:
|
||||||
|
return "%s Invalid Method" % method
|
||||||
|
|
||||||
|
if redirect_url:
|
||||||
|
return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Success" % method))
|
||||||
|
else:
|
||||||
|
return "%s Success" % method
|
||||||
@ -1,8 +1,7 @@
|
|||||||
from flask import Response, request, redirect, session
|
from flask import Response, request, redirect
|
||||||
import xml.etree.ElementTree
|
import xml.etree.ElementTree
|
||||||
from io import BytesIO
|
from io import BytesIO
|
||||||
import urllib.parse
|
import urllib.parse
|
||||||
import datetime
|
|
||||||
|
|
||||||
from fHDHR.tools import sub_el
|
from fHDHR.tools import sub_el
|
||||||
|
|
||||||
@ -16,21 +15,14 @@ class xmlTV():
|
|||||||
def __init__(self, fhdhr):
|
def __init__(self, fhdhr):
|
||||||
self.fhdhr = fhdhr
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
self.xmltv_offset = {}
|
|
||||||
for epg_method in list(self.fhdhr.device.epg.epg_handling.keys()):
|
|
||||||
if epg_method in list(self.fhdhr.config.dict.keys()):
|
|
||||||
if "xmltv_offset" in list(self.fhdhr.config.dict[epg_method].keys()):
|
|
||||||
self.xmltv_offset[epg_method] = self.fhdhr.config.dict[epg_method]["xmltv_offset"]
|
|
||||||
if epg_method not in list(self.xmltv_offset.keys()):
|
|
||||||
self.xmltv_offset[epg_method] = self.fhdhr.config.dict["epg"]["xmltv_offset"]
|
|
||||||
|
|
||||||
def __call__(self, *args):
|
def __call__(self, *args):
|
||||||
return self.get(*args)
|
return self.get(*args)
|
||||||
|
|
||||||
def get(self, *args):
|
def get(self, *args):
|
||||||
|
|
||||||
if self.fhdhr.config.dict["fhdhr"]["require_auth"]:
|
if self.fhdhr.config.dict["fhdhr"]["require_auth"]:
|
||||||
if session["deviceauth"] != self.fhdhr.config.dict["fhdhr"]["device_auth"]:
|
DeviceAuth = request.args.get('DeviceAuth', default=None, type=str)
|
||||||
|
if DeviceAuth != self.fhdhr.config.dict["fhdhr"]["device_auth"]:
|
||||||
return "not subscribed"
|
return "not subscribed"
|
||||||
|
|
||||||
base_url = request.url_root[:-1]
|
base_url = request.url_root[:-1]
|
||||||
@ -38,7 +30,7 @@ class xmlTV():
|
|||||||
method = request.args.get('method', default="get", type=str)
|
method = request.args.get('method', default="get", type=str)
|
||||||
|
|
||||||
source = request.args.get('source', default=self.fhdhr.config.dict["epg"]["def_method"], type=str)
|
source = request.args.get('source', default=self.fhdhr.config.dict["epg"]["def_method"], type=str)
|
||||||
if source not in list(self.fhdhr.config.dict["epg"]["valid_methods"].keys()):
|
if source not in self.fhdhr.config.dict["main"]["valid_epg_methods"]:
|
||||||
return "%s Invalid xmltv method" % source
|
return "%s Invalid xmltv method" % source
|
||||||
|
|
||||||
redirect_url = request.args.get('redirect', default=None, type=str)
|
redirect_url = request.args.get('redirect', default=None, type=str)
|
||||||
@ -46,19 +38,7 @@ class xmlTV():
|
|||||||
if method == "get":
|
if method == "get":
|
||||||
|
|
||||||
epgdict = self.fhdhr.device.epg.get_epg(source)
|
epgdict = self.fhdhr.device.epg.get_epg(source)
|
||||||
|
xmltv_xml = self.create_xmltv(base_url, epgdict)
|
||||||
if source in self.fhdhr.origins.valid_origins:
|
|
||||||
epgdict = epgdict.copy()
|
|
||||||
for c in list(epgdict.keys()):
|
|
||||||
chan_obj = self.fhdhr.device.channels.get_channel_obj("origin_id", epgdict[c]["id"], source)
|
|
||||||
epgdict[chan_obj.number] = epgdict.pop(c)
|
|
||||||
epgdict[chan_obj.number]["name"] = chan_obj.dict["name"]
|
|
||||||
epgdict[chan_obj.number]["callsign"] = chan_obj.dict["callsign"]
|
|
||||||
epgdict[chan_obj.number]["number"] = chan_obj.number
|
|
||||||
epgdict[chan_obj.number]["id"] = chan_obj.dict["origin_id"]
|
|
||||||
epgdict[chan_obj.number]["thumbnail"] = chan_obj.thumbnail
|
|
||||||
|
|
||||||
xmltv_xml = self.create_xmltv(base_url, epgdict, source)
|
|
||||||
|
|
||||||
return Response(status=200,
|
return Response(status=200,
|
||||||
response=xmltv_xml,
|
response=xmltv_xml,
|
||||||
@ -74,7 +54,7 @@ class xmlTV():
|
|||||||
return "%s Invalid Method" % method
|
return "%s Invalid Method" % method
|
||||||
|
|
||||||
if redirect_url:
|
if redirect_url:
|
||||||
return redirect("%s?retmessage=%s" % (redirect_url, urllib.parse.quote("%s Success" % method)))
|
return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Success" % method))
|
||||||
else:
|
else:
|
||||||
return "%s Success" % method
|
return "%s Success" % method
|
||||||
|
|
||||||
@ -84,7 +64,7 @@ class xmlTV():
|
|||||||
xmltvgen.set('source-info-url', self.fhdhr.config.dict["fhdhr"]["friendlyname"])
|
xmltvgen.set('source-info-url', self.fhdhr.config.dict["fhdhr"]["friendlyname"])
|
||||||
xmltvgen.set('source-info-name', self.fhdhr.config.dict["main"]["servicename"])
|
xmltvgen.set('source-info-name', self.fhdhr.config.dict["main"]["servicename"])
|
||||||
xmltvgen.set('generator-info-name', 'fHDHR')
|
xmltvgen.set('generator-info-name', 'fHDHR')
|
||||||
xmltvgen.set('generator-info-url', 'fHDHR/%s' % self.fhdhr.config.dict["main"]["reponame"])
|
xmltvgen.set('generator-info-url', 'fHDHR/' + self.fhdhr.config.dict["main"]["reponame"])
|
||||||
return xmltvgen
|
return xmltvgen
|
||||||
|
|
||||||
def xmltv_file(self, xmltvgen):
|
def xmltv_file(self, xmltvgen):
|
||||||
@ -98,31 +78,12 @@ class xmlTV():
|
|||||||
"""This method is called when creation of a full xmltv is not possible"""
|
"""This method is called when creation of a full xmltv is not possible"""
|
||||||
return self.xmltv_file(self.xmltv_headers())
|
return self.xmltv_file(self.xmltv_headers())
|
||||||
|
|
||||||
def timestamp_to_datetime(self, time_start, time_end, source):
|
def create_xmltv(self, base_url, epgdict):
|
||||||
xmltvtimetamps = {}
|
|
||||||
source_offset = self.xmltv_offset[source]
|
|
||||||
for time_item, time_value in zip(["time_start", "time_end"], [time_start, time_end]):
|
|
||||||
timestampval = datetime.datetime.fromtimestamp(time_value).strftime('%Y%m%d%H%M%S')
|
|
||||||
xmltvtimetamps[time_item] = "%s %s" % (timestampval, source_offset)
|
|
||||||
return xmltvtimetamps
|
|
||||||
|
|
||||||
def create_xmltv(self, base_url, epgdict, source):
|
|
||||||
if not epgdict:
|
if not epgdict:
|
||||||
return self.xmltv_empty()
|
return self.xmltv_empty()
|
||||||
epgdict = epgdict.copy()
|
|
||||||
|
|
||||||
out = self.xmltv_headers()
|
out = self.xmltv_headers()
|
||||||
|
|
||||||
if source in self.fhdhr.origins.valid_origins:
|
|
||||||
for c in list(epgdict.keys()):
|
|
||||||
chan_obj = self.fhdhr.device.channels.get_channel_obj("origin_id", epgdict[c]["id"], source)
|
|
||||||
epgdict[chan_obj.number] = epgdict.pop(c)
|
|
||||||
epgdict[chan_obj.number]["name"] = chan_obj.dict["name"]
|
|
||||||
epgdict[chan_obj.number]["callsign"] = chan_obj.dict["callsign"]
|
|
||||||
epgdict[chan_obj.number]["number"] = chan_obj.number
|
|
||||||
epgdict[chan_obj.number]["id"] = chan_obj.dict["origin_id"]
|
|
||||||
epgdict[chan_obj.number]["thumbnail"] = chan_obj.thumbnail
|
|
||||||
|
|
||||||
for c in list(epgdict.keys()):
|
for c in list(epgdict.keys()):
|
||||||
|
|
||||||
c_out = sub_el(out, 'channel', id=str(epgdict[c]['number']))
|
c_out = sub_el(out, 'channel', id=str(epgdict[c]['number']))
|
||||||
@ -134,10 +95,13 @@ class xmlTV():
|
|||||||
sub_el(c_out, 'display-name', text=epgdict[c]['callsign'])
|
sub_el(c_out, 'display-name', text=epgdict[c]['callsign'])
|
||||||
sub_el(c_out, 'display-name', text=epgdict[c]['name'])
|
sub_el(c_out, 'display-name', text=epgdict[c]['name'])
|
||||||
|
|
||||||
if self.fhdhr.config.dict["epg"]["images"] == "proxy":
|
if epgdict[c]["thumbnail"] is not None:
|
||||||
sub_el(c_out, 'icon', src=("%s/api/images?method=get&type=channel&id=%s" % (base_url, epgdict[c]['id'])))
|
if self.fhdhr.config.dict["epg"]["images"] == "proxy":
|
||||||
|
sub_el(c_out, 'icon', src=(str(base_url) + "/api/images?method=get&type=channel&id=" + str(epgdict[c]['id'])))
|
||||||
|
else:
|
||||||
|
sub_el(c_out, 'icon', src=(epgdict[c]["thumbnail"]))
|
||||||
else:
|
else:
|
||||||
sub_el(c_out, 'icon', src=(epgdict[c]["thumbnail"]))
|
sub_el(c_out, 'icon', src=(str(base_url) + "/api/images?method=generate&type=channel&message=" + urllib.parse.quote(epgdict[c]['name'])))
|
||||||
|
|
||||||
for channelnum in list(epgdict.keys()):
|
for channelnum in list(epgdict.keys()):
|
||||||
|
|
||||||
@ -145,18 +109,16 @@ class xmlTV():
|
|||||||
|
|
||||||
for program in channel_listing:
|
for program in channel_listing:
|
||||||
|
|
||||||
xmltvtimetamps = self.timestamp_to_datetime(program['time_start'], program['time_end'], source)
|
|
||||||
|
|
||||||
prog_out = sub_el(out, 'programme',
|
prog_out = sub_el(out, 'programme',
|
||||||
start=xmltvtimetamps['time_start'],
|
start=program['time_start'],
|
||||||
stop=xmltvtimetamps['time_end'],
|
stop=program['time_end'],
|
||||||
channel=str(channelnum))
|
channel=str(channelnum))
|
||||||
|
|
||||||
sub_el(prog_out, 'title', lang='en', text=program['title'])
|
sub_el(prog_out, 'title', lang='en', text=program['title'])
|
||||||
|
|
||||||
sub_el(prog_out, 'desc', lang='en', text=program['description'])
|
sub_el(prog_out, 'desc', lang='en', text=program['description'])
|
||||||
|
|
||||||
sub_el(prog_out, 'sub-title', lang='en', text='Movie: %s' % program['sub-title'])
|
sub_el(prog_out, 'sub-title', lang='en', text='Movie: ' + program['sub-title'])
|
||||||
|
|
||||||
sub_el(prog_out, 'length', units='minutes', text=str(int(program['duration_minutes'])))
|
sub_el(prog_out, 'length', units='minutes', text=str(int(program['duration_minutes'])))
|
||||||
|
|
||||||
@ -178,11 +140,11 @@ class xmlTV():
|
|||||||
|
|
||||||
if program["thumbnail"]:
|
if program["thumbnail"]:
|
||||||
if self.fhdhr.config.dict["epg"]["images"] == "proxy":
|
if self.fhdhr.config.dict["epg"]["images"] == "proxy":
|
||||||
sub_el(prog_out, 'icon', src=("%s/api/images?method=get&type=content&id=%s" % (base_url, program['id'])))
|
sub_el(prog_out, 'icon', src=(str(base_url) + "/api/images?method=get&type=content&id=" + str(program['id'])))
|
||||||
else:
|
else:
|
||||||
sub_el(prog_out, 'icon', src=(program["thumbnail"]))
|
sub_el(prog_out, 'icon', src=(program["thumbnail"]))
|
||||||
else:
|
else:
|
||||||
sub_el(prog_out, 'icon', src=("%s/api/images?method=generate&type=content&message=%s" % (base_url, urllib.parse.quote(program['title']))))
|
sub_el(prog_out, 'icon', src=(str(base_url) + "/api/images?method=generate&type=content&message=" + urllib.parse.quote(program['title'])))
|
||||||
|
|
||||||
if program['rating']:
|
if program['rating']:
|
||||||
rating_out = sub_el(prog_out, 'rating', system="MPAA")
|
rating_out = sub_el(prog_out, 'rating', system="MPAA")
|
||||||
@ -1,30 +1,25 @@
|
|||||||
|
|
||||||
|
|
||||||
|
from .favicon_ico import Favicon_ICO
|
||||||
|
from .style_css import Style_CSS
|
||||||
|
|
||||||
|
from .device_xml import Device_XML
|
||||||
from .lineup_xml import Lineup_XML
|
from .lineup_xml import Lineup_XML
|
||||||
|
|
||||||
from .discover_json import Discover_JSON
|
from .discover_json import Discover_JSON
|
||||||
from .lineup_json import Lineup_JSON
|
from .lineup_json import Lineup_JSON
|
||||||
from .lineup_status_json import Lineup_Status_JSON
|
from .lineup_status_json import Lineup_Status_JSON
|
||||||
|
|
||||||
from .lineup_post import Lineup_Post
|
|
||||||
from .device_xml import HDHR_Device_XML
|
|
||||||
|
|
||||||
from .auto import Auto
|
class fHDHR_Files():
|
||||||
from .tuner import Tuner
|
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
class Plugin_OBJ():
|
|
||||||
|
|
||||||
def __init__(self, fhdhr, plugin_utils):
|
|
||||||
self.fhdhr = fhdhr
|
self.fhdhr = fhdhr
|
||||||
self.plugin_utils = plugin_utils
|
|
||||||
|
|
||||||
self.lineup_post = Lineup_Post(fhdhr)
|
self.favicon = Favicon_ICO(fhdhr)
|
||||||
|
self.style = Style_CSS(fhdhr)
|
||||||
self.device_xml = HDHR_Device_XML(fhdhr)
|
|
||||||
|
|
||||||
self.auto = Auto(fhdhr)
|
|
||||||
self.tuner = Tuner(fhdhr)
|
|
||||||
|
|
||||||
|
self.device_xml = Device_XML(fhdhr)
|
||||||
self.lineup_xml = Lineup_XML(fhdhr)
|
self.lineup_xml = Lineup_XML(fhdhr)
|
||||||
|
|
||||||
self.discover_json = Discover_JSON(fhdhr)
|
self.discover_json = Discover_JSON(fhdhr)
|
||||||
@ -5,36 +5,37 @@ import xml.etree.ElementTree
|
|||||||
from fHDHR.tools import sub_el
|
from fHDHR.tools import sub_el
|
||||||
|
|
||||||
|
|
||||||
class Cluster_Device_XML():
|
class Device_XML():
|
||||||
endpoints = ["/cluster/device.xml"]
|
endpoints = ["/device.xml"]
|
||||||
endpoint_name = "cluster_device_xml"
|
endpoint_name = "file_device_xml"
|
||||||
|
|
||||||
def __init__(self, fhdhr, plugin_utils):
|
def __init__(self, fhdhr):
|
||||||
self.fhdhr = fhdhr
|
self.fhdhr = fhdhr
|
||||||
self.plugin_utils = plugin_utils
|
|
||||||
|
|
||||||
def __call__(self, *args):
|
def __call__(self, *args):
|
||||||
return self.get(*args)
|
return self.get(*args)
|
||||||
|
|
||||||
def get(self, *args):
|
def get(self, *args):
|
||||||
"""Device.xml referenced from SSDP"""
|
|
||||||
|
|
||||||
base_url = request.url_root[:-1]
|
base_url = request.url_root[:-1]
|
||||||
|
|
||||||
out = xml.etree.ElementTree.Element('root')
|
out = xml.etree.ElementTree.Element('root')
|
||||||
out.set('xmlns', "upnp:rootdevice")
|
out.set('xmlns', "urn:schemas-upnp-org:device-1-0")
|
||||||
|
|
||||||
sub_el(out, 'URLBase', "%s" % base_url)
|
sub_el(out, 'URLBase', base_url)
|
||||||
|
|
||||||
specVersion_out = sub_el(out, 'specVersion')
|
specVersion_out = sub_el(out, 'specVersion')
|
||||||
sub_el(specVersion_out, 'major', "1")
|
sub_el(specVersion_out, 'major', "1")
|
||||||
sub_el(specVersion_out, 'minor', "0")
|
sub_el(specVersion_out, 'minor', "0")
|
||||||
|
|
||||||
device_out = sub_el(out, 'device')
|
device_out = sub_el(out, 'device')
|
||||||
|
sub_el(device_out, 'deviceType', "urn:schemas-upnp-org:device:MediaServer:1")
|
||||||
sub_el(device_out, 'deviceType', "upnp:rootdevice")
|
|
||||||
sub_el(device_out, 'friendlyName', self.fhdhr.config.dict["fhdhr"]["friendlyname"])
|
sub_el(device_out, 'friendlyName', self.fhdhr.config.dict["fhdhr"]["friendlyname"])
|
||||||
sub_el(device_out, 'UDN', "uuid:%s" % self.fhdhr.config.dict["main"]["uuid"])
|
sub_el(device_out, 'manufacturer', self.fhdhr.config.dict["fhdhr"]["reporting_manufacturer"])
|
||||||
|
sub_el(device_out, 'modelName', self.fhdhr.config.dict["fhdhr"]["reporting_model"])
|
||||||
|
sub_el(device_out, 'modelNumber', self.fhdhr.config.dict["fhdhr"]["reporting_model"])
|
||||||
|
sub_el(device_out, 'serialNumber')
|
||||||
|
sub_el(device_out, 'UDN', "uuid:" + self.fhdhr.config.dict["main"]["uuid"])
|
||||||
|
|
||||||
fakefile = BytesIO()
|
fakefile = BytesIO()
|
||||||
fakefile.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
|
fakefile.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
|
||||||
@ -3,16 +3,12 @@ import json
|
|||||||
|
|
||||||
|
|
||||||
class Discover_JSON():
|
class Discover_JSON():
|
||||||
endpoints = ["/discover.json", "/hdhr/discover.json"]
|
endpoints = ["/discover.json"]
|
||||||
endpoint_name = "hdhr_discover_json"
|
endpoint_name = "file_discover_json"
|
||||||
|
|
||||||
def __init__(self, fhdhr):
|
def __init__(self, fhdhr):
|
||||||
self.fhdhr = fhdhr
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
@property
|
|
||||||
def source(self):
|
|
||||||
return self.fhdhr.config.dict["hdhr"]["source"] or self.fhdhr.origins.valid_origins[0]
|
|
||||||
|
|
||||||
def __call__(self, *args):
|
def __call__(self, *args):
|
||||||
return self.get(*args)
|
return self.get(*args)
|
||||||
|
|
||||||
@ -20,19 +16,17 @@ class Discover_JSON():
|
|||||||
|
|
||||||
base_url = request.url_root[:-1]
|
base_url = request.url_root[:-1]
|
||||||
|
|
||||||
origin = self.source
|
|
||||||
|
|
||||||
jsondiscover = {
|
jsondiscover = {
|
||||||
"FriendlyName": "%s %s" % (self.fhdhr.config.dict["fhdhr"]["friendlyname"], origin),
|
"FriendlyName": self.fhdhr.config.dict["fhdhr"]["friendlyname"],
|
||||||
"Manufacturer": self.fhdhr.config.dict["hdhr"]["reporting_manufacturer"],
|
"Manufacturer": self.fhdhr.config.dict["fhdhr"]["reporting_manufacturer"],
|
||||||
"ModelNumber": self.fhdhr.config.dict["hdhr"]["reporting_model"],
|
"ModelNumber": self.fhdhr.config.dict["fhdhr"]["reporting_model"],
|
||||||
"FirmwareName": self.fhdhr.config.dict["hdhr"]["reporting_firmware_name"],
|
"FirmwareName": self.fhdhr.config.dict["fhdhr"]["reporting_firmware_name"],
|
||||||
"TunerCount": self.fhdhr.origins.origins_dict[origin].tuners,
|
"TunerCount": self.fhdhr.config.dict["fhdhr"]["tuner_count"],
|
||||||
"FirmwareVersion": self.fhdhr.config.dict["hdhr"]["reporting_firmware_ver"],
|
"FirmwareVersion": self.fhdhr.config.dict["fhdhr"]["reporting_firmware_ver"],
|
||||||
"DeviceID": "%s%s" % (self.fhdhr.config.dict["main"]["uuid"], origin),
|
"DeviceID": self.fhdhr.config.dict["main"]["uuid"],
|
||||||
"DeviceAuth": self.fhdhr.config.dict["fhdhr"]["device_auth"],
|
"DeviceAuth": self.fhdhr.config.dict["fhdhr"]["device_auth"],
|
||||||
"BaseURL": "%s/hdhr" % base_url,
|
"BaseURL": base_url,
|
||||||
"LineupURL": "%s/hdhr/lineup.json" % base_url
|
"LineupURL": base_url + "/lineup.json"
|
||||||
}
|
}
|
||||||
discover_json = json.dumps(jsondiscover, indent=4)
|
discover_json = json.dumps(jsondiscover, indent=4)
|
||||||
|
|
||||||
37
fHDHR/http/files/lineup_json.py
Normal file
37
fHDHR/http/files/lineup_json.py
Normal file
@ -0,0 +1,37 @@
|
|||||||
|
from flask import Response, request
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
|
class Lineup_JSON():
|
||||||
|
endpoints = ["/lineup.json"]
|
||||||
|
endpoint_name = "file_lineup_json"
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
def __call__(self, *args):
|
||||||
|
return self.get(*args)
|
||||||
|
|
||||||
|
def get(self, *args):
|
||||||
|
|
||||||
|
base_url = request.url_root[:-1]
|
||||||
|
|
||||||
|
show = request.args.get('show', default="all", type=str)
|
||||||
|
|
||||||
|
jsonlineup = []
|
||||||
|
for fhdhr_id in list(self.fhdhr.device.channels.list.keys()):
|
||||||
|
channel_obj = self.fhdhr.device.channels.list[fhdhr_id]
|
||||||
|
if channel_obj.enabled or show == "found":
|
||||||
|
lineup_dict = channel_obj.lineup_dict()
|
||||||
|
lineup_dict["URL"] = base_url + lineup_dict["URL"]
|
||||||
|
if show == "found" and channel_obj.enabled:
|
||||||
|
lineup_dict["Enabled"] = 1
|
||||||
|
elif show == "found" and not channel_obj.enabled:
|
||||||
|
lineup_dict["Enabled"] = 0
|
||||||
|
jsonlineup.append(lineup_dict)
|
||||||
|
|
||||||
|
lineup_json = json.dumps(jsonlineup, indent=4)
|
||||||
|
|
||||||
|
return Response(status=200,
|
||||||
|
response=lineup_json,
|
||||||
|
mimetype='application/json')
|
||||||
45
fHDHR/http/files/lineup_status_json.py
Normal file
45
fHDHR/http/files/lineup_status_json.py
Normal file
@ -0,0 +1,45 @@
|
|||||||
|
from flask import Response
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
|
class Lineup_Status_JSON():
|
||||||
|
endpoints = ["/lineup_status.json"]
|
||||||
|
endpoint_name = "file_lineup_status_json"
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
def __call__(self, *args):
|
||||||
|
return self.get(*args)
|
||||||
|
|
||||||
|
def get(self, *args):
|
||||||
|
|
||||||
|
station_scanning = self.fhdhr.device.station_scan.scanning()
|
||||||
|
if station_scanning:
|
||||||
|
jsonlineup = self.scan_in_progress()
|
||||||
|
elif not len(self.fhdhr.device.channels.list):
|
||||||
|
jsonlineup = self.scan_in_progress()
|
||||||
|
else:
|
||||||
|
jsonlineup = self.not_scanning()
|
||||||
|
lineup_json = json.dumps(jsonlineup, indent=4)
|
||||||
|
|
||||||
|
return Response(status=200,
|
||||||
|
response=lineup_json,
|
||||||
|
mimetype='application/json')
|
||||||
|
|
||||||
|
def scan_in_progress(self):
|
||||||
|
jsonlineup = {
|
||||||
|
"ScanInProgress": "true",
|
||||||
|
"Progress": 99,
|
||||||
|
"Found": len(self.fhdhr.device.channels.list)
|
||||||
|
}
|
||||||
|
return jsonlineup
|
||||||
|
|
||||||
|
def not_scanning(self):
|
||||||
|
jsonlineup = {
|
||||||
|
"ScanInProgress": "false",
|
||||||
|
"ScanPossible": "true",
|
||||||
|
"Source": self.fhdhr.config.dict["fhdhr"]["reporting_tuner_type"],
|
||||||
|
"SourceList": [self.fhdhr.config.dict["fhdhr"]["reporting_tuner_type"]],
|
||||||
|
}
|
||||||
|
return jsonlineup
|
||||||
45
fHDHR/http/files/lineup_xml.py
Normal file
45
fHDHR/http/files/lineup_xml.py
Normal file
@ -0,0 +1,45 @@
|
|||||||
|
from flask import Response, request
|
||||||
|
from io import BytesIO
|
||||||
|
import xml.etree.ElementTree
|
||||||
|
|
||||||
|
from fHDHR.tools import sub_el
|
||||||
|
|
||||||
|
|
||||||
|
class Lineup_XML():
|
||||||
|
endpoints = ["/lineup.xml"]
|
||||||
|
endpoint_name = "file_lineup_xml"
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
def __call__(self, *args):
|
||||||
|
return self.get(*args)
|
||||||
|
|
||||||
|
def get(self, *args):
|
||||||
|
|
||||||
|
base_url = request.url_root[:-1]
|
||||||
|
|
||||||
|
show = request.args.get('show', default="all", type=str)
|
||||||
|
|
||||||
|
out = xml.etree.ElementTree.Element('Lineup')
|
||||||
|
for fhdhr_id in list(self.fhdhr.device.channels.list.keys()):
|
||||||
|
channel_obj = self.fhdhr.device.channels.list[fhdhr_id]
|
||||||
|
if channel_obj.enabled or show == "found":
|
||||||
|
program_out = sub_el(out, 'Program')
|
||||||
|
lineup_dict = channel_obj.lineup_dict()
|
||||||
|
lineup_dict["URL"] = base_url + lineup_dict["URL"]
|
||||||
|
if show == "found" and channel_obj.enabled:
|
||||||
|
lineup_dict["Enabled"] = 1
|
||||||
|
elif show == "found" and not channel_obj.enabled:
|
||||||
|
lineup_dict["Enabled"] = 0
|
||||||
|
for key in list(lineup_dict.keys()):
|
||||||
|
sub_el(program_out, str(key), str(lineup_dict[key]))
|
||||||
|
|
||||||
|
fakefile = BytesIO()
|
||||||
|
fakefile.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
|
||||||
|
fakefile.write(xml.etree.ElementTree.tostring(out, encoding='UTF-8'))
|
||||||
|
lineup_xml = fakefile.getvalue()
|
||||||
|
|
||||||
|
return Response(status=200,
|
||||||
|
response=lineup_xml,
|
||||||
|
mimetype='application/xml')
|
||||||
17
fHDHR/http/files/style_css.py
Normal file
17
fHDHR/http/files/style_css.py
Normal file
@ -0,0 +1,17 @@
|
|||||||
|
from flask import send_from_directory
|
||||||
|
|
||||||
|
|
||||||
|
class Style_CSS():
|
||||||
|
endpoints = ["/style.css"]
|
||||||
|
endpoint_name = "file_style_css"
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
def __call__(self, *args):
|
||||||
|
return self.get(*args)
|
||||||
|
|
||||||
|
def get(self, *args):
|
||||||
|
|
||||||
|
return send_from_directory(self.fhdhr.config.internal["paths"]["www_dir"],
|
||||||
|
'style.css')
|
||||||
@ -1,14 +1,16 @@
|
|||||||
|
|
||||||
|
|
||||||
from .index_html import Index_HTML
|
from .index_html import Index_HTML
|
||||||
|
from .origin_html import Origin_HTML
|
||||||
from .channels_html import Channels_HTML
|
from .channels_html import Channels_HTML
|
||||||
from .guide_html import Guide_HTML
|
from .guide_html import Guide_HTML
|
||||||
from .tuners_html import Tuners_HTML
|
from .cluster_html import Cluster_HTML
|
||||||
|
from .streams_html import Streams_HTML
|
||||||
from .xmltv_html import xmlTV_HTML
|
from .xmltv_html import xmlTV_HTML
|
||||||
from .version_html import Version_HTML
|
from .version_html import Version_HTML
|
||||||
from .diagnostics_html import Diagnostics_HTML
|
from .diagnostics_html import Diagnostics_HTML
|
||||||
from .settings_html import Settings_HTML
|
from .settings_html import Settings_HTML
|
||||||
from .channels_editor_html import Channels_Editor_HTML
|
from .channels_editor import Channels_Editor_HTML
|
||||||
|
|
||||||
|
|
||||||
class fHDHR_Pages():
|
class fHDHR_Pages():
|
||||||
@ -17,10 +19,12 @@ class fHDHR_Pages():
|
|||||||
self.fhdhr = fhdhr
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
self.index_html = Index_HTML(fhdhr)
|
self.index_html = Index_HTML(fhdhr)
|
||||||
|
self.origin_html = Origin_HTML(fhdhr)
|
||||||
self.channels_html = Channels_HTML(fhdhr)
|
self.channels_html = Channels_HTML(fhdhr)
|
||||||
self.channels_editor_html = Channels_Editor_HTML(fhdhr)
|
self.channels_editor = Channels_Editor_HTML(fhdhr)
|
||||||
self.guide_html = Guide_HTML(fhdhr)
|
self.guide_html = Guide_HTML(fhdhr)
|
||||||
self.tuners_html = Tuners_HTML(fhdhr)
|
self.cluster_html = Cluster_HTML(fhdhr)
|
||||||
|
self.streams_html = Streams_HTML(fhdhr)
|
||||||
self.xmltv_html = xmlTV_HTML(fhdhr)
|
self.xmltv_html = xmlTV_HTML(fhdhr)
|
||||||
self.version_html = Version_HTML(fhdhr)
|
self.version_html = Version_HTML(fhdhr)
|
||||||
self.diagnostics_html = Diagnostics_HTML(fhdhr)
|
self.diagnostics_html = Diagnostics_HTML(fhdhr)
|
||||||
23
fHDHR/http/pages/channels_editor.py
Normal file
23
fHDHR/http/pages/channels_editor.py
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
from flask import request, render_template
|
||||||
|
|
||||||
|
|
||||||
|
class Channels_Editor_HTML():
|
||||||
|
endpoints = ["/channels_editor", "/channels_editor.html"]
|
||||||
|
endpoint_name = "page_channels_editor_html"
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
def __call__(self, *args):
|
||||||
|
return self.get(*args)
|
||||||
|
|
||||||
|
def get(self, *args):
|
||||||
|
|
||||||
|
channelslist = []
|
||||||
|
for fhdhr_id in list(self.fhdhr.device.channels.list.keys()):
|
||||||
|
channel_obj = self.fhdhr.device.channels.list[fhdhr_id]
|
||||||
|
channel_dict = channel_obj.dict.copy()
|
||||||
|
channel_dict["play_url"] = channel_obj.play_url()
|
||||||
|
channelslist.append(channel_dict)
|
||||||
|
|
||||||
|
return render_template('channels_editor.html', request=request, fhdhr=self.fhdhr, channelslist=channelslist)
|
||||||
30
fHDHR/http/pages/channels_html.py
Normal file
30
fHDHR/http/pages/channels_html.py
Normal file
@ -0,0 +1,30 @@
|
|||||||
|
from flask import request, render_template
|
||||||
|
|
||||||
|
|
||||||
|
class Channels_HTML():
|
||||||
|
endpoints = ["/channels", "/channels.html"]
|
||||||
|
endpoint_name = "page_channels_html"
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
def __call__(self, *args):
|
||||||
|
return self.get(*args)
|
||||||
|
|
||||||
|
def get(self, *args):
|
||||||
|
|
||||||
|
channels_dict = {
|
||||||
|
"Total Channels": len(list(self.fhdhr.device.channels.list.keys())),
|
||||||
|
"Enabled": 0,
|
||||||
|
}
|
||||||
|
|
||||||
|
channelslist = []
|
||||||
|
for fhdhr_id in list(self.fhdhr.device.channels.list.keys()):
|
||||||
|
channel_obj = self.fhdhr.device.channels.list[fhdhr_id]
|
||||||
|
channel_dict = channel_obj.dict.copy()
|
||||||
|
channel_dict["play_url"] = channel_obj.play_url()
|
||||||
|
channelslist.append(channel_dict)
|
||||||
|
if channel_dict["enabled"]:
|
||||||
|
channels_dict["Enabled"] += 1
|
||||||
|
|
||||||
|
return render_template('channels.html', request=request, fhdhr=self.fhdhr, channelslist=channelslist, channels_dict=channels_dict, list=list)
|
||||||
50
fHDHR/http/pages/cluster_html.py
Normal file
50
fHDHR/http/pages/cluster_html.py
Normal file
@ -0,0 +1,50 @@
|
|||||||
|
from flask import request, render_template
|
||||||
|
import urllib.parse
|
||||||
|
|
||||||
|
|
||||||
|
class Cluster_HTML():
|
||||||
|
endpoints = ["/cluster", "/cluster.html"]
|
||||||
|
endpoint_name = "page_cluster_html"
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
self.location_dict = {
|
||||||
|
"name": self.fhdhr.config.dict["fhdhr"]["friendlyname"],
|
||||||
|
"location": self.fhdhr.device.cluster.location,
|
||||||
|
"joined": "N/A",
|
||||||
|
"url_query": self.fhdhr.device.cluster.location_url
|
||||||
|
}
|
||||||
|
|
||||||
|
def __call__(self, *args):
|
||||||
|
return self.get(*args)
|
||||||
|
|
||||||
|
def get(self, *args):
|
||||||
|
|
||||||
|
locations_list = []
|
||||||
|
|
||||||
|
if self.fhdhr.config.dict["fhdhr"]["discovery_address"]:
|
||||||
|
|
||||||
|
locations_list.append(self.location_dict)
|
||||||
|
|
||||||
|
fhdhr_list = self.fhdhr.device.cluster.get_list()
|
||||||
|
for location in list(fhdhr_list.keys()):
|
||||||
|
|
||||||
|
if location in list(self.fhdhr.device.cluster.cluster().keys()):
|
||||||
|
location_name = self.fhdhr.device.cluster.cluster()[location]["name"]
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
location_info_url = location + "/discover.json"
|
||||||
|
location_info_req = self.fhdhr.web.session.get(location_info_url)
|
||||||
|
location_info = location_info_req.json()
|
||||||
|
location_name = location_info["FriendlyName"]
|
||||||
|
except self.fhdhr.web.exceptions.ConnectionError:
|
||||||
|
self.fhdhr.logger.error("Unreachable: " + location)
|
||||||
|
location_dict = {
|
||||||
|
"name": location_name,
|
||||||
|
"location": location,
|
||||||
|
"joined": str(fhdhr_list[location]["Joined"]),
|
||||||
|
"url_query": urllib.parse.quote(location)
|
||||||
|
}
|
||||||
|
locations_list.append(location_dict)
|
||||||
|
|
||||||
|
return render_template('cluster.html', request=request, fhdhr=self.fhdhr, locations_list=locations_list)
|
||||||
27
fHDHR/http/pages/diagnostics_html.py
Normal file
27
fHDHR/http/pages/diagnostics_html.py
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
from flask import request, render_template
|
||||||
|
|
||||||
|
|
||||||
|
class Diagnostics_HTML():
|
||||||
|
endpoints = ["/diagnostics", "/diagnostics.html"]
|
||||||
|
endpoint_name = "page_diagnostics_html"
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
def __call__(self, *args):
|
||||||
|
return self.get(*args)
|
||||||
|
|
||||||
|
def get(self, *args):
|
||||||
|
|
||||||
|
# a list of 2 part lists containing button information
|
||||||
|
button_list = [
|
||||||
|
["debug.json", "/api/debug"],
|
||||||
|
["device.xml", "device.xml"],
|
||||||
|
["discover.json", "discover.json"],
|
||||||
|
["lineup.json", "lineup.json"],
|
||||||
|
["lineup.xml", "lineup.xml"],
|
||||||
|
["lineup_status.json", "lineup_status.json"],
|
||||||
|
["cluster.json", "/api/cluster?method=get"]
|
||||||
|
]
|
||||||
|
|
||||||
|
return render_template('diagnostics.html', request=request, fhdhr=self.fhdhr, button_list=button_list)
|
||||||
43
fHDHR/http/pages/guide_html.py
Normal file
43
fHDHR/http/pages/guide_html.py
Normal file
@ -0,0 +1,43 @@
|
|||||||
|
from flask import request, render_template
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
from fHDHR.tools import humanized_time
|
||||||
|
|
||||||
|
|
||||||
|
class Guide_HTML():
|
||||||
|
endpoints = ["/guide", "/guide.html"]
|
||||||
|
endpoint_name = "page_guide_html"
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
def __call__(self, *args):
|
||||||
|
return self.get(*args)
|
||||||
|
|
||||||
|
def get(self, *args):
|
||||||
|
|
||||||
|
nowtime = datetime.datetime.utcnow()
|
||||||
|
|
||||||
|
chan_guide_list = []
|
||||||
|
|
||||||
|
source = request.args.get('source', default=self.fhdhr.device.epg.def_method, type=str)
|
||||||
|
epg_methods = self.fhdhr.device.epg.valid_epg_methods
|
||||||
|
if source not in epg_methods:
|
||||||
|
source = self.fhdhr.device.epg.def_method
|
||||||
|
|
||||||
|
for channel in self.fhdhr.device.epg.whats_on_allchans(source):
|
||||||
|
end_time = datetime.datetime.strptime(channel["listing"][0]["time_end"], '%Y%m%d%H%M%S +0000')
|
||||||
|
remaining_time = humanized_time(int((end_time - nowtime).total_seconds()))
|
||||||
|
|
||||||
|
chan_dict = {
|
||||||
|
"name": channel["name"],
|
||||||
|
"number": channel["number"],
|
||||||
|
"chan_thumbnail": channel["thumbnail"],
|
||||||
|
"listing_title": channel["listing"][0]["title"],
|
||||||
|
"listing_thumbnail": channel["listing"][0]["thumbnail"],
|
||||||
|
"listing_description": channel["listing"][0]["description"],
|
||||||
|
"remaining_time": str(remaining_time)
|
||||||
|
}
|
||||||
|
chan_guide_list.append(chan_dict)
|
||||||
|
|
||||||
|
return render_template('guide.html', request=request, fhdhr=self.fhdhr, chan_guide_list=chan_guide_list, epg_methods=epg_methods)
|
||||||
27
fHDHR/http/pages/index_html.py
Normal file
27
fHDHR/http/pages/index_html.py
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
from flask import request, render_template
|
||||||
|
|
||||||
|
|
||||||
|
class Index_HTML():
|
||||||
|
endpoints = ["/", "/index", "/index.html"]
|
||||||
|
endpoint_name = "page_root_html"
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
def __call__(self, *args):
|
||||||
|
return self.get(*args)
|
||||||
|
|
||||||
|
def get(self, *args):
|
||||||
|
|
||||||
|
tuners_in_use = self.fhdhr.device.tuners.inuse_tuner_count()
|
||||||
|
max_tuners = self.fhdhr.device.tuners.max_tuners
|
||||||
|
|
||||||
|
fhdhr_status_dict = {
|
||||||
|
"Script Directory": str(self.fhdhr.config.internal["paths"]["script_dir"]),
|
||||||
|
"Config File": str(self.fhdhr.config.config_file),
|
||||||
|
"Cache Path": str(self.fhdhr.config.internal["paths"]["cache_dir"]),
|
||||||
|
"Total Channels": len(self.fhdhr.device.channels.list),
|
||||||
|
"Tuner Usage": ("%s/%s" % (str(tuners_in_use), str(max_tuners))),
|
||||||
|
}
|
||||||
|
|
||||||
|
return render_template('index.html', request=request, fhdhr=self.fhdhr, fhdhr_status_dict=fhdhr_status_dict, list=list)
|
||||||
18
fHDHR/http/pages/origin_html.py
Normal file
18
fHDHR/http/pages/origin_html.py
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
from flask import request, render_template
|
||||||
|
|
||||||
|
|
||||||
|
class Origin_HTML():
|
||||||
|
endpoints = ["/origin", "/origin.html"]
|
||||||
|
endpoint_name = "page_origin_html"
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
def __call__(self, *args):
|
||||||
|
return self.get(*args)
|
||||||
|
|
||||||
|
def get(self, *args):
|
||||||
|
|
||||||
|
origin_status_dict = self.fhdhr.origin.get_status_dict()
|
||||||
|
origin_status_dict["Total Channels"] = len(self.fhdhr.device.channels.list)
|
||||||
|
return render_template('origin.html', request=request, fhdhr=self.fhdhr, origin_status_dict=origin_status_dict, list=list)
|
||||||
@ -1,12 +1,9 @@
|
|||||||
from flask import request, render_template, session
|
from flask import request, render_template
|
||||||
|
|
||||||
|
|
||||||
class Settings_HTML():
|
class Settings_HTML():
|
||||||
endpoints = ["/settings", "/settings.html"]
|
endpoints = ["/settings", "/settings.html"]
|
||||||
endpoint_name = "page_settings_html"
|
endpoint_name = "page_settings_html"
|
||||||
endpoint_access_level = 1
|
|
||||||
endpoint_category = "tool_pages"
|
|
||||||
pretty_name = "Settings"
|
|
||||||
|
|
||||||
def __init__(self, fhdhr):
|
def __init__(self, fhdhr):
|
||||||
self.fhdhr = fhdhr
|
self.fhdhr = fhdhr
|
||||||
@ -22,12 +19,15 @@ class Settings_HTML():
|
|||||||
|
|
||||||
for config_item in list(self.fhdhr.config.conf_default[config_section].keys()):
|
for config_item in list(self.fhdhr.config.conf_default[config_section].keys()):
|
||||||
if self.fhdhr.config.conf_default[config_section][config_item]["config_web"]:
|
if self.fhdhr.config.conf_default[config_section][config_item]["config_web"]:
|
||||||
|
real_config_section = config_section
|
||||||
|
if config_section == self.fhdhr.config.dict["main"]["dictpopname"]:
|
||||||
|
real_config_section = "origin"
|
||||||
web_settings_dict[config_section][config_item] = {
|
web_settings_dict[config_section][config_item] = {
|
||||||
"value": self.fhdhr.config.dict[config_section][config_item],
|
"value": self.fhdhr.config.dict[real_config_section][config_item],
|
||||||
"value_default": self.fhdhr.config.conf_default[config_section][config_item]["value"],
|
"value_default": self.fhdhr.config.conf_default[config_section][config_item]["value"],
|
||||||
"hide": self.fhdhr.config.conf_default[config_section][config_item]["config_web_hidden"]
|
"hide": self.fhdhr.config.conf_default[config_section][config_item]["config_web_hidden"]
|
||||||
}
|
}
|
||||||
if not len(web_settings_dict[config_section].keys()):
|
if not len(web_settings_dict[config_section].keys()):
|
||||||
del web_settings_dict[config_section]
|
del web_settings_dict[config_section]
|
||||||
|
|
||||||
return render_template('settings.html', request=request, session=session, fhdhr=self.fhdhr, web_settings_dict=web_settings_dict, list=list)
|
return render_template('settings.html', request=request, fhdhr=self.fhdhr, web_settings_dict=web_settings_dict, list=list)
|
||||||
33
fHDHR/http/pages/streams_html.py
Normal file
33
fHDHR/http/pages/streams_html.py
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
from flask import request, render_template
|
||||||
|
|
||||||
|
from fHDHR.tools import humanized_filesize
|
||||||
|
|
||||||
|
|
||||||
|
class Streams_HTML():
|
||||||
|
endpoints = ["/streams", "/streams.html"]
|
||||||
|
endpoint_name = "page_streams_html"
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
def __call__(self, *args):
|
||||||
|
return self.get(*args)
|
||||||
|
|
||||||
|
def get(self, *args):
|
||||||
|
|
||||||
|
tuner_list = []
|
||||||
|
tuner_status = self.fhdhr.device.tuners.status()
|
||||||
|
for tuner in list(tuner_status.keys()):
|
||||||
|
tuner_dict = {
|
||||||
|
"number": str(tuner),
|
||||||
|
"status": str(tuner_status[tuner]["status"]),
|
||||||
|
}
|
||||||
|
if tuner_status[tuner]["status"] == "Active":
|
||||||
|
tuner_dict["channel_number"] = tuner_status[tuner]["channel"]
|
||||||
|
tuner_dict["method"] = tuner_status[tuner]["method"]
|
||||||
|
tuner_dict["play_duration"] = str(tuner_status[tuner]["Play Time"])
|
||||||
|
tuner_dict["downloaded"] = humanized_filesize(tuner_status[tuner]["downloaded"])
|
||||||
|
|
||||||
|
tuner_list.append(tuner_dict)
|
||||||
|
|
||||||
|
return render_template('streams.html', request=request, fhdhr=self.fhdhr, tuner_list=tuner_list)
|
||||||
18
fHDHR/http/pages/version_html.py
Normal file
18
fHDHR/http/pages/version_html.py
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
from flask import request, render_template
|
||||||
|
|
||||||
|
|
||||||
|
class Version_HTML():
|
||||||
|
endpoints = ["/version", "/version.html"]
|
||||||
|
endpoint_name = "page_version_html"
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
def __call__(self, *args):
|
||||||
|
return self.get(*args)
|
||||||
|
|
||||||
|
def get(self, *args):
|
||||||
|
version_dict = {}
|
||||||
|
for key in list(self.fhdhr.config.internal["versions"].keys()):
|
||||||
|
version_dict[key] = self.fhdhr.config.internal["versions"][key]
|
||||||
|
return render_template('version.html', request=request, fhdhr=self.fhdhr, version_dict=version_dict, list=list)
|
||||||
16
fHDHR/http/pages/xmltv_html.py
Normal file
16
fHDHR/http/pages/xmltv_html.py
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
from flask import request, render_template
|
||||||
|
|
||||||
|
|
||||||
|
class xmlTV_HTML():
|
||||||
|
endpoints = ["/xmltv", "/xmltv.html"]
|
||||||
|
endpoint_name = "page_xmltv_html"
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
def __call__(self, *args):
|
||||||
|
return self.get(*args)
|
||||||
|
|
||||||
|
def get(self, *args):
|
||||||
|
|
||||||
|
return render_template('xmltv.html', request=request, fhdhr=self.fhdhr)
|
||||||
12
fHDHR/http/watch/__init__.py
Normal file
12
fHDHR/http/watch/__init__.py
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
|
||||||
|
from .auto import Auto
|
||||||
|
from .tuner import Tuner
|
||||||
|
|
||||||
|
|
||||||
|
class fHDHR_WATCH():
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
self.auto = Auto(fhdhr)
|
||||||
|
self.tuner = Tuner(fhdhr)
|
||||||
45
fHDHR/http/watch/auto.py
Normal file
45
fHDHR/http/watch/auto.py
Normal file
@ -0,0 +1,45 @@
|
|||||||
|
from flask import request, abort, redirect
|
||||||
|
import urllib.parse
|
||||||
|
|
||||||
|
|
||||||
|
class Auto():
|
||||||
|
endpoints = ['/auto/<channel>']
|
||||||
|
endpoint_name = "watch_auto"
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
def __call__(self, channel, *args):
|
||||||
|
return self.get(channel, *args)
|
||||||
|
|
||||||
|
def get(self, channel, *args):
|
||||||
|
|
||||||
|
method = request.args.get('method', default=self.fhdhr.config.dict["fhdhr"]["stream_type"], type=str)
|
||||||
|
|
||||||
|
redirect_url = "/api/watch?method=%s" % (method)
|
||||||
|
|
||||||
|
if channel.startswith("v"):
|
||||||
|
channel_number = channel.replace('v', '')
|
||||||
|
elif channel.startswith("ch"):
|
||||||
|
channel_freq = channel.replace('ch', '').split("-")[0]
|
||||||
|
subchannel = 0
|
||||||
|
if "-" in channel:
|
||||||
|
subchannel = channel.replace('ch', '').split("-")[1]
|
||||||
|
self.fhdhr.logger.error("Not Implemented %s-%s" % (str(channel_freq), str(subchannel)))
|
||||||
|
abort(501, "Not Implemented %s-%s" % (str(channel_freq), str(subchannel)))
|
||||||
|
else:
|
||||||
|
channel_number = channel
|
||||||
|
|
||||||
|
redirect_url += "&channel=%s" % str(channel_number)
|
||||||
|
|
||||||
|
duration = request.args.get('duration', default=0, type=int)
|
||||||
|
if duration:
|
||||||
|
redirect_url += "&duration=%s" % str(duration)
|
||||||
|
|
||||||
|
transcode = request.args.get('transcode', default=None, type=str)
|
||||||
|
if transcode:
|
||||||
|
redirect_url += "&transcode=%s" % str(transcode)
|
||||||
|
|
||||||
|
redirect_url += "&accessed=%s" % urllib.parse.quote(request.url)
|
||||||
|
|
||||||
|
return redirect(redirect_url)
|
||||||
47
fHDHR/http/watch/tuner.py
Normal file
47
fHDHR/http/watch/tuner.py
Normal file
@ -0,0 +1,47 @@
|
|||||||
|
from flask import request, abort, redirect
|
||||||
|
import urllib.parse
|
||||||
|
|
||||||
|
|
||||||
|
class Tuner():
|
||||||
|
endpoints = ['/tuner<tuner_number>/<channel>']
|
||||||
|
endpoint_name = "watch_tuner"
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
def __call__(self, tuner_number, channel, *args):
|
||||||
|
return self.get(tuner_number, channel, *args)
|
||||||
|
|
||||||
|
def get(self, tuner_number, channel, *args):
|
||||||
|
|
||||||
|
method = request.args.get('method', default=self.fhdhr.config.dict["fhdhr"]["stream_type"], type=str)
|
||||||
|
|
||||||
|
redirect_url = "/api/watch?method=%s" % (method)
|
||||||
|
|
||||||
|
redirect_url += "&tuner=%s" % str(tuner_number)
|
||||||
|
|
||||||
|
if channel.startswith("v"):
|
||||||
|
channel_number = channel.replace('v', '')
|
||||||
|
elif channel.startswith("ch"):
|
||||||
|
channel_freq = channel.replace('ch', '').split("-")[0]
|
||||||
|
subchannel = 0
|
||||||
|
if "-" in channel:
|
||||||
|
subchannel = channel.replace('ch', '').split("-")[1]
|
||||||
|
self.fhdhr.logger.error("Not Implemented %s-%s" % (str(channel_freq), str(subchannel)))
|
||||||
|
abort(501, "Not Implemented %s-%s" % (str(channel_freq), str(subchannel)))
|
||||||
|
else:
|
||||||
|
channel_number = channel
|
||||||
|
|
||||||
|
redirect_url += "&channel=%s" % str(channel_number)
|
||||||
|
|
||||||
|
duration = request.args.get('duration', default=0, type=int)
|
||||||
|
if duration:
|
||||||
|
redirect_url += "&duration=%s" % str(duration)
|
||||||
|
|
||||||
|
transcode = request.args.get('transcode', default=None, type=str)
|
||||||
|
if transcode:
|
||||||
|
redirect_url += "&transcode=%s" % str(transcode)
|
||||||
|
|
||||||
|
redirect_url += "&accessed=%s" % urllib.parse.quote(request.url)
|
||||||
|
|
||||||
|
return redirect(redirect_url)
|
||||||
@ -1,36 +0,0 @@
|
|||||||
import os
|
|
||||||
import logging
|
|
||||||
|
|
||||||
|
|
||||||
class Logger():
|
|
||||||
|
|
||||||
def __init__(self, settings):
|
|
||||||
self.config = settings
|
|
||||||
|
|
||||||
log_level = self.config.dict["logging"]["level"].upper()
|
|
||||||
|
|
||||||
# Create a custom logger
|
|
||||||
logging.basicConfig(format='%(name)s - %(levelname)s - %(message)s', level=log_level)
|
|
||||||
self.logger = logging.getLogger('fHDHR')
|
|
||||||
log_file = os.path.join(self.config.internal["paths"]["logs_dir"], 'fHDHR.log')
|
|
||||||
|
|
||||||
# Create handlers
|
|
||||||
# c_handler = logging.StreamHandler()
|
|
||||||
f_handler = logging.FileHandler(log_file)
|
|
||||||
# c_handler.setLevel(log_level)
|
|
||||||
f_handler.setLevel(log_level)
|
|
||||||
|
|
||||||
# Create formatters and add it to handlers
|
|
||||||
# c_format = logging.Formatter('%(name)s - %(levelname)s - %(message)s')
|
|
||||||
f_format = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
|
|
||||||
# c_handler.setFormatter(c_format)
|
|
||||||
f_handler.setFormatter(f_format)
|
|
||||||
|
|
||||||
# Add handlers to the logger
|
|
||||||
# logger.addHandler(c_handler)
|
|
||||||
self.logger.addHandler(f_handler)
|
|
||||||
|
|
||||||
def __getattr__(self, name):
|
|
||||||
''' will only get called for undefined attributes '''
|
|
||||||
if hasattr(self.logger, name):
|
|
||||||
return eval("self.logger.%s" % name)
|
|
||||||
93
fHDHR/origin/__init__.py
Normal file
93
fHDHR/origin/__init__.py
Normal file
@ -0,0 +1,93 @@
|
|||||||
|
from .origin_service import OriginService
|
||||||
|
from .origin_channels import OriginChannels
|
||||||
|
from .origin_epg import OriginEPG
|
||||||
|
|
||||||
|
import fHDHR.exceptions
|
||||||
|
|
||||||
|
|
||||||
|
class OriginEPG_StandIN():
|
||||||
|
def __init__(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def update_epg(self, channels):
|
||||||
|
return {}
|
||||||
|
|
||||||
|
|
||||||
|
class OriginChannels_StandIN():
|
||||||
|
def __init__(self):
|
||||||
|
pass
|
||||||
|
|
||||||
|
def get_channels(self):
|
||||||
|
return []
|
||||||
|
|
||||||
|
def get_channel_stream(self, chandict):
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
class OriginServiceWrapper():
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
self.servicename = fhdhr.config.dict["main"]["servicename"]
|
||||||
|
|
||||||
|
self.setup_success = None
|
||||||
|
self.setup()
|
||||||
|
|
||||||
|
def setup(self):
|
||||||
|
|
||||||
|
try:
|
||||||
|
self.origin = OriginService(self.fhdhr)
|
||||||
|
self.setup_success = True
|
||||||
|
self.fhdhr.logger.info("%s Setup Success" % self.servicename)
|
||||||
|
except fHDHR.exceptions.OriginSetupError as e:
|
||||||
|
self.fhdhr.logger.error(e)
|
||||||
|
self.setup_success = False
|
||||||
|
|
||||||
|
if self.setup_success:
|
||||||
|
self.channels = OriginChannels(self.fhdhr, self.origin)
|
||||||
|
self.epg = OriginEPG(self.fhdhr)
|
||||||
|
else:
|
||||||
|
self.channels = OriginChannels_StandIN()
|
||||||
|
self.epg = OriginEPG_StandIN()
|
||||||
|
|
||||||
|
def get_channels(self):
|
||||||
|
return self.channels.get_channels()
|
||||||
|
|
||||||
|
def get_channel_stream(self, chandict):
|
||||||
|
return self.channels.get_channel_stream(chandict)
|
||||||
|
|
||||||
|
def update_epg(self, channels):
|
||||||
|
return self.epg.update_epg(channels)
|
||||||
|
|
||||||
|
def get_status_dict(self):
|
||||||
|
|
||||||
|
if self.setup_success:
|
||||||
|
status_dict = {
|
||||||
|
"Setup": "Success",
|
||||||
|
}
|
||||||
|
|
||||||
|
try:
|
||||||
|
full_status_dict = self.origin.get_status_dict()
|
||||||
|
for status_key in list(full_status_dict.keys()):
|
||||||
|
status_dict[status_key] = full_status_dict[status_key]
|
||||||
|
return status_dict
|
||||||
|
except AttributeError:
|
||||||
|
return status_dict
|
||||||
|
else:
|
||||||
|
return {
|
||||||
|
"Setup": "Failed",
|
||||||
|
}
|
||||||
|
|
||||||
|
def __getattr__(self, name):
|
||||||
|
''' will only get called for undefined attributes '''
|
||||||
|
if hasattr(self.fhdhr, name):
|
||||||
|
return eval("self.fhdhr." + name)
|
||||||
|
if hasattr(self.origin, name):
|
||||||
|
return eval("self.origin." + name)
|
||||||
|
elif hasattr(self.channels, name):
|
||||||
|
return eval("self.channels." + name)
|
||||||
|
elif hasattr(self.epg, name):
|
||||||
|
return eval("self.epg." + name)
|
||||||
|
else:
|
||||||
|
raise AttributeError(name)
|
||||||
61
fHDHR/origin/origin_channels.py
Normal file
61
fHDHR/origin/origin_channels.py
Normal file
@ -0,0 +1,61 @@
|
|||||||
|
import xmltodict
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
|
class OriginChannels():
|
||||||
|
|
||||||
|
def __init__(self, fhdhr, origin):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
self.origin = origin
|
||||||
|
|
||||||
|
def get_channel_thumbnail(self, channel_id):
|
||||||
|
channel_thumb_url = ("%s%s:%s/service?method=channel.icon&channel_id=%s" %
|
||||||
|
("https://" if self.fhdhr.config.dict["origin"]["ssl"] else "http://",
|
||||||
|
self.fhdhr.config.dict["origin"]["address"],
|
||||||
|
str(self.fhdhr.config.dict["origin"]["port"]),
|
||||||
|
str(channel_id)
|
||||||
|
))
|
||||||
|
return channel_thumb_url
|
||||||
|
|
||||||
|
def get_channels(self):
|
||||||
|
|
||||||
|
data_url = ('%s%s:%s/service?method=channel.list&sid=%s' %
|
||||||
|
("https://" if self.fhdhr.config.dict["origin"]["ssl"] else "http://",
|
||||||
|
self.fhdhr.config.dict["origin"]["address"],
|
||||||
|
str(self.fhdhr.config.dict["origin"]["port"]),
|
||||||
|
self.origin.sid
|
||||||
|
))
|
||||||
|
|
||||||
|
data_req = self.fhdhr.web.session.get(data_url)
|
||||||
|
data_dict = xmltodict.parse(data_req.content)
|
||||||
|
|
||||||
|
if 'channels' not in list(data_dict['rsp'].keys()):
|
||||||
|
self.fhdhr.logger.error("Could not retrieve channel list")
|
||||||
|
return []
|
||||||
|
|
||||||
|
channel_o_list = data_dict['rsp']['channels']['channel']
|
||||||
|
|
||||||
|
channel_list = []
|
||||||
|
for c in channel_o_list:
|
||||||
|
dString = json.dumps(c)
|
||||||
|
channel_dict = eval(dString)
|
||||||
|
|
||||||
|
clean_station_item = {
|
||||||
|
"name": channel_dict["name"],
|
||||||
|
"callsign": channel_dict["name"],
|
||||||
|
"number": channel_dict["formatted-number"],
|
||||||
|
"id": channel_dict["id"],
|
||||||
|
"thumbnail": self.get_channel_thumbnail(channel_dict["id"])
|
||||||
|
}
|
||||||
|
channel_list.append(clean_station_item)
|
||||||
|
return channel_list
|
||||||
|
|
||||||
|
def get_channel_stream(self, chandict):
|
||||||
|
streamurl = ('%s%s:%s/live?channel_id=%s&client=%s' %
|
||||||
|
("https://" if self.fhdhr.config.dict["origin"]["ssl"] else "http://",
|
||||||
|
self.fhdhr.config.dict["origin"]["address"],
|
||||||
|
str(self.fhdhr.config.dict["origin"]["port"]),
|
||||||
|
str(chandict["origin_id"]),
|
||||||
|
"fhdhr_" + str(chandict["origin_number"]),
|
||||||
|
))
|
||||||
|
return streamurl
|
||||||
@ -1,45 +1,64 @@
|
|||||||
|
import datetime
|
||||||
import xmltodict
|
import xmltodict
|
||||||
|
|
||||||
import fHDHR.tools
|
import fHDHR.tools
|
||||||
|
|
||||||
|
|
||||||
class Plugin_OBJ():
|
class OriginEPG():
|
||||||
|
|
||||||
def __init__(self, channels, plugin_utils):
|
def __init__(self, fhdhr):
|
||||||
self.plugin_utils = plugin_utils
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
self.channels = channels
|
def get_channel_thumbnail(self, channel_id):
|
||||||
|
channel_thumb_url = ("%s%s:%s/service?method=channel.icon&channel_id=%s" %
|
||||||
self.origin = plugin_utils.origin
|
("https://" if self.fhdhr.config.dict["origin"]["ssl"] else "http://",
|
||||||
|
self.fhdhr.config.dict["origin"]["address"],
|
||||||
|
str(self.fhdhr.config.dict["origin"]["port"]),
|
||||||
|
str(channel_id)
|
||||||
|
))
|
||||||
|
return channel_thumb_url
|
||||||
|
|
||||||
def get_content_thumbnail(self, content_id):
|
def get_content_thumbnail(self, content_id):
|
||||||
item_thumb_url = ("%s%s:%s/service?method=channel.show.artwork&sid=%s&event_id=%s" %
|
item_thumb_url = ("%s%s:%s/service?method=channel.show.artwork&sid=%s&event_id=%s" %
|
||||||
("https://" if self.fhdhr.config.dict["nextpvr"]["ssl"] else "http://",
|
("https://" if self.fhdhr.config.dict["origin"]["ssl"] else "http://",
|
||||||
self.fhdhr.config.dict["nextpvr"]["address"],
|
self.fhdhr.config.dict["origin"]["address"],
|
||||||
str(self.fhdhr.config.dict["nextpvr"]["port"]),
|
str(self.fhdhr.config.dict["origin"]["port"]),
|
||||||
self.fhdhr.config.dict["nextpvr"]["sid"],
|
self.fhdhr.config.dict["origin"]["sid"],
|
||||||
str(content_id)
|
str(content_id)
|
||||||
))
|
))
|
||||||
return item_thumb_url
|
return item_thumb_url
|
||||||
|
|
||||||
|
def xmltimestamp_nextpvr(self, epochtime):
|
||||||
|
xmltime = datetime.datetime.fromtimestamp(int(epochtime)/1000)
|
||||||
|
xmltime = str(xmltime.strftime('%Y%m%d%H%M%S')) + " +0000"
|
||||||
|
return xmltime
|
||||||
|
|
||||||
def duration_nextpvr_minutes(self, starttime, endtime):
|
def duration_nextpvr_minutes(self, starttime, endtime):
|
||||||
return ((int(endtime) - int(starttime))/1000/60)
|
return ((int(endtime) - int(starttime))/1000/60)
|
||||||
|
|
||||||
def update_epg(self):
|
def update_epg(self, fhdhr_channels):
|
||||||
programguide = {}
|
programguide = {}
|
||||||
|
|
||||||
for fhdhr_id in list(self.channels.list.keys()):
|
for c in fhdhr_channels.get_channels():
|
||||||
chan_obj = self.channels.list[fhdhr_id]
|
|
||||||
|
|
||||||
if str(chan_obj.number) not in list(programguide.keys()):
|
cdict = fHDHR.tools.xmldictmaker(c, ["callsign", "name", "number", "id"])
|
||||||
|
|
||||||
programguide[str(chan_obj.number)] = chan_obj.epgdict
|
if str(cdict['number']) not in list(programguide.keys()):
|
||||||
|
|
||||||
|
programguide[str(cdict['number'])] = {
|
||||||
|
"callsign": cdict["callsign"],
|
||||||
|
"name": cdict["name"] or cdict["callsign"],
|
||||||
|
"number": cdict["number"],
|
||||||
|
"id": str(cdict["origin_id"]),
|
||||||
|
"thumbnail": self.get_channel_thumbnail(cdict['origin_id']),
|
||||||
|
"listing": [],
|
||||||
|
}
|
||||||
|
|
||||||
epg_url = ('%s%s:%s/service?method=channel.listings&channel_id=%s' %
|
epg_url = ('%s%s:%s/service?method=channel.listings&channel_id=%s' %
|
||||||
("https://" if self.fhdhr.config.dict["nextpvr"]["ssl"] else "http://",
|
("https://" if self.fhdhr.config.dict["origin"]["ssl"] else "http://",
|
||||||
self.fhdhr.config.dict["nextpvr"]["address"],
|
self.fhdhr.config.dict["origin"]["address"],
|
||||||
str(self.fhdhr.config.dict["nextpvr"]["port"]),
|
str(self.fhdhr.config.dict["origin"]["port"]),
|
||||||
str(chan_obj.dict["origin_id"]),
|
str(cdict["origin_id"]),
|
||||||
))
|
))
|
||||||
epg_req = self.fhdhr.web.session.get(epg_url)
|
epg_req = self.fhdhr.web.session.get(epg_url)
|
||||||
epg_dict = xmltodict.parse(epg_req.content)
|
epg_dict = xmltodict.parse(epg_req.content)
|
||||||
@ -51,8 +70,8 @@ class Plugin_OBJ():
|
|||||||
progdict = fHDHR.tools.xmldictmaker(program_item, ["start", "end", "title", "name", "subtitle", "rating", "description", "season", "episode", "id", "episodeTitle"])
|
progdict = fHDHR.tools.xmldictmaker(program_item, ["start", "end", "title", "name", "subtitle", "rating", "description", "season", "episode", "id", "episodeTitle"])
|
||||||
|
|
||||||
clean_prog_dict = {
|
clean_prog_dict = {
|
||||||
"time_start": (int(progdict["start"]) / 1000),
|
"time_start": self.xmltimestamp_nextpvr(progdict["start"]),
|
||||||
"time_end": (int(progdict["end"]) / 1000),
|
"time_end": self.xmltimestamp_nextpvr(progdict["end"]),
|
||||||
"duration_minutes": self.duration_nextpvr_minutes(progdict["start"], progdict["end"]),
|
"duration_minutes": self.duration_nextpvr_minutes(progdict["start"], progdict["end"]),
|
||||||
"thumbnail": self.get_content_thumbnail(progdict['id']),
|
"thumbnail": self.get_content_thumbnail(progdict['id']),
|
||||||
"title": progdict['name'] or "Unavailable",
|
"title": progdict['name'] or "Unavailable",
|
||||||
@ -65,7 +84,7 @@ class Plugin_OBJ():
|
|||||||
"seasonnumber": progdict['season'],
|
"seasonnumber": progdict['season'],
|
||||||
"episodenumber": progdict['episode'],
|
"episodenumber": progdict['episode'],
|
||||||
"isnew": False,
|
"isnew": False,
|
||||||
"id": str(progdict['id'] or "%s_%s" % (chan_obj.dict['origin_id'], progdict["start"])),
|
"id": str(progdict['id'] or self.xmltimestamp_nextpvr(progdict["start"])),
|
||||||
}
|
}
|
||||||
|
|
||||||
if 'genre' in list(progdict.keys()):
|
if 'genre' in list(progdict.keys()):
|
||||||
@ -78,7 +97,6 @@ class Plugin_OBJ():
|
|||||||
|
|
||||||
# TODO isNEW
|
# TODO isNEW
|
||||||
|
|
||||||
if not any((d['time_start'] == clean_prog_dict['time_start'] and d['id'] == clean_prog_dict['id']) for d in programguide[chan_obj.number]["listing"]):
|
programguide[str(cdict["number"])]["listing"].append(clean_prog_dict)
|
||||||
programguide[str(chan_obj.number)]["listing"].append(clean_prog_dict)
|
|
||||||
|
|
||||||
return programguide
|
return programguide
|
||||||
70
fHDHR/origin/origin_service.py
Normal file
70
fHDHR/origin/origin_service.py
Normal file
@ -0,0 +1,70 @@
|
|||||||
|
import xmltodict
|
||||||
|
import hashlib
|
||||||
|
|
||||||
|
import fHDHR.tools
|
||||||
|
import fHDHR.exceptions
|
||||||
|
|
||||||
|
|
||||||
|
class OriginService():
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
self.login()
|
||||||
|
|
||||||
|
def login(self):
|
||||||
|
self.fhdhr.logger.info("Logging into NextPVR")
|
||||||
|
self.sid = self.get_sid()
|
||||||
|
if not self.sid:
|
||||||
|
raise fHDHR.exceptions.OriginSetupError("NextPVR Login Failed")
|
||||||
|
else:
|
||||||
|
self.fhdhr.logger.info("NextPVR Login Success")
|
||||||
|
self.fhdhr.config.write(self.fhdhr.config.dict["main"]["dictpopname"], 'sid', self.sid)
|
||||||
|
|
||||||
|
def get_sid(self):
|
||||||
|
if self.fhdhr.config.dict["origin"]["sid"]:
|
||||||
|
return self.fhdhr.config.dict["origin"]["sid"]
|
||||||
|
|
||||||
|
initiate_url = ('%s%s:%s/service?method=session.initiate&ver=1.0&device=fhdhr' %
|
||||||
|
("https://" if self.fhdhr.config.dict["origin"]["ssl"] else "http://",
|
||||||
|
self.fhdhr.config.dict["origin"]["address"],
|
||||||
|
str(self.fhdhr.config.dict["origin"]["port"]),
|
||||||
|
))
|
||||||
|
|
||||||
|
initiate_req = self.fhdhr.web.session.get(initiate_url)
|
||||||
|
initiate_dict = xmltodict.parse(initiate_req.content)
|
||||||
|
|
||||||
|
sid = initiate_dict['rsp']['sid']
|
||||||
|
salt = initiate_dict['rsp']['salt']
|
||||||
|
md5PIN = hashlib.md5(str(self.fhdhr.config.dict["origin"]['pin']).encode('utf-8')).hexdigest()
|
||||||
|
string = ':%s:%s' % (md5PIN, salt)
|
||||||
|
clientKey = hashlib.md5(string.encode('utf-8')).hexdigest()
|
||||||
|
|
||||||
|
login_url = ('%s%s:%s/service?method=session.login&sid=%s&md5=%s' %
|
||||||
|
("https://" if self.fhdhr.config.dict["origin"]["ssl"] else "http://",
|
||||||
|
self.fhdhr.config.dict["origin"]["address"],
|
||||||
|
str(self.fhdhr.config.dict["origin"]["port"]),
|
||||||
|
sid,
|
||||||
|
clientKey
|
||||||
|
))
|
||||||
|
login_req = self.fhdhr.web.session.get(login_url)
|
||||||
|
login_dict = xmltodict.parse(login_req.content)
|
||||||
|
|
||||||
|
loginsuccess = None
|
||||||
|
if login_dict['rsp']['@stat'] == "ok":
|
||||||
|
if login_dict['rsp']['allow_watch'] == "true":
|
||||||
|
loginsuccess = sid
|
||||||
|
|
||||||
|
return loginsuccess
|
||||||
|
|
||||||
|
def get_status_dict(self):
|
||||||
|
nextpvr_address = ('%s%s:%s' %
|
||||||
|
("https://" if self.fhdhr.config.dict["origin"]["ssl"] else "http://",
|
||||||
|
self.fhdhr.config.dict["origin"]["address"],
|
||||||
|
str(self.fhdhr.config.dict["origin"]["port"]),
|
||||||
|
))
|
||||||
|
ret_status_dict = {
|
||||||
|
"Login": "Success",
|
||||||
|
"Address": nextpvr_address,
|
||||||
|
}
|
||||||
|
return ret_status_dict
|
||||||
@ -1,48 +0,0 @@
|
|||||||
|
|
||||||
import fHDHR.exceptions
|
|
||||||
|
|
||||||
|
|
||||||
class Origin_StandIN():
|
|
||||||
def __init__(self):
|
|
||||||
self.setup_success = False
|
|
||||||
|
|
||||||
def get_channels(self):
|
|
||||||
return []
|
|
||||||
|
|
||||||
def get_channel_stream(self, chandict, stream_args):
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
class Origins():
|
|
||||||
|
|
||||||
def __init__(self, fhdhr):
|
|
||||||
self.fhdhr = fhdhr
|
|
||||||
|
|
||||||
self.origins_dict = {}
|
|
||||||
self.origin_selfadd()
|
|
||||||
for plugin_name in list(self.fhdhr.plugins.plugins.keys()):
|
|
||||||
if self.fhdhr.plugins.plugins[plugin_name].manifest["tagged_mod"] and self.fhdhr.plugins.plugins[plugin_name].manifest["tagged_mod_type"] == "origin":
|
|
||||||
self.fhdhr.plugins.plugins[plugin_name].plugin_utils.origin = self.origins_dict[self.fhdhr.plugins.plugins[plugin_name].manifest["tagged_mod"].lower()]
|
|
||||||
|
|
||||||
@property
|
|
||||||
def valid_origins(self):
|
|
||||||
return [origin for origin in list(self.origins_dict.keys())]
|
|
||||||
|
|
||||||
def origin_selfadd(self):
|
|
||||||
for plugin_name in list(self.fhdhr.plugins.plugins.keys()):
|
|
||||||
if self.fhdhr.plugins.plugins[plugin_name].type == "origin":
|
|
||||||
method = self.fhdhr.plugins.plugins[plugin_name].name.lower()
|
|
||||||
try:
|
|
||||||
plugin_utils = self.fhdhr.plugins.plugins[plugin_name].plugin_utils
|
|
||||||
self.origins_dict[method] = self.fhdhr.plugins.plugins[plugin_name].Plugin_OBJ(plugin_utils)
|
|
||||||
self.fhdhr.logger.info("%s Setup Success" % method)
|
|
||||||
self.origins_dict[method].setup_success = True
|
|
||||||
except fHDHR.exceptions.OriginSetupError as e:
|
|
||||||
self.fhdhr.logger.error(e)
|
|
||||||
self.origins_dict[method] = Origin_StandIN()
|
|
||||||
|
|
||||||
if not hasattr(self.origins_dict[method], 'tuners'):
|
|
||||||
self.origins_dict[method].tuners = 4
|
|
||||||
|
|
||||||
if not hasattr(self.origins_dict[method], 'stream_method'):
|
|
||||||
self.origins_dict[method].stream_method = self.fhdhr.config.dict["streaming"]["method"]
|
|
||||||
@ -1,250 +0,0 @@
|
|||||||
import os
|
|
||||||
import imp
|
|
||||||
import json
|
|
||||||
|
|
||||||
|
|
||||||
class Plugin_DB():
|
|
||||||
def __init__(self, db, name):
|
|
||||||
self._db = db
|
|
||||||
self.name = name
|
|
||||||
self.namespace = name.lower()
|
|
||||||
|
|
||||||
# fhdhr
|
|
||||||
def set_fhdhr_value(self, pluginitem, key, value, namespace="default"):
|
|
||||||
print("%s plugin is not allowed write access to fhdhr db namespaces." % self.name)
|
|
||||||
return
|
|
||||||
|
|
||||||
def get_fhdhr_value(self, pluginitem, key, namespace="default"):
|
|
||||||
return self._db.get_fhdhr_value(pluginitem, key, namespace=namespace.lower())
|
|
||||||
|
|
||||||
def delete_fhdhr_value(self, pluginitem, key, namespace="default"):
|
|
||||||
print("%s plugin is not allowed write access to fhdhr db namespaces." % self.name)
|
|
||||||
return
|
|
||||||
|
|
||||||
# Plugin
|
|
||||||
def set_plugin_value(self, pluginitem, key, value, namespace=None):
|
|
||||||
if not namespace:
|
|
||||||
namespace = self.namespace
|
|
||||||
elif namespace.lower() != self.namespace:
|
|
||||||
print("%s plugin is not allowed write access to %s db namespace." % (self.name, namespace))
|
|
||||||
return
|
|
||||||
return self._db.set_plugin_value(pluginitem, key, value, namespace=self.namespace)
|
|
||||||
|
|
||||||
def get_plugin_value(self, pluginitem, key, namespace=None):
|
|
||||||
if not namespace:
|
|
||||||
namespace = self.namespace
|
|
||||||
return self._db.get_plugin_value(pluginitem, key, namespace=namespace.lower())
|
|
||||||
|
|
||||||
def delete_plugin_value(self, pluginitem, key, namespace=None):
|
|
||||||
if not namespace:
|
|
||||||
namespace = self.namespace
|
|
||||||
elif namespace.lower() != self.namespace:
|
|
||||||
print("%s plugin is not allowed write access to %s db namespace." % (self.name, namespace))
|
|
||||||
return
|
|
||||||
return self._db.delete_plugin_value(pluginitem, key, namespace=self.namespace)
|
|
||||||
|
|
||||||
|
|
||||||
class Plugin_Config():
|
|
||||||
def __init__(self, config, name):
|
|
||||||
self._config = config
|
|
||||||
self.name = name
|
|
||||||
self.namespace = name.lower()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def dict(self):
|
|
||||||
return self._config.dict.copy()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def internal(self):
|
|
||||||
return self._config.internal.copy()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def conf_default(self):
|
|
||||||
return self._config.conf_default.copy()
|
|
||||||
|
|
||||||
def write(self, key, value, namespace=None):
|
|
||||||
if not namespace:
|
|
||||||
namespace = self.namespace
|
|
||||||
elif str(namespace).lower() != self.namespace:
|
|
||||||
print("%s plugin is not allowed write access to fhdhr config namespaces." % self.name)
|
|
||||||
return
|
|
||||||
return self._config.write(key, value, self.namespace)
|
|
||||||
|
|
||||||
|
|
||||||
class Plugin_Utils():
|
|
||||||
|
|
||||||
def __init__(self, config, logger, db, plugin_name, plugin_manifest, modname):
|
|
||||||
self.config = Plugin_Config(config, plugin_manifest["name"])
|
|
||||||
self.db = Plugin_DB(db, plugin_manifest["name"])
|
|
||||||
self.logger = logger
|
|
||||||
self.namespace = plugin_manifest["name"].lower()
|
|
||||||
self.plugin_name = plugin_name
|
|
||||||
self.plugin_manifest = plugin_manifest
|
|
||||||
self.origin = None
|
|
||||||
|
|
||||||
|
|
||||||
class Plugin():
|
|
||||||
|
|
||||||
def __init__(self, config, logger, db, plugin_name, plugin_path, plugin_conf, plugin_manifest):
|
|
||||||
self.config = config
|
|
||||||
self.db = db
|
|
||||||
self.logger = logger
|
|
||||||
|
|
||||||
# Gather Info about Plugin
|
|
||||||
self.plugin_name = plugin_name
|
|
||||||
self.modname = os.path.basename(plugin_path)
|
|
||||||
self.path = plugin_path
|
|
||||||
self.module_type = imp.PKG_DIRECTORY
|
|
||||||
self.multi_plugin = (self.plugin_name != self.modname)
|
|
||||||
self.default_conf = plugin_conf
|
|
||||||
self.manifest = plugin_manifest
|
|
||||||
|
|
||||||
if self.multi_plugin:
|
|
||||||
self.plugin_dict_name = "%s_%s" % (plugin_name, self.modname)
|
|
||||||
else:
|
|
||||||
self.plugin_dict_name = plugin_name
|
|
||||||
|
|
||||||
self.plugin_utils = Plugin_Utils(config, logger, db, plugin_name, plugin_manifest, self.modname)
|
|
||||||
|
|
||||||
# Load the module
|
|
||||||
self._module = self._load()
|
|
||||||
|
|
||||||
def setup(self):
|
|
||||||
|
|
||||||
if self.type == "alt_epg":
|
|
||||||
self.config.register_valid_epg_method(self.name, self.plugin_dict_name)
|
|
||||||
elif self.type == "alt_stream":
|
|
||||||
self.config.register_valid_streaming_method(self.name, self.plugin_dict_name)
|
|
||||||
elif self.type == "web":
|
|
||||||
self.config.register_web_path(self.manifest["name"], self.path, self.plugin_dict_name)
|
|
||||||
|
|
||||||
if self.has_setup():
|
|
||||||
self._module.setup(self)
|
|
||||||
|
|
||||||
def has_setup(self):
|
|
||||||
return hasattr(self._module, 'setup')
|
|
||||||
|
|
||||||
def _load(self):
|
|
||||||
description = ('', '', self.module_type)
|
|
||||||
mod = imp.load_module(self.plugin_dict_name, None, self.path, description)
|
|
||||||
return mod
|
|
||||||
|
|
||||||
@property
|
|
||||||
def name(self):
|
|
||||||
return self.manifest["name"]
|
|
||||||
|
|
||||||
@property
|
|
||||||
def version(self):
|
|
||||||
return self.manifest["version"]
|
|
||||||
|
|
||||||
@property
|
|
||||||
def type(self):
|
|
||||||
return self.manifest["type"]
|
|
||||||
|
|
||||||
def __getattr__(self, name):
|
|
||||||
''' will only get called for undefined attributes '''
|
|
||||||
if name == "Plugin_OBJ":
|
|
||||||
return self._module.Plugin_OBJ
|
|
||||||
|
|
||||||
|
|
||||||
class PluginsHandler():
|
|
||||||
|
|
||||||
def __init__(self, settings):
|
|
||||||
self.config = settings
|
|
||||||
|
|
||||||
self.plugins = {}
|
|
||||||
|
|
||||||
self.found_plugins = []
|
|
||||||
self.found_plugins_conf = []
|
|
||||||
self.list_plugins()
|
|
||||||
|
|
||||||
def setup(self):
|
|
||||||
for plugin_name in list(self.plugins.keys()):
|
|
||||||
self.plugins[plugin_name].setup()
|
|
||||||
|
|
||||||
def load_plugin_configs(self):
|
|
||||||
for file_item_path in self.found_plugins_conf:
|
|
||||||
self.config.import_conf_json(file_item_path)
|
|
||||||
|
|
||||||
def list_plugins(self):
|
|
||||||
for directory in self.config.internal["paths"]["plugins_dir"]:
|
|
||||||
|
|
||||||
base = os.path.abspath(directory)
|
|
||||||
for filename in os.listdir(base):
|
|
||||||
abspath = os.path.join(base, filename)
|
|
||||||
|
|
||||||
if os.path.isdir(abspath):
|
|
||||||
|
|
||||||
plugin_conf = []
|
|
||||||
for subfilename in os.listdir(abspath):
|
|
||||||
subabspath = os.path.join(abspath, subfilename)
|
|
||||||
if subfilename.endswith("_conf.json"):
|
|
||||||
plugin_conf.append(subabspath)
|
|
||||||
self.found_plugins_conf.append(subabspath)
|
|
||||||
|
|
||||||
# Plugin/multi-plugin must have a basic manifest json
|
|
||||||
conffilepath = os.path.join(abspath, 'plugin.json')
|
|
||||||
if os.path.isfile(conffilepath):
|
|
||||||
plugin_manifest = json.load(open(conffilepath, 'r'))
|
|
||||||
|
|
||||||
for plugin_man_item in ["name", "version", "type"]:
|
|
||||||
if plugin_man_item not in list(plugin_manifest.keys()):
|
|
||||||
plugin_manifest[plugin_man_item] = None
|
|
||||||
|
|
||||||
self.config.register_version(os.path.basename(filename), plugin_manifest["version"], "plugin")
|
|
||||||
|
|
||||||
if plugin_manifest["type"] == "origin":
|
|
||||||
self.config.register_valid_origin_method(plugin_manifest["name"])
|
|
||||||
|
|
||||||
plugin_import_print_string = "Found %s type plugin: %s %s. " % (plugin_manifest["type"], plugin_manifest["name"], plugin_manifest["version"])
|
|
||||||
|
|
||||||
# Warn for multiple origins
|
|
||||||
if plugin_manifest["type"] == "origin" and len([plugin_name for plugin_name, plugin_path, plugin_conf, plugin_manifest in self.found_plugins if plugin_manifest["type"] == "origin"]):
|
|
||||||
plugin_import_print_string += " ImportWarning: Only one Origin Allowed."
|
|
||||||
|
|
||||||
if not any(plugin_manifest[plugin_item] for plugin_item in ["name", "version", "type"]):
|
|
||||||
plugin_import_print_string += " ImportWarning: Missing PLUGIN_* Value."
|
|
||||||
else:
|
|
||||||
|
|
||||||
# Single Plugin
|
|
||||||
if os.path.isfile(os.path.join(abspath, '__init__.py')):
|
|
||||||
plugin_manifest["tagged_mod"] = None
|
|
||||||
plugin_manifest["tagged_mod_type"] = None
|
|
||||||
self.found_plugins.append((os.path.basename(filename), abspath, plugin_conf, plugin_manifest))
|
|
||||||
|
|
||||||
else:
|
|
||||||
|
|
||||||
# Multi-Plugin
|
|
||||||
for subfilename in os.listdir(abspath):
|
|
||||||
subabspath = os.path.join(abspath, subfilename)
|
|
||||||
|
|
||||||
if os.path.isdir(subabspath):
|
|
||||||
|
|
||||||
subconffilepath = os.path.join(subabspath, 'plugin.json')
|
|
||||||
if os.path.isfile(subconffilepath):
|
|
||||||
subplugin_manifest = json.load(open(subconffilepath, 'r'))
|
|
||||||
|
|
||||||
for subplugin_man_item in ["name", "version", "type"]:
|
|
||||||
if subplugin_man_item not in list(subplugin_manifest.keys()):
|
|
||||||
subplugin_manifest[subplugin_man_item] = plugin_manifest[subplugin_man_item]
|
|
||||||
else:
|
|
||||||
subplugin_manifest = plugin_manifest
|
|
||||||
|
|
||||||
subplugin_manifest["tagged_mod"] = None
|
|
||||||
subplugin_manifest["tagged_mod_type"] = None
|
|
||||||
if plugin_manifest["type"] != subplugin_manifest["type"]:
|
|
||||||
subplugin_manifest["tagged_mod"] = plugin_manifest["name"]
|
|
||||||
subplugin_manifest["tagged_mod_type"] = plugin_manifest["type"]
|
|
||||||
|
|
||||||
if os.path.isfile(os.path.join(subabspath, '__init__.py')):
|
|
||||||
self.found_plugins.append((os.path.basename(filename), subabspath, plugin_conf, subplugin_manifest))
|
|
||||||
|
|
||||||
print(plugin_import_print_string)
|
|
||||||
self.load_plugin_configs()
|
|
||||||
|
|
||||||
def load_plugins(self, logger, db):
|
|
||||||
self.logger = logger
|
|
||||||
self.db = db
|
|
||||||
for plugin_name, plugin_path, plugin_conf, plugin_manifest in self.found_plugins:
|
|
||||||
plugin_item = Plugin(self.config, self.logger, self.db, plugin_name, plugin_path, plugin_conf, plugin_manifest)
|
|
||||||
self.plugins[plugin_item.plugin_dict_name] = plugin_item
|
|
||||||
@ -8,19 +8,6 @@ UNARY_OPS = (ast.UAdd, ast.USub)
|
|||||||
BINARY_OPS = (ast.Add, ast.Sub, ast.Mult, ast.Div, ast.Mod)
|
BINARY_OPS = (ast.Add, ast.Sub, ast.Mult, ast.Div, ast.Mod)
|
||||||
|
|
||||||
|
|
||||||
def channel_sort(channel_list):
|
|
||||||
"""Take a list of channel number strings and sort the Numbers and SubNumbers"""
|
|
||||||
chan_dict_list_split = {}
|
|
||||||
for number in channel_list:
|
|
||||||
try:
|
|
||||||
subnumber = number.split(".")[1]
|
|
||||||
except IndexError:
|
|
||||||
subnumber = None
|
|
||||||
prinumber = number.split(".")[0]
|
|
||||||
chan_dict_list_split[number] = {"number": prinumber, "subnumber": subnumber}
|
|
||||||
return sorted(chan_dict_list_split, key=lambda i: (int(chan_dict_list_split[i]['number']), int(chan_dict_list_split[i]['subnumber'] or 0)))
|
|
||||||
|
|
||||||
|
|
||||||
def is_docker():
|
def is_docker():
|
||||||
path = "/proc/self/cgroup"
|
path = "/proc/self/cgroup"
|
||||||
if not os.path.isfile(path):
|
if not os.path.isfile(path):
|
||||||
@ -32,8 +19,8 @@ def is_docker():
|
|||||||
return False
|
return False
|
||||||
|
|
||||||
|
|
||||||
def sub_el(parent, sub_el_item_name, text=None, **kwargs):
|
def sub_el(parent, name, text=None, **kwargs):
|
||||||
el = xml.etree.ElementTree.SubElement(parent, sub_el_item_name, **kwargs)
|
el = xml.etree.ElementTree.SubElement(parent, name, **kwargs)
|
||||||
if text:
|
if text:
|
||||||
el.text = text
|
el.text = text
|
||||||
return el
|
return el
|
||||||
@ -133,9 +120,9 @@ def humanized_time(countdownseconds):
|
|||||||
if currenttimevar > 1:
|
if currenttimevar > 1:
|
||||||
timetype = str(x+"s")
|
timetype = str(x+"s")
|
||||||
if displaymsg:
|
if displaymsg:
|
||||||
displaymsg = "%s %s %s" % (displaymsg, int(currenttimevar), timetype)
|
displaymsg = str(displaymsg + " " + str(int(currenttimevar)) + " " + timetype)
|
||||||
else:
|
else:
|
||||||
displaymsg = "%s %s" % (int(currenttimevar), timetype)
|
displaymsg = str(str(int(currenttimevar)) + " " + timetype)
|
||||||
if not displaymsg:
|
if not displaymsg:
|
||||||
return "just now"
|
return "just now"
|
||||||
return displaymsg
|
return displaymsg
|
||||||
@ -147,8 +134,3 @@ class WebReq():
|
|||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.session = requests.Session()
|
self.session = requests.Session()
|
||||||
self.exceptions = requests.exceptions
|
self.exceptions = requests.exceptions
|
||||||
|
|
||||||
def __getattr__(self, name):
|
|
||||||
''' will only get called for undefined attributes '''
|
|
||||||
if hasattr(self.session, name):
|
|
||||||
return eval("self.session.%s" % name)
|
|
||||||
|
|||||||
@ -1,229 +0,0 @@
|
|||||||
from gevent.pywsgi import WSGIServer
|
|
||||||
from flask import Flask, request, session
|
|
||||||
import threading
|
|
||||||
import uuid
|
|
||||||
|
|
||||||
from .pages import fHDHR_Pages
|
|
||||||
from .files import fHDHR_Files
|
|
||||||
from .brython import fHDHR_Brython
|
|
||||||
from .api import fHDHR_API
|
|
||||||
|
|
||||||
|
|
||||||
fHDHR_web_VERSION = "v0.8.1-beta"
|
|
||||||
|
|
||||||
|
|
||||||
class fHDHR_HTTP_Server():
|
|
||||||
app = None
|
|
||||||
|
|
||||||
def __init__(self, fhdhr):
|
|
||||||
self.fhdhr = fhdhr
|
|
||||||
|
|
||||||
self.template_folder = fhdhr.config.internal["paths"]["www_templates_dir"]
|
|
||||||
|
|
||||||
self.fhdhr.logger.info("Loading Flask.")
|
|
||||||
|
|
||||||
self.fhdhr.app = Flask("fHDHR", template_folder=self.template_folder)
|
|
||||||
self.instance_id = str(uuid.uuid4())
|
|
||||||
|
|
||||||
# Allow Internal API Usage
|
|
||||||
self.fhdhr.app.testing = True
|
|
||||||
self.fhdhr.api.client = self.fhdhr.app.test_client()
|
|
||||||
|
|
||||||
# Set Secret Key For Sessions
|
|
||||||
self.fhdhr.app.secret_key = self.fhdhr.config.dict["fhdhr"]["friendlyname"]
|
|
||||||
|
|
||||||
self.route_list = {}
|
|
||||||
|
|
||||||
self.endpoints_obj = {}
|
|
||||||
self.endpoints_obj["pages"] = fHDHR_Pages(fhdhr)
|
|
||||||
self.endpoints_obj["files"] = fHDHR_Files(fhdhr)
|
|
||||||
self.endpoints_obj["brython"] = fHDHR_Brython(fhdhr)
|
|
||||||
self.endpoints_obj["api"] = fHDHR_API(fhdhr)
|
|
||||||
|
|
||||||
self.selfadd_web_plugins()
|
|
||||||
for endpoint_type in list(self.endpoints_obj.keys()):
|
|
||||||
self.fhdhr.logger.info("Loading HTTP %s Endpoints." % endpoint_type)
|
|
||||||
self.add_endpoints(endpoint_type)
|
|
||||||
|
|
||||||
self.fhdhr.app.before_request(self.before_request)
|
|
||||||
self.fhdhr.app.after_request(self.after_request)
|
|
||||||
self.fhdhr.app.before_first_request(self.before_first_request)
|
|
||||||
|
|
||||||
self.fhdhr.threads["flask"] = threading.Thread(target=self.run)
|
|
||||||
|
|
||||||
def selfadd_web_plugins(self):
|
|
||||||
for plugin_name in list(self.fhdhr.plugins.plugins.keys()):
|
|
||||||
if self.fhdhr.plugins.plugins[plugin_name].type == "web":
|
|
||||||
method = self.fhdhr.plugins.plugins[plugin_name].name.lower()
|
|
||||||
plugin_utils = self.fhdhr.plugins.plugins[plugin_name].plugin_utils
|
|
||||||
try:
|
|
||||||
self.endpoints_obj[method] = self.fhdhr.plugins.plugins[plugin_name].Plugin_OBJ(self.fhdhr, plugin_utils)
|
|
||||||
except Exception as e:
|
|
||||||
print(e)
|
|
||||||
|
|
||||||
def start(self):
|
|
||||||
self.fhdhr.logger.info("Flask HTTP Thread Starting")
|
|
||||||
self.fhdhr.threads["flask"].start()
|
|
||||||
|
|
||||||
def stop(self):
|
|
||||||
self.fhdhr.logger.info("Flask HTTP Thread Stopping")
|
|
||||||
self.http.stop()
|
|
||||||
|
|
||||||
def before_first_request(self):
|
|
||||||
self.fhdhr.logger.info("HTTP Server Online.")
|
|
||||||
|
|
||||||
def before_request(self):
|
|
||||||
|
|
||||||
session["session_id"] = str(uuid.uuid4())
|
|
||||||
session["instance_id"] = self.instance_id
|
|
||||||
session["route_list"] = self.route_list
|
|
||||||
|
|
||||||
session["user_agent"] = request.headers.get('User-Agent')
|
|
||||||
|
|
||||||
session["is_internal_api"] = self.detect_internal_api(request)
|
|
||||||
if session["is_internal_api"]:
|
|
||||||
self.fhdhr.logger.debug("Client is using internal API call.")
|
|
||||||
|
|
||||||
session["is_mobile"] = self.detect_mobile(request)
|
|
||||||
if session["is_mobile"]:
|
|
||||||
self.fhdhr.logger.debug("Client is a mobile device.")
|
|
||||||
|
|
||||||
session["is_plexmediaserver"] = self.detect_plexmediaserver(request)
|
|
||||||
if session["is_plexmediaserver"]:
|
|
||||||
self.fhdhr.logger.debug("Client is a Plex Media Server.")
|
|
||||||
|
|
||||||
session["deviceauth"] = self.detect_plexmediaserver(request)
|
|
||||||
|
|
||||||
session["tuner_used"] = None
|
|
||||||
|
|
||||||
session["restart"] = False
|
|
||||||
|
|
||||||
self.fhdhr.logger.debug("Client %s requested %s Opening" % (request.method, request.path))
|
|
||||||
|
|
||||||
def after_request(self, response):
|
|
||||||
|
|
||||||
# Close Tuner if it was in use, and did not close already
|
|
||||||
# if session["tuner_used"] is not None:
|
|
||||||
# tuner = self.fhdhr.device.tuners.tuners[str(session["tuner_used"])]
|
|
||||||
# if tuner.tuner_lock.locked():
|
|
||||||
# self.fhdhr.logger.info("Shutting down Tuner #%s after Request." % session["tuner_used"])
|
|
||||||
# tuner.close()
|
|
||||||
|
|
||||||
self.fhdhr.logger.debug("Client %s requested %s Closing" % (request.method, request.path))
|
|
||||||
if not session["restart"]:
|
|
||||||
return response
|
|
||||||
else:
|
|
||||||
return self.stop()
|
|
||||||
|
|
||||||
def detect_internal_api(self, request):
|
|
||||||
user_agent = request.headers.get('User-Agent')
|
|
||||||
if not user_agent:
|
|
||||||
return False
|
|
||||||
elif str(user_agent).lower().startswith("fhdhr"):
|
|
||||||
return True
|
|
||||||
else:
|
|
||||||
return False
|
|
||||||
|
|
||||||
def detect_deviceauth(self, request):
|
|
||||||
return request.args.get('DeviceAuth', default=None, type=str)
|
|
||||||
|
|
||||||
def detect_mobile(self, request):
|
|
||||||
user_agent = request.headers.get('User-Agent')
|
|
||||||
phones = ["iphone", "android", "blackberry"]
|
|
||||||
if not user_agent:
|
|
||||||
return False
|
|
||||||
elif any(phone in user_agent.lower() for phone in phones):
|
|
||||||
return True
|
|
||||||
else:
|
|
||||||
return False
|
|
||||||
|
|
||||||
def detect_plexmediaserver(self, request):
|
|
||||||
user_agent = request.headers.get('User-Agent')
|
|
||||||
if not user_agent:
|
|
||||||
return False
|
|
||||||
elif str(user_agent).lower().startswith("plexmediaserver"):
|
|
||||||
return True
|
|
||||||
else:
|
|
||||||
return False
|
|
||||||
|
|
||||||
def add_endpoints(self, index_name):
|
|
||||||
|
|
||||||
item_list = [x for x in dir(self.endpoints_obj[index_name]) if self.isapath(x)]
|
|
||||||
endpoint_main = self.endpoints_obj[index_name]
|
|
||||||
endpoint_main.fhdhr.version # dummy line
|
|
||||||
for item in item_list:
|
|
||||||
endpoints = eval("endpoint_main.%s.%s" % (item, "endpoints"))
|
|
||||||
if isinstance(endpoints, str):
|
|
||||||
endpoints = [endpoints]
|
|
||||||
handler = eval("endpoint_main.%s" % item)
|
|
||||||
endpoint_name = eval("endpoint_main.%s.%s" % (item, "endpoint_name"))
|
|
||||||
|
|
||||||
try:
|
|
||||||
endpoint_methods = eval("endpoint_main.%s.%s" % (item, "endpoint_methods"))
|
|
||||||
except AttributeError:
|
|
||||||
endpoint_methods = ['GET']
|
|
||||||
|
|
||||||
try:
|
|
||||||
endpoint_access_level = eval("endpoint_main.%s.%s" % (item, "endpoint_access_level"))
|
|
||||||
except AttributeError:
|
|
||||||
endpoint_access_level = 0
|
|
||||||
|
|
||||||
try:
|
|
||||||
pretty_name = eval("endpoint_main.%s.%s" % (item, "pretty_name"))
|
|
||||||
except AttributeError:
|
|
||||||
pretty_name = endpoint_name
|
|
||||||
|
|
||||||
try:
|
|
||||||
endpoint_category = eval("endpoint_main.%s.%s" % (item, "endpoint_category"))
|
|
||||||
except AttributeError:
|
|
||||||
endpoint_category = index_name
|
|
||||||
|
|
||||||
try:
|
|
||||||
endpoint_default_parameters = eval("endpoint_main.%s.%s" % (item, "endpoint_default_parameters"))
|
|
||||||
except AttributeError:
|
|
||||||
endpoint_default_parameters = {}
|
|
||||||
|
|
||||||
self.fhdhr.logger.debug("Adding endpoint %s available at %s with %s methods." % (endpoint_name, ",".join(endpoints), ",".join(endpoint_methods)))
|
|
||||||
|
|
||||||
if endpoint_category not in list(self.route_list.keys()):
|
|
||||||
self.route_list[endpoint_category] = {}
|
|
||||||
|
|
||||||
if endpoint_name not in list(self.route_list[endpoint_category].keys()):
|
|
||||||
self.route_list[endpoint_category][endpoint_name] = {}
|
|
||||||
self.route_list[endpoint_category][endpoint_name]["name"] = endpoint_name
|
|
||||||
self.route_list[endpoint_category][endpoint_name]["endpoints"] = endpoints
|
|
||||||
self.route_list[endpoint_category][endpoint_name]["endpoint_methods"] = endpoint_methods
|
|
||||||
self.route_list[endpoint_category][endpoint_name]["endpoint_access_level"] = endpoint_access_level
|
|
||||||
self.route_list[endpoint_category][endpoint_name]["endpoint_default_parameters"] = endpoint_default_parameters
|
|
||||||
self.route_list[endpoint_category][endpoint_name]["pretty_name"] = pretty_name
|
|
||||||
self.route_list[endpoint_category][endpoint_name]["endpoint_category"] = endpoint_category
|
|
||||||
|
|
||||||
for endpoint in endpoints:
|
|
||||||
self.add_endpoint(endpoint=endpoint,
|
|
||||||
endpoint_name=endpoint_name,
|
|
||||||
handler=handler,
|
|
||||||
methods=endpoint_methods)
|
|
||||||
|
|
||||||
def isapath(self, item):
|
|
||||||
not_a_page_list = ["fhdhr", "plugin_utils"]
|
|
||||||
if item in not_a_page_list:
|
|
||||||
return False
|
|
||||||
elif item.startswith("__") and item.endswith("__"):
|
|
||||||
return False
|
|
||||||
else:
|
|
||||||
return True
|
|
||||||
|
|
||||||
def add_endpoint(self, endpoint=None, endpoint_name=None, handler=None, methods=['GET']):
|
|
||||||
self.fhdhr.app.add_url_rule(endpoint, endpoint_name, handler, methods=methods)
|
|
||||||
|
|
||||||
def run(self):
|
|
||||||
|
|
||||||
self.http = WSGIServer(self.fhdhr.api.address_tuple,
|
|
||||||
self.fhdhr.app.wsgi_app,
|
|
||||||
log=self.fhdhr.logger.logger,
|
|
||||||
error_log=self.fhdhr.logger.logger)
|
|
||||||
try:
|
|
||||||
self.http.serve_forever()
|
|
||||||
self.stop()
|
|
||||||
except AttributeError:
|
|
||||||
self.fhdhr.logger.info("HTTP Server Offline")
|
|
||||||
@ -1,172 +0,0 @@
|
|||||||
from flask import request, redirect, Response, abort
|
|
||||||
import urllib.parse
|
|
||||||
import json
|
|
||||||
|
|
||||||
from fHDHR.tools import channel_sort
|
|
||||||
|
|
||||||
|
|
||||||
class Channels():
|
|
||||||
endpoints = ["/api/channels"]
|
|
||||||
endpoint_name = "api_channels"
|
|
||||||
endpoint_methods = ["GET", "POST"]
|
|
||||||
endpoint_default_parameters = {
|
|
||||||
"method": "get"
|
|
||||||
}
|
|
||||||
|
|
||||||
def __init__(self, fhdhr):
|
|
||||||
self.fhdhr = fhdhr
|
|
||||||
|
|
||||||
def __call__(self, *args):
|
|
||||||
return self.get(*args)
|
|
||||||
|
|
||||||
def get(self, *args):
|
|
||||||
|
|
||||||
method = request.args.get('method', default=None, type=str)
|
|
||||||
redirect_url = request.args.get('redirect', default=None, type=str)
|
|
||||||
|
|
||||||
origin_methods = self.fhdhr.origins.valid_origins
|
|
||||||
origin = request.args.get('origin', default=None, type=str)
|
|
||||||
if origin and origin not in origin_methods:
|
|
||||||
return "%s Invalid channels origin" % origin
|
|
||||||
|
|
||||||
if method == "get":
|
|
||||||
channels_info = {}
|
|
||||||
if not origin:
|
|
||||||
origin_list = origin_methods
|
|
||||||
else:
|
|
||||||
origin_list = [origin]
|
|
||||||
|
|
||||||
for origin_item in origin_list:
|
|
||||||
|
|
||||||
channels_info[origin_item] = {}
|
|
||||||
|
|
||||||
for fhdhr_id in [x["id"] for x in self.fhdhr.device.channels.get_channels(origin=origin_item)]:
|
|
||||||
channel_obj = self.fhdhr.device.channels.list[origin_item][fhdhr_id]
|
|
||||||
channel_dict = channel_obj.dict.copy()
|
|
||||||
channel_dict["m3u_url"] = channel_obj.api_m3u_url
|
|
||||||
channel_dict["stream_url"] = channel_obj.api_stream_url
|
|
||||||
channels_info[origin_item][channel_obj.number] = channel_dict
|
|
||||||
|
|
||||||
# Sort the channels
|
|
||||||
sorted_channel_list = channel_sort(list(channels_info[origin_item].keys()))
|
|
||||||
sorted_chan_guide = []
|
|
||||||
for channel in sorted_channel_list:
|
|
||||||
sorted_chan_guide.append(channels_info[origin_item][channel])
|
|
||||||
|
|
||||||
channels_info[origin_item] = sorted_chan_guide
|
|
||||||
|
|
||||||
channels_info_json = json.dumps(channels_info, indent=4)
|
|
||||||
|
|
||||||
return Response(status=200,
|
|
||||||
response=channels_info_json,
|
|
||||||
mimetype='application/json')
|
|
||||||
|
|
||||||
elif method == "favorite":
|
|
||||||
|
|
||||||
channel = request.args.get('channel', default=None, type=str)
|
|
||||||
if not channel:
|
|
||||||
if redirect_url:
|
|
||||||
return redirect("%s?retmessage=%s" % (redirect_url, urllib.parse.quote("%s Failed" % method)))
|
|
||||||
else:
|
|
||||||
return "%s Falied" % method
|
|
||||||
|
|
||||||
if channel.startstwith(tuple(["+", "-", "x"])):
|
|
||||||
|
|
||||||
channel_method = channel[0]
|
|
||||||
channel_number = channel[1:]
|
|
||||||
|
|
||||||
if str(channel_number) not in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number", origin)]:
|
|
||||||
response = Response("Not Found", status=404)
|
|
||||||
response.headers["X-fHDHR-Error"] = "801 - Unknown Channel"
|
|
||||||
self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
|
|
||||||
abort(response)
|
|
||||||
|
|
||||||
if channel_method == "+":
|
|
||||||
self.fhdhr.device.channels.set_channel_enablement("number", channel_number, channel_method, origin)
|
|
||||||
elif channel_method == "-":
|
|
||||||
self.fhdhr.device.channels.set_channel_enablement("number", channel_number, channel_method, origin)
|
|
||||||
elif channel_method == "x":
|
|
||||||
self.fhdhr.device.channels.set_channel_enablement("number", channel_number, "toggle", origin)
|
|
||||||
|
|
||||||
else:
|
|
||||||
self.fhdhr.logger.warning("Unknown favorite command %s" % request.args['favorite'])
|
|
||||||
return abort(200, "Not a valid favorite command")
|
|
||||||
|
|
||||||
elif method in ["enable", "disable"]:
|
|
||||||
channel = request.args.get('channel', default=None, type=str)
|
|
||||||
if channel == "all":
|
|
||||||
self.fhdhr.device.channels.set_channel_enablement_all(method, origin)
|
|
||||||
elif not channel or str(channel) not in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number", origin)]:
|
|
||||||
if redirect_url:
|
|
||||||
return redirect("%s?retmessage=%s" % (redirect_url, urllib.parse.quote("%s Failed" % method)))
|
|
||||||
else:
|
|
||||||
return "%s Falied" % method
|
|
||||||
else:
|
|
||||||
self.fhdhr.device.channels.set_channel_enablement("number", channel, method, origin)
|
|
||||||
|
|
||||||
elif method == "update":
|
|
||||||
channel_id = request.form.get('id', None)
|
|
||||||
updatedict = {}
|
|
||||||
for key in list(request.form.keys()):
|
|
||||||
if key != "id":
|
|
||||||
if key in ["name", "callsign", "thumbnail"]:
|
|
||||||
updatedict[key] = str(request.form.get(key))
|
|
||||||
elif key in ["number"]:
|
|
||||||
number = str(request.form.get(key))
|
|
||||||
if "." in number:
|
|
||||||
updatedict["subnumber"] = number.split(".")[1]
|
|
||||||
updatedict["number"] = number.split(".")[0]
|
|
||||||
else:
|
|
||||||
updatedict["number"] = number
|
|
||||||
elif key in ["enabled"]:
|
|
||||||
confvalue = request.form.get(key)
|
|
||||||
if str(confvalue).lower() in ["false"]:
|
|
||||||
confvalue = False
|
|
||||||
elif str(confvalue).lower() in ["true"]:
|
|
||||||
confvalue = True
|
|
||||||
updatedict[key] = confvalue
|
|
||||||
elif key in ["favorite", "HD"]:
|
|
||||||
updatedict[key] = int(request.form.get(key))
|
|
||||||
self.fhdhr.device.channels.set_channel_status("id", channel_id, updatedict, origin)
|
|
||||||
|
|
||||||
elif method == "modify":
|
|
||||||
channels_list = json.loads(request.form.get('channels', []))
|
|
||||||
for channel in channels_list:
|
|
||||||
updatedict = {}
|
|
||||||
for key in list(channel.keys()):
|
|
||||||
if key != "id":
|
|
||||||
if key in ["name", "callsign", "thumbnail"]:
|
|
||||||
updatedict[key] = str(channel[key])
|
|
||||||
elif key in ["number"]:
|
|
||||||
number = str(channel[key])
|
|
||||||
if "." in number:
|
|
||||||
updatedict["subnumber"] = number.split(".")[1]
|
|
||||||
updatedict["number"] = number.split(".")[0]
|
|
||||||
else:
|
|
||||||
updatedict["number"] = number
|
|
||||||
elif key in ["enabled"]:
|
|
||||||
confvalue = channel[key]
|
|
||||||
if str(confvalue).lower() in ["false"]:
|
|
||||||
confvalue = False
|
|
||||||
elif str(confvalue).lower() in ["true"]:
|
|
||||||
confvalue = True
|
|
||||||
updatedict[key] = confvalue
|
|
||||||
elif key in ["favorite", "HD"]:
|
|
||||||
updatedict[key] = int(channel[key])
|
|
||||||
else:
|
|
||||||
channel_id = str(channel[key])
|
|
||||||
self.fhdhr.device.channels.set_channel_status("id", channel_id, updatedict, origin)
|
|
||||||
|
|
||||||
elif method == "scan":
|
|
||||||
self.fhdhr.device.channels.get_channels(forceupdate=True, origin=origin)
|
|
||||||
|
|
||||||
else:
|
|
||||||
return "Invalid Method"
|
|
||||||
|
|
||||||
if redirect_url:
|
|
||||||
return redirect("%s?retmessage=%s" % (redirect_url, urllib.parse.quote("%s Success" % method)))
|
|
||||||
else:
|
|
||||||
if method == "scan":
|
|
||||||
return redirect('/lineup_status.json')
|
|
||||||
else:
|
|
||||||
return "%s Success" % method
|
|
||||||
@ -1,132 +0,0 @@
|
|||||||
from flask import Response, request, redirect
|
|
||||||
import urllib.parse
|
|
||||||
import json
|
|
||||||
import datetime
|
|
||||||
|
|
||||||
from fHDHR.tools import humanized_time, channel_sort
|
|
||||||
|
|
||||||
|
|
||||||
class EPG():
|
|
||||||
"""Methods to create xmltv.xml"""
|
|
||||||
endpoints = ["/api/epg"]
|
|
||||||
endpoint_name = "api_epg"
|
|
||||||
endpoint_methods = ["GET", "POST"]
|
|
||||||
|
|
||||||
def __init__(self, fhdhr):
|
|
||||||
self.fhdhr = fhdhr
|
|
||||||
|
|
||||||
def __call__(self, *args):
|
|
||||||
return self.get(*args)
|
|
||||||
|
|
||||||
def get(self, *args):
|
|
||||||
|
|
||||||
method = request.args.get('method', default="get", type=str)
|
|
||||||
|
|
||||||
source = request.args.get('source', default=self.fhdhr.config.dict["epg"]["def_method"], type=str)
|
|
||||||
if source not in list(self.fhdhr.config.dict["epg"]["valid_methods"].keys()):
|
|
||||||
return "%s Invalid epg method" % source
|
|
||||||
|
|
||||||
redirect_url = request.args.get('redirect', default=None, type=str)
|
|
||||||
|
|
||||||
if method == "get":
|
|
||||||
|
|
||||||
epgdict = self.fhdhr.device.epg.get_epg(source)
|
|
||||||
if source in self.fhdhr.origins.valid_origins:
|
|
||||||
epgdict = epgdict.copy()
|
|
||||||
for c in list(epgdict.keys()):
|
|
||||||
chan_obj = self.fhdhr.device.channels.get_channel_obj("origin_id", epgdict[c]["id"], source)
|
|
||||||
epgdict[chan_obj.number] = epgdict.pop(c)
|
|
||||||
epgdict[chan_obj.number]["name"] = chan_obj.dict["name"]
|
|
||||||
epgdict[chan_obj.number]["callsign"] = chan_obj.dict["callsign"]
|
|
||||||
epgdict[chan_obj.number]["number"] = chan_obj.number
|
|
||||||
epgdict[chan_obj.number]["id"] = chan_obj.dict["origin_id"]
|
|
||||||
epgdict[chan_obj.number]["thumbnail"] = chan_obj.thumbnail
|
|
||||||
|
|
||||||
# Sort the channels
|
|
||||||
sorted_channel_list = channel_sort(list(epgdict.keys()))
|
|
||||||
sorted_chan_guide = {}
|
|
||||||
for channel in sorted_channel_list:
|
|
||||||
sorted_chan_guide[channel] = epgdict[channel]
|
|
||||||
|
|
||||||
epg_json = json.dumps(sorted_chan_guide, indent=4)
|
|
||||||
|
|
||||||
return Response(status=200,
|
|
||||||
response=epg_json,
|
|
||||||
mimetype='application/json')
|
|
||||||
|
|
||||||
elif method == "current":
|
|
||||||
|
|
||||||
nowtime = datetime.datetime.utcnow().timestamp()
|
|
||||||
|
|
||||||
chan_guide_list = []
|
|
||||||
|
|
||||||
whatson = self.fhdhr.device.epg.whats_on_allchans(source)
|
|
||||||
|
|
||||||
# Sort the channels
|
|
||||||
sorted_channel_list = channel_sort(list(whatson.keys()))
|
|
||||||
sorted_chan_guide = {}
|
|
||||||
for channel in sorted_channel_list:
|
|
||||||
sorted_chan_guide[channel] = whatson[channel]
|
|
||||||
|
|
||||||
for channel in list(sorted_chan_guide.keys()):
|
|
||||||
if sorted_chan_guide[channel]["listing"][0]["time_end"]:
|
|
||||||
remaining_time = humanized_time(sorted_chan_guide[channel]["listing"][0]["time_end"] - nowtime)
|
|
||||||
else:
|
|
||||||
remaining_time = "N/A"
|
|
||||||
|
|
||||||
chan_dict = {
|
|
||||||
"name": sorted_chan_guide[channel]["name"],
|
|
||||||
"number": sorted_chan_guide[channel]["number"],
|
|
||||||
"chan_thumbnail": sorted_chan_guide[channel]["thumbnail"],
|
|
||||||
"listing_title": sorted_chan_guide[channel]["listing"][0]["title"],
|
|
||||||
"listing_thumbnail": sorted_chan_guide[channel]["listing"][0]["thumbnail"],
|
|
||||||
"listing_description": sorted_chan_guide[channel]["listing"][0]["description"],
|
|
||||||
"listing_remaining_time": str(remaining_time)
|
|
||||||
}
|
|
||||||
|
|
||||||
for time_item in ["time_start", "time_end"]:
|
|
||||||
|
|
||||||
if not sorted_chan_guide[channel]["listing"][0][time_item]:
|
|
||||||
chan_dict["listing_%s" % time_item] = "N/A"
|
|
||||||
elif str(sorted_chan_guide[channel]["listing"][0][time_item]).endswith(tuple(["+0000", "+00:00"])):
|
|
||||||
chan_dict["listing_%s" % time_item] = str(sorted_chan_guide[channel]["listing"][0][time_item])
|
|
||||||
else:
|
|
||||||
chan_dict["listing_%s" % time_item] = str(datetime.datetime.fromtimestamp(sorted_chan_guide[channel]["listing"][0][time_item]))
|
|
||||||
|
|
||||||
if source in self.fhdhr.origins.valid_origins:
|
|
||||||
chan_obj = self.fhdhr.device.channels.get_channel_obj("origin_id", sorted_chan_guide[channel]["id"], source)
|
|
||||||
|
|
||||||
chan_dict["name"] = chan_obj.dict["name"]
|
|
||||||
chan_dict["number"] = chan_obj.number
|
|
||||||
chan_dict["chan_thumbnail"] = chan_obj.thumbnail
|
|
||||||
chan_dict["enabled"] = chan_obj.dict["enabled"]
|
|
||||||
chan_dict["m3u_url"] = chan_obj.api_m3u_url
|
|
||||||
|
|
||||||
chan_dict["listing_thumbnail"] = chan_dict["listing_thumbnail"] or chan_obj.thumbnail
|
|
||||||
else:
|
|
||||||
if not chan_dict["listing_thumbnail"]:
|
|
||||||
chan_dict["listing_thumbnail"] = chan_dict["chan_thumbnail"]
|
|
||||||
if not chan_dict["listing_thumbnail"]:
|
|
||||||
chan_dict["listing_thumbnail"] = "/api/images?method=generate&type=channel&message=%s" % chan_dict["number"]
|
|
||||||
|
|
||||||
chan_guide_list.append(chan_dict)
|
|
||||||
|
|
||||||
epg_json = json.dumps(chan_guide_list, indent=4)
|
|
||||||
|
|
||||||
return Response(status=200,
|
|
||||||
response=epg_json,
|
|
||||||
mimetype='application/json')
|
|
||||||
|
|
||||||
elif method == "update":
|
|
||||||
self.fhdhr.device.epg.update(source)
|
|
||||||
|
|
||||||
elif method == "clearcache":
|
|
||||||
self.fhdhr.device.epg.clear_epg_cache(source)
|
|
||||||
|
|
||||||
else:
|
|
||||||
return "%s Invalid Method" % method
|
|
||||||
|
|
||||||
if redirect_url:
|
|
||||||
return redirect("%s?retmessage=%s" % (redirect_url, urllib.parse.quote("%s Success" % method)))
|
|
||||||
else:
|
|
||||||
return "%s Success" % method
|
|
||||||
@ -1,127 +0,0 @@
|
|||||||
from flask import Response, request, redirect
|
|
||||||
import urllib.parse
|
|
||||||
from io import StringIO
|
|
||||||
|
|
||||||
from fHDHR.tools import channel_sort
|
|
||||||
|
|
||||||
|
|
||||||
class M3U():
    """Serve the fHDHR channel lineup as an M3U playlist.

    Registered by the endpoint loader at /api/m3u and /api/channels.m3u.
    Query parameters handled by get():
      method   -- only "get" produces a playlist; anything else falls
                  through to the redirect/"Success" tail below.
      origin   -- optional origin/service name; must be one of
                  self.fhdhr.origins.valid_origins when supplied.
      channel  -- "all" (default) or a specific channel number/id.
      redirect -- optional URL to redirect to after a non-"get" method.
    """

    # Routing metadata consumed by the fHDHR Flask endpoint loader.
    endpoints = ["/api/m3u", "/api/channels.m3u"]
    endpoint_name = "api_m3u"
    endpoint_methods = ["GET", "POST"]

    def __init__(self, fhdhr):
        # fhdhr: the core application object (config, device, origins, ...).
        self.fhdhr = fhdhr

    def __call__(self, *args):
        # The endpoint loader invokes the instance directly; delegate to get().
        return self.get(*args)

    def get(self, *args):
        """Build the playlist (method=get) or fall through to a redirect.

        Returns a flask Response with mimetype audio/x-mpegurl and a
        content-disposition attachment header, or a plain error string
        ("... Invalid channels origin", "Channel Disabled",
        "Invalid Channel") for bad selections.
        """

        # request.url_root ends with "/"; strip it so path joins below
        # ("%s%s" % (base_url, api_stream_url)) do not double the slash.
        base_url = request.url_root[:-1]

        method = request.args.get('method', default="get", type=str)
        channel = request.args.get('channel', default="all", type=str)
        redirect_url = request.args.get('redirect', default=None, type=str)

        if method == "get":

            # Reject an explicit origin that fHDHR does not know about.
            origin_methods = self.fhdhr.origins.valid_origins
            origin = request.args.get('origin', default=None, type=str)
            if origin and origin not in origin_methods:
                return "%s Invalid channels origin" % origin

            # M3U structural markers (see the M3U/EXTM3U de-facto format).
            FORMAT_DESCRIPTOR = "#EXTM3U"
            RECORD_MARKER = "#EXTINF"

            # Assemble the playlist in memory; written out once at the end.
            fakefile = StringIO()

            # Advertise the companion XMLTV guide URL in the header line
            # (both url-tvg and x-tvg-url spellings for player compatibility).
            xmltvurl = ('%s/api/xmltv' % base_url)

            fakefile.write("%s url-tvg=\"%s\" x-tvg-url=\"%s\"\n" % (FORMAT_DESCRIPTOR, xmltvurl, xmltvurl))

            # Channel objects selected for inclusion; disabled channels are
            # skipped (bulk listing) or rejected outright (single channel).
            channel_items = []

            if origin:
                if channel == "all":
                    # Every enabled channel from one origin.
                    fileName = "channels.m3u"
                    for fhdhr_id in [x["id"] for x in self.fhdhr.device.channels.get_channels(origin)]:
                        channel_obj = self.fhdhr.device.channels.get_channel_obj("id", fhdhr_id, origin)
                        if channel_obj.enabled:
                            channel_items.append(channel_obj)
                elif str(channel) in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number", origin)]:
                    # Single channel from this origin, selected by number.
                    channel_obj = self.fhdhr.device.channels.get_channel_obj("number", channel, origin)
                    fileName = "%s.m3u" % channel_obj.number
                    if channel_obj.enabled:
                        channel_items.append(channel_obj)
                    else:
                        return "Channel Disabled"
                elif channel != "all" and str(channel) in [str(x) for x in self.fhdhr.device.channels.get_channel_list("id", origin)]:
                    # Single channel from this origin, selected by fHDHR id.
                    channel_obj = self.fhdhr.device.channels.get_channel_obj("id", channel, origin)
                    fileName = "%s.m3u" % channel_obj.number
                    if channel_obj.enabled:
                        channel_items.append(channel_obj)
                    else:
                        return "Channel Disabled"
                # NOTE(review): if origin is set but `channel` matches neither
                # a number nor an id, no branch above runs and `fileName` is
                # never bound -- the attachment-header line below would raise
                # NameError. The outer "Invalid Channel" else does not cover
                # this path. TODO confirm and guard upstream.
            elif not origin and channel == "all":
                # Every enabled channel across all configured origins.
                fileName = "channels.m3u"
                # NOTE(review): this loop rebinds `origin` (None here) as the
                # loop variable -- harmless today since `origin` is not read
                # after the selection block, but fragile.
                for origin in list(self.fhdhr.origins.origins_dict.keys()):
                    for fhdhr_id in [x["id"] for x in self.fhdhr.device.channels.get_channels(origin)]:
                        channel_obj = self.fhdhr.device.channels.get_channel_obj("id", fhdhr_id, origin)
                        if channel_obj.enabled:
                            channel_items.append(channel_obj)
            elif not origin and channel != "all" and str(channel) in [str(x) for x in self.fhdhr.device.channels.get_channel_list("id")]:
                # Single channel looked up by fHDHR id with no origin given.
                channel_obj = self.fhdhr.device.channels.get_channel_obj("id", channel)
                fileName = "%s.m3u" % channel_obj.number
                if channel_obj.enabled:
                    channel_items.append(channel_obj)
                else:
                    return "Channel Disabled"
            else:
                return "Invalid Channel"

            # Build the per-channel EXTINF attribute dicts, keyed by channel
            # number. NOTE(review): if two origins share a channel number,
            # the later one overwrites the earlier entry here -- presumably
            # numbers are unique across origins; verify.
            channels_info = {}
            for channel_obj in channel_items:

                # Proxy the logo through fHDHR when configured to, or when
                # the channel has no thumbnail of its own.
                if self.fhdhr.config.dict["epg"]["images"] == "proxy" or not channel_obj.thumbnail:
                    logourl = ('%s/api/images?method=get&type=channel&id=%s' %
                               (base_url, str(channel_obj.dict['origin_id'])))
                else:
                    logourl = channel_obj.thumbnail

                channels_info[channel_obj.number] = {
                    "channelID": str(channel_obj.dict['origin_id']),
                    "tvg-chno": str(channel_obj.number),
                    "tvg-name": str(channel_obj.dict['name']),
                    "tvg-id": str(channel_obj.number),
                    "tvg-logo": logourl,
                    # group-title: origin name, used as the playlist group.
                    "group-title": channel_obj.origin,
                    # group-titleb: display name appended after the comma on
                    # the EXTINF line (not emitted as an attribute below).
                    "group-titleb": str(channel_obj.dict['name']),
                    "stream_url": "%s%s" % (base_url, channel_obj.api_stream_url)
                    }

            # Sort the channels
            sorted_channel_list = channel_sort(list(channels_info.keys()))
            sorted_chan_guide = []
            for channel in sorted_channel_list:
                sorted_chan_guide.append(channels_info[channel])

            # Emit one EXTINF record + stream URL per channel.
            for channel_item_dict in sorted_chan_guide:
                m3ustring = "%s:0 " % (RECORD_MARKER)
                for chan_key in list(channel_item_dict.keys()):
                    # Skip the group-title* and stream_url keys here; the
                    # startswith-tuple match intentionally also catches
                    # "group-titleb". Both are written explicitly below.
                    if not chan_key.startswith(tuple(["group-title", "stream_url"])):
                        m3ustring += "%s=\"%s\" " % (chan_key, channel_item_dict[chan_key])
                m3ustring += "group-title=\"%s\",%s\n" % (channel_item_dict["group-title"], channel_item_dict["group-titleb"])
                m3ustring += "%s\n" % channel_item_dict["stream_url"]
                fakefile.write(m3ustring)

            channels_m3u = fakefile.getvalue()

            # Serve as a downloadable .m3u attachment.
            resp = Response(status=200, response=channels_m3u, mimetype='audio/x-mpegurl')
            resp.headers["content-disposition"] = "attachment; filename=%s" % fileName
            return resp

        # Non-"get" methods: acknowledge and optionally bounce back to the
        # caller-supplied page with a quoted status message.
        if redirect_url:
            return redirect("%s?retmessage=%s" % (redirect_url, urllib.parse.quote("%s Success" % method)))
        else:
            return "%s Success" % method
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user