Compare commits
No commits in common. "main" and "v0.4.0" have entirely different histories.
17
.github/stale.yml
vendored
@ -1,17 +0,0 @@
|
||||
# Number of days of inactivity before an issue becomes stale
|
||||
daysUntilStale: 60
|
||||
# Number of days of inactivity before a stale issue is closed
|
||||
daysUntilClose: 7
|
||||
# Issues with these labels will never be considered stale
|
||||
exemptLabels:
|
||||
- pinned
|
||||
- security
|
||||
# Label to use when marking an issue as stale
|
||||
staleLabel: wontfix
|
||||
# Comment to post when marking an issue as stale. Set to `false` to disable
|
||||
markComment: >
|
||||
This issue has been automatically marked as stale because it has not had
|
||||
recent activity. It will be closed if no further activity occurs. Thank you
|
||||
for your contributions.
|
||||
# Comment to post when closing a stale issue. Set to `false` to disable
|
||||
closeComment: false
|
||||
@ -1,7 +1,7 @@
|
||||
FROM python:3.8-slim
|
||||
|
||||
RUN apt-get -qq update && \
|
||||
apt-get -qq -y install ffmpeg gcc && \
|
||||
apt-get -qq -y install ffmpeg && \
|
||||
apt-get autoclean && \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
|
||||
@ -9,4 +9,4 @@ COPY ./ /app/
|
||||
WORKDIR /app
|
||||
RUN pip3 install -r requirements.txt
|
||||
|
||||
ENTRYPOINT ["python3", "/app/main.py", "--config", "/app/config/config.ini"]
|
||||
ENTRYPOINT ["python3", "/app/main.py", "--c", "/app/config/config.ini"]
|
||||
|
||||
28
README.md
@ -1,22 +1,30 @@
|
||||
<p align="center">fHDHR_NextPVR <img src="docs/images/logo.ico" alt="Logo"/></p>
|
||||
# fHDHR_NextPVR
|
||||
|
||||
|
||||
Welcome to the world of streaming content as a DVR device! We use some fancy python here to achieve a system of:
|
||||
Welcome to the world of streaming to Plex! We use some fancy python here to achieve a system of:
|
||||
|
||||
**f**un
|
||||
**H**ome
|
||||
**D**istribution
|
||||
**H**iatus
|
||||
**H**iatus &
|
||||
**R**ecreation
|
||||
|
||||
|
||||
Please Check the [Docs](docs/README.md) for Installation information.
|
||||
(based off of original code from
|
||||
|
||||
fHDHR is labeled as beta until we reach v1.0.0
|
||||
* [tvhProxy by jkaberg](https://github.com/jkaberg/tvhProxy)
|
||||
* [locast2plex by tgorgdotcom](https://github.com/tgorgdotcom/locast2plex)
|
||||
* myself coding for locast2plex
|
||||
|
||||
)
|
||||
|
||||
PRs welcome for:
|
||||
|
||||
* Docker support
|
||||
|
||||
|
||||
Please Check the repository wiki for Installation information.
|
||||
|
||||
Officially marking this Fork as Beta.
|
||||
|
||||
Join us in `#fHDHR <irc://irc.freenode.net/#fHDHR>`_ on Freenode.
|
||||
|
||||
# !!NOTICE!!
|
||||
|
||||
To reduce code duplication between variants, I am moving to a plugin system.
|
||||
The normal variant repos will stay active during the transition.
|
||||
|
||||
59
config.all.ini
Normal file
@ -0,0 +1,59 @@
|
||||
[main]
|
||||
# uuid =
|
||||
# cache_dir =
|
||||
# servicename = NextPVR
|
||||
# reponame = fHDHR_NextPVR
|
||||
|
||||
[fhdhr]
|
||||
# address = 0.0.0.0
|
||||
# discovery_address = 0.0.0.0
|
||||
# port = 5004
|
||||
# stream_type = direct
|
||||
# tuner_count = 4
|
||||
# friendlyname = fHDHR-NextPVR
|
||||
# reporting_firmware_name = fHDHR_NextPVR
|
||||
# reporting_manufacturer = BoronDust
|
||||
# reporting_model = fHDHR
|
||||
# reporting_firmware_ver = 20201001
|
||||
# reporting_tuner_type = Antenna
|
||||
# device_auth = fHDHR
|
||||
|
||||
[epg]
|
||||
# images = pass
|
||||
# method = origin
|
||||
# update_frequency = 43200
|
||||
|
||||
[ffmpeg]
|
||||
# ffmpeg_path = ffmpeg
|
||||
# bytes_per_read = 1152000
|
||||
|
||||
[direct_stream]
|
||||
# chunksize = 1048576
|
||||
|
||||
[logging]
|
||||
# level = WARNING
|
||||
|
||||
[database]
|
||||
# type = sqlite
|
||||
# driver = None
|
||||
|
||||
[nextpvr]
|
||||
# address = localhost
|
||||
# port = 8866
|
||||
# ssl =
|
||||
# pin =
|
||||
# weight = 300
|
||||
|
||||
[zap2it]
|
||||
# delay = 5
|
||||
# postalcode = None
|
||||
# affiliate_id = gapzap
|
||||
# country = USA
|
||||
# device = -
|
||||
# headendid = lineupId
|
||||
# isoverride = True
|
||||
# languagecode = en
|
||||
# pref =
|
||||
# timespan = 6
|
||||
# timezone =
|
||||
# userid = -
|
||||
@ -1,39 +0,0 @@
|
||||
{
|
||||
"database":{
|
||||
"type":{
|
||||
"value": "sqlite",
|
||||
"config_file": true,
|
||||
"config_web": false
|
||||
},
|
||||
"driver":{
|
||||
"value": "none",
|
||||
"config_file": true,
|
||||
"config_web": false
|
||||
},
|
||||
"user":{
|
||||
"value": "none",
|
||||
"config_file": true,
|
||||
"config_web": false
|
||||
},
|
||||
"pass":{
|
||||
"value": "none",
|
||||
"config_file": true,
|
||||
"config_web": false
|
||||
},
|
||||
"host":{
|
||||
"value": "none",
|
||||
"config_file": true,
|
||||
"config_web": false
|
||||
},
|
||||
"port":{
|
||||
"value": "none",
|
||||
"config_file": true,
|
||||
"config_web": false
|
||||
},
|
||||
"name":{
|
||||
"value": "none",
|
||||
"config_file": true,
|
||||
"config_web": false
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1,40 +0,0 @@
|
||||
{
|
||||
"epg":{
|
||||
"images":{
|
||||
"value": "pass",
|
||||
"config_file": true,
|
||||
"config_web": true
|
||||
},
|
||||
"method":{
|
||||
"value": "none",
|
||||
"config_file": true,
|
||||
"config_web": true
|
||||
},
|
||||
"update_frequency":{
|
||||
"value": 43200,
|
||||
"config_file": true,
|
||||
"config_web": true
|
||||
},
|
||||
"reverse_days": {
|
||||
"value": -1,
|
||||
"config_file": true,
|
||||
"config_web": true
|
||||
},
|
||||
"forward_days": {
|
||||
"value": 7,
|
||||
"config_file": true,
|
||||
"config_web": true
|
||||
},
|
||||
"block_size": {
|
||||
"value": 1800,
|
||||
"config_file": true,
|
||||
"config_web": true
|
||||
}
|
||||
,
|
||||
"xmltv_offset": {
|
||||
"value": "+0000",
|
||||
"config_file": true,
|
||||
"config_web": true
|
||||
}
|
||||
}
|
||||
}
|
||||
31
data/internal_config/fhdhr.ini
Normal file
@ -0,0 +1,31 @@
|
||||
[main]
|
||||
uuid =
|
||||
cache_dir =
|
||||
|
||||
[fhdhr]
|
||||
address = 0.0.0.0
|
||||
discovery_address = 0.0.0.0
|
||||
port = 5004
|
||||
reporting_manufacturer = BoronDust
|
||||
reporting_model = fHDHR
|
||||
reporting_firmware_ver = 20201001
|
||||
reporting_tuner_type = Antenna
|
||||
device_auth = fHDHR
|
||||
require_auth = False
|
||||
|
||||
[epg]
|
||||
images = pass
|
||||
|
||||
[ffmpeg]
|
||||
ffmpeg_path = ffmpeg
|
||||
bytes_per_read = 1152000
|
||||
|
||||
[direct_stream]
|
||||
chunksize = 1048576
|
||||
|
||||
[logging]
|
||||
level = WARNING
|
||||
|
||||
[database]
|
||||
type = sqlite
|
||||
driver = None
|
||||
@ -1,39 +0,0 @@
|
||||
{
|
||||
"fhdhr":{
|
||||
"address":{
|
||||
"value": "0.0.0.0",
|
||||
"config_file": true,
|
||||
"config_web": true
|
||||
},
|
||||
"discovery_address":{
|
||||
"value": "none",
|
||||
"config_file": true,
|
||||
"config_web": true
|
||||
},
|
||||
"port":{
|
||||
"value": 5004,
|
||||
"config_file": true,
|
||||
"config_web": true
|
||||
},
|
||||
"device_auth":{
|
||||
"value": "fHDHR",
|
||||
"config_file": true,
|
||||
"config_web": true
|
||||
},
|
||||
"require_auth":{
|
||||
"value": false,
|
||||
"config_file": true,
|
||||
"config_web": true
|
||||
},
|
||||
"chanscan_on_start":{
|
||||
"value": true,
|
||||
"config_file": true,
|
||||
"config_web": true
|
||||
},
|
||||
"friendlyname":{
|
||||
"value": "fHDHR",
|
||||
"config_file": true,
|
||||
"config_web": true
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1,9 +0,0 @@
|
||||
{
|
||||
"logging":{
|
||||
"level":{
|
||||
"value": "INFO",
|
||||
"config_file": true,
|
||||
"config_web": true
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1,24 +0,0 @@
|
||||
{
|
||||
"main":{
|
||||
"uuid":{
|
||||
"value": "none",
|
||||
"config_file": true,
|
||||
"config_web": false
|
||||
},
|
||||
"cache_dir":{
|
||||
"value": "none",
|
||||
"config_file": true,
|
||||
"config_web": true
|
||||
},
|
||||
"servicename":{
|
||||
"value": "fHDHR",
|
||||
"config_file": false,
|
||||
"config_web": false
|
||||
},
|
||||
"reponame":{
|
||||
"value": "fHDHR",
|
||||
"config_file": false,
|
||||
"config_web": false
|
||||
}
|
||||
}
|
||||
}
|
||||
25
data/internal_config/serviceconf.ini
Normal file
@ -0,0 +1,25 @@
|
||||
[main]
|
||||
servicename = NextPVR
|
||||
dictpopname = nextpvr
|
||||
reponame = fHDHR_NextPVR
|
||||
required = nextpvr/pin
|
||||
valid_epg_methods = None,blocks,origin,zap2it
|
||||
|
||||
[fhdhr]
|
||||
friendlyname = fHDHR-NextPVR
|
||||
stream_type = direct
|
||||
tuner_count = 4
|
||||
reporting_firmware_name = fHDHR_NextPVR
|
||||
|
||||
[epg]
|
||||
method = origin
|
||||
update_frequency = 43200
|
||||
|
||||
[nextpvr]
|
||||
address = localhost
|
||||
port = 8866
|
||||
ssl = False
|
||||
pin =
|
||||
weight = 300
|
||||
epg_update_frequency = 43200
|
||||
sid =
|
||||
@ -1,29 +0,0 @@
|
||||
{
|
||||
"ssdp":{
|
||||
"enabled":{
|
||||
"value": true,
|
||||
"config_file": true,
|
||||
"config_web": false
|
||||
},
|
||||
"max_age":{
|
||||
"value": 1800,
|
||||
"config_file": true,
|
||||
"config_web": false
|
||||
},
|
||||
"proto":{
|
||||
"value": "ipv4",
|
||||
"config_file": true,
|
||||
"config_web": false
|
||||
},
|
||||
"iface":{
|
||||
"value": "none",
|
||||
"config_file": true,
|
||||
"config_web": false
|
||||
},
|
||||
"multicast_address":{
|
||||
"value": "none",
|
||||
"config_file": true,
|
||||
"config_web": false
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1,24 +0,0 @@
|
||||
{
|
||||
"streaming":{
|
||||
"bytes_per_read": {
|
||||
"value": 1152000,
|
||||
"config_file": true,
|
||||
"config_web": true
|
||||
},
|
||||
"origin_quality": {
|
||||
"value": "none",
|
||||
"config_file": true,
|
||||
"config_web": true
|
||||
},
|
||||
"transcode_quality": {
|
||||
"value": "none",
|
||||
"config_file": true,
|
||||
"config_web": true
|
||||
},
|
||||
"method": {
|
||||
"value": "direct",
|
||||
"config_file": true,
|
||||
"config_web": true
|
||||
}
|
||||
}
|
||||
}
|
||||
13
data/internal_config/zap2it.ini
Normal file
@ -0,0 +1,13 @@
|
||||
[zap2it]
|
||||
delay = 5
|
||||
postalcode =
|
||||
affiliate_id = gapzap
|
||||
country = USA
|
||||
device = -
|
||||
headendid = lineupId
|
||||
isoverride = True
|
||||
languagecode = en
|
||||
pref =
|
||||
timespan = 6
|
||||
timezone =
|
||||
userid = -
|
||||
|
Before Width: | Height: | Size: 15 KiB After Width: | Height: | Size: 15 KiB |
8
data/www/style.css
Normal file
@ -0,0 +1,8 @@
|
||||
.pull-right { float: right; }
|
||||
|
||||
.pull-lef { float: left; }
|
||||
|
||||
.center {
|
||||
margin-left: auto;
|
||||
margin-right: auto;
|
||||
}
|
||||
@ -1,191 +0,0 @@
|
||||
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>
|
||||
|
||||
---
|
||||
[Main](README.md) | [Setup and Usage](Usage.md) | [NextPVR](Origin.md) | [Credits/Related Projects](Related-Projects.md)
|
||||
---
|
||||
**f**un
|
||||
**H**ome
|
||||
**D**istribution
|
||||
**H**iatus
|
||||
**R**ecreation
|
||||
|
||||
---
|
||||
|
||||
[Basic Configuration](Config.md) | [Advanced Configuration](ADV_Config.md) | [WebUI](WebUI.md)
|
||||
|
||||
---
|
||||
|
||||
Here, we'll break down all of the configuration options per section.
|
||||
|
||||
## Main
|
||||
Here's the `main` section.
|
||||
* `uuid` will be created automatically, you need not worry about this.
|
||||
* `cache_dir` is handy for keeping cached files out of the script directory. This is helpful for reinstalls as well as development.
|
||||
|
||||
````
|
||||
[main]
|
||||
# uuid =
|
||||
# cache_dir =
|
||||
````
|
||||
|
||||
## streaming
|
||||
|
||||
* `method` can be set to `ffmpeg`, `vlc` or `direct`.
|
||||
* `bytes_per_read` determines how many bytes of the stream to read before sending the data to your client. Increasing this value may cause longer load times, and lowering it may effect `stuttering`.
|
||||
* `origin_quality` can be set to high,medium,low for most variants. Variants that make use of m3u8 will Autoselect High for the direct method if not set. ffmpeg/vlc will determine the best stream on their own. Some Variants can allow alternative values.
|
||||
* `transcode_quality` works with ffmpeg/vlc to use fHDHR for handling quality instead of the origin. Valid settings include: heavy,mobile,internet720,internet480,internet360,internet240
|
||||
|
||||
|
||||
````
|
||||
[streaming]
|
||||
# method = direct
|
||||
# bytes_per_read = 1152000
|
||||
# origin_quality = None
|
||||
# transcode_quality = None
|
||||
````
|
||||
|
||||
|
||||
## fhdhr
|
||||
|
||||
The `fhdhr` contains all the configuration options for interfacing between this script and your media platform.
|
||||
* `address` and `port` are what we will allow the script to listen on. `0.0.0.0` is the default, and will respond to all.
|
||||
* `discovery_address` may be helpful for making SSDP work properly. If `address` is not `0.0.0.0`, we will use that. If this is not set to a real IP, we won't run SSDP. SSDP is only really helpful for discovering in Plex/Emby. It's a wasted resource since you can manually add the `ip:port` of the script to Plex.
|
||||
* `tuner_count` is a limit of devices able to stream from the script. The default is 3, as per Locast's documentation. A 4th is possible, but is not reccomended.
|
||||
* `friendlyname` is to set the name that Plex sees the script as.
|
||||
* `reporting_*` are settings that show how the script projects itself as a hardware device.
|
||||
* `device_auth` and `require_auth` are for an unimplemented Authentication feature.
|
||||
* `chanscan_on_start` Scans Origin for new channels at startup.
|
||||
|
||||
|
||||
````
|
||||
[fhdhr]
|
||||
# address = 0.0.0.0
|
||||
# discovery_address = 0.0.0.0
|
||||
# port = 5004
|
||||
# tuner_count = 4
|
||||
# friendlyname = fHDHR-Locast
|
||||
# reporting_firmware_name = fHDHR_Locast
|
||||
# reporting_manufacturer = BoronDust
|
||||
# reporting_model = fHDHR
|
||||
# reporting_firmware_ver = 20201001
|
||||
# reporting_tuner_type = Antenna
|
||||
# device_auth = fHDHR
|
||||
# require_auth = False
|
||||
# chanscan_on_start = True
|
||||
````
|
||||
|
||||
# EPG
|
||||
* `images` can be set to `proxy` or `pass`. If you choose `proxy`, images will be reverse proxied through fHDHR.
|
||||
* `method` defaults to `origin` and will pull the xmltv data from Locast. Other Options include `blocks` which is an hourly schedule with minimal channel information. Another option is `zap2it`, which is another source of EPG information. Channel Numbers may need to be manually mapped.
|
||||
* `update_frequency` determines how often we check for new scheduling information. In Seconds.
|
||||
* `reverse_days` allows Blocks of EPG data to be created prior to the start of the EPG Source data.
|
||||
* `forward_days` allows Blocks of EPG data to be created after the end of the EPG Source data.
|
||||
* `block_size` in seconds, sets the default block size for data before, after and missing timeslots.
|
||||
* `xmltv_offset` allows the final xmltv file to have an offset for users with timezone issues.
|
||||
|
||||
````
|
||||
[epg]
|
||||
# images = pass
|
||||
# method = origin
|
||||
# update_frequency = 43200
|
||||
# reverse_days = -1
|
||||
# forward_days = 7
|
||||
# block_size = 1800
|
||||
# xmltv_offset = +0000
|
||||
````
|
||||
|
||||
## ffmpeg
|
||||
|
||||
The `ffmpeg` section includes:
|
||||
* `path` is useful if ffmpeg is not in your systems PATH, or you want to manually specify.
|
||||
|
||||
````
|
||||
[ffmpeg]
|
||||
# path = ffmpeg
|
||||
````
|
||||
|
||||
## vlc
|
||||
|
||||
The `vlc` section includes:
|
||||
* `path` is useful if ffmpeg is not in your systems PATH, or you want to manually specify.
|
||||
|
||||
````
|
||||
[vlc]
|
||||
# path = cvlc
|
||||
````
|
||||
|
||||
# Logging
|
||||
* `level` determines the amount of logging you wish to see in the console, as well as to the logfile (stored in your cache directory).
|
||||
|
||||
````
|
||||
[logging]
|
||||
# level = WARNING
|
||||
````
|
||||
|
||||
# Database
|
||||
* experiment with these settings at your own risk. We use sqlalchemy to provide database options, but we default to sqlite.
|
||||
|
||||
TODO: improve documentation here.
|
||||
|
||||
````
|
||||
[database]
|
||||
# type = sqlite
|
||||
# driver = None
|
||||
user = None
|
||||
pass = None
|
||||
host = None
|
||||
port = None
|
||||
name = None
|
||||
````
|
||||
|
||||
## RMG
|
||||
|
||||
````
|
||||
# enabled = True
|
||||
````
|
||||
|
||||
## SSDP
|
||||
|
||||
````
|
||||
# enabled = True
|
||||
# max_age = 1800
|
||||
# proto = ipv6
|
||||
# iface = None
|
||||
# multicast_address = None
|
||||
````
|
||||
|
||||
## NextPVR
|
||||
The `nextpvr` section
|
||||
* What `address` to contact nextpvrat.
|
||||
* what `port` does nextpvruse
|
||||
* does nextpvruse `ssl`?
|
||||
* `pin` is a required credential.
|
||||
|
||||
````
|
||||
[nextpvr]
|
||||
address = localhost
|
||||
port = 8866
|
||||
ssl =
|
||||
pin =
|
||||
````
|
||||
|
||||
## zap2it
|
||||
|
||||
`zap2it` contains a ton of configuration options, and defaults to options that in my experience don't need to be adjusted.
|
||||
* `postalcode` is a value of importance, and is helpful. If not set, the script will attempt to retrieve your postalcode automatically.
|
||||
|
||||
````
|
||||
[zap2it]
|
||||
# delay = 5
|
||||
# postalcode = None
|
||||
# affiliate_id = gapzap
|
||||
# country = USA
|
||||
# device = -
|
||||
# headendid = lineupId
|
||||
# isoverride = True
|
||||
# languagecode = en
|
||||
# pref =
|
||||
# timespan = 6
|
||||
# timezone =
|
||||
# userid = -
|
||||
````
|
||||
@ -1,43 +0,0 @@
|
||||
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>
|
||||
|
||||
---
|
||||
[Main](README.md) | [Setup and Usage](Usage.md) | [NextPVR](Origin.md) | [Credits/Related Projects](Related-Projects.md)
|
||||
---
|
||||
**f**un
|
||||
**H**ome
|
||||
**D**istribution
|
||||
**H**iatus
|
||||
**R**ecreation
|
||||
|
||||
---
|
||||
|
||||
[Basic Configuration](Config.md) | [Advanced Configuration](ADV_Config.md) | [WebUI](WebUI.md)
|
||||
|
||||
---
|
||||
|
||||
The example config file contains all of the things that the typical user may need to fill out.
|
||||
|
||||
Please see the Advanced Configuration page for more information.
|
||||
|
||||
## fHDHR
|
||||
|
||||
Under `fhdhr`, you'll find 2 addresses listed. `0.0.0.0` works great for a listen address, however, it seems that SSDP works best if the discovery address is set to the IP to say that there is a service at.
|
||||
|
||||
````
|
||||
[fhdhr]
|
||||
# address = 0.0.0.0
|
||||
# port = 5004
|
||||
# discovery_address = 0.0.0.0
|
||||
````
|
||||
|
||||
## NextPVR
|
||||
|
||||
NextPVR requires signin pin, so add that.
|
||||
|
||||
|
||||
````
|
||||
[nextpvr]
|
||||
address = localhost
|
||||
port = 8866
|
||||
pin =
|
||||
````
|
||||
@ -1,15 +0,0 @@
|
||||
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>
|
||||
|
||||
---
|
||||
[Main](README.md) | [Setup and Usage](Usage.md) | [NextPVR](Origin.md) | [Credits/Related Projects](Related-Projects.md)
|
||||
---
|
||||
**f**un
|
||||
**H**ome
|
||||
**D**istribution
|
||||
**H**iatus
|
||||
**R**ecreation
|
||||
|
||||
---
|
||||
|
||||
|
||||
This varient of fHDHR connects to a local NextPVR instance.
|
||||
@ -1,46 +0,0 @@
|
||||
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>
|
||||
|
||||
---
|
||||
[Main](README.md) | [Setup and Usage](Usage.md) | [NextPVR](Origin.md) | [Credits/Related Projects](Related-Projects.md)
|
||||
---
|
||||
**f**un
|
||||
**H**ome
|
||||
**D**istribution
|
||||
**H**iatus
|
||||
**R**ecreation
|
||||
|
||||
---
|
||||
|
||||
# The Boring Disclaimers (at the top of the docs for a reason)
|
||||
|
||||
|
||||
fHDHR is a Python service to take various sources of video and make them accessible to client software including, but not limited to:
|
||||
|
||||
* [Plex](https://www.plex.tv/)
|
||||
* [Emby](https://emby.media/)
|
||||
* [Jellyfin](https://jellyfin.org/)
|
||||
* [Channels](https://getchannels.com/)
|
||||
|
||||
fHDHR is not directly affiliated with the above client software, and you will receive NO support for this script via their forums.
|
||||
|
||||
fHDHR is able to connect to clients by emulating a piece of hardware called the [HDHomeRun from SiliconDust](https://www.silicondust.com/). fHDHR is in NO way affiliated with SiliconDust, and is NOT a HDHomeRun device. fHDHR simply uses the API structure used by the authentic HDHomeRun to connect to client DVR solutions.
|
||||
|
||||
# History
|
||||
|
||||
I got the Huappage QuadHD, and the Mohu Sail as a pandemic-project. All was fine working within Plex, but I also have emby setup as a backup to Plex when auth is broken.
|
||||
|
||||
I thought to myself, "Self, I should look on github for a way to share my tv tuner between the two".
|
||||
|
||||
That's when I tried both npvrProxy with NextPVR as well as tvhProxy with TVHeadend. I had to tinker with both to get them working, but I started testing which one I liked more.
|
||||
|
||||
Around this same time, I stumbled upon [locast2plex by tgorgdotcom](https://github.com/tgorgdotcom/locast2plex). I wanted to contribute to that project to get it to a point that I could fork it to work for other video stream sources.
|
||||
|
||||
The locast2plex code development wasn't going quite fast enough for the feature-creep in my head.
|
||||
|
||||
I then proceded to create the initial iteration of fHDHR which I originally called "FakeHDHR". I've rewritten the core functionality a few times before landing on the current code structure, which feels 'right'.
|
||||
|
||||
I've worked really hard to create a structure that simplifies new variants of the core code to work with different 'origin' streams. Combining these works really well with [xTeVe](https://github.com/xteve-project/xTeVe).
|
||||
|
||||
One of the variants goes as far as scraping a table from a PDF file for creating a channel guide!
|
||||
|
||||
I can easily create more variants of the project to do other video sources. Paid ones, I could potentially accept donations for, as I don't want to pay to develop for multiple platforms.
|
||||
@ -1,26 +0,0 @@
|
||||
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>
|
||||
|
||||
---
|
||||
[Main](README.md) | [Setup and Usage](Usage.md) | [NextPVR](Origin.md) | [Credits/Related Projects](Related-Projects.md)
|
||||
---
|
||||
**f**un
|
||||
**H**ome
|
||||
**D**istribution
|
||||
**H**iatus
|
||||
**R**ecreation
|
||||
|
||||
---
|
||||
|
||||
|
||||
While the fHDHR reops share very little code from the below projects, they were a source of inspiration:
|
||||
|
||||
* [tvhProxy by jkaberg](https://github.com/jkaberg/tvhProxy)
|
||||
* [locast2plex by tgorgdotcom](https://github.com/tgorgdotcom/locast2plex)
|
||||
|
||||
|
||||
Aside from the above, these other projects are worth a look as well:
|
||||
|
||||
* [npvrProxy](https://github.com/rogueosb/npvrProxy)
|
||||
* [xTeVe](https://xteve.de/)
|
||||
* [telly](https://github.com/tellytv/telly)
|
||||
* [dizquetv](https://github.com/vexorian/dizquetv)
|
||||
129
docs/Usage.md
@ -1,129 +0,0 @@
|
||||
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>
|
||||
|
||||
---
|
||||
[Main](README.md) | [Setup and Usage](Usage.md) | [NextPVR](Origin.md) | [Credits/Related Projects](Related-Projects.md)
|
||||
---
|
||||
**f**un
|
||||
**H**ome
|
||||
**D**istribution
|
||||
**H**iatus
|
||||
**R**ecreation
|
||||
|
||||
---
|
||||
|
||||
[Basic Configuration](Config.md) | [Advanced Configuration](ADV_Config.md) | [WebUI](WebUI.md)
|
||||
|
||||
---
|
||||
|
||||
# Author Notes
|
||||
|
||||
* All Testing is currently done in Proxmox LXC, Ubuntu 20.04, Python 3.8
|
||||
|
||||
|
||||
# Prerequisites
|
||||
|
||||
* A Linux or Mac "Server". Windows currently does not work. A "Server" is a computer that is typically always online.
|
||||
* Python 3.7 or later.
|
||||
* Consult [This Page](Origin.md) for additional setup specific to this variant of fHDHR.
|
||||
|
||||
|
||||
# Optional Prerequisites
|
||||
* If you intend to use Docker, [This Guide](https://docs.docker.com/get-started/) should help you get started. The author of fHDHR is not a docker user, but will still try to help.
|
||||
|
||||
fHDHR uses direct connections with video sources by default. Alternatively, you can install and update the [config](Config.md) accordingly. You will need to make these available to your systems PATH, or manually set their path via the config file.
|
||||
|
||||
* ffmpeg
|
||||
* vlc
|
||||
|
||||
|
||||
# Installation
|
||||
|
||||
## Linux
|
||||
|
||||
* Download the zip, or git clone
|
||||
* Navigate into your script directory and run `pip3 install -r requirements.txt`
|
||||
* Copy the included `config.example.ini` file to a known location. The script will not run without this. There is no default configuration file location. [Modify the configuration file to suit your needs.](Config.md)
|
||||
|
||||
* Run with `python3 main.py -c=` and the path to the config file.
|
||||
|
||||
|
||||
## Docker
|
||||
This portion of the guide assumes you are using a Linux system with both docker and docker-compose installed. This (or some variation thereof) may work on Mac or Windows, but has not been tested.
|
||||
|
||||
* this guide assumes we wish to use the `~/fhdhr` directory for our install (you can use whatever directory you like, just make the appropriate changes elsewhere in this guide) and that we are installing for NextPVR support
|
||||
* run the following commands to clone the repo into `~/fhdhr/fHDHR_NextPVR`
|
||||
```
|
||||
cd ~/fhdhr
|
||||
git clone https://github.com/fHDHR/fHDHR_NextPVR.git
|
||||
```
|
||||
* create your config.ini file (as described earlier in this guide) in the `~/fhdhr/fHDHR_NextPVR` directory
|
||||
* while still in the `~/fhdhr` directory, create the following `docker-compose.yml` file
|
||||
```
|
||||
version: '3'
|
||||
|
||||
services:
|
||||
nextpvr:
|
||||
build: ./fHDHR_NextPVR
|
||||
container_name: nextpvr
|
||||
network_mode: host
|
||||
volumes:
|
||||
- ./fHDHR_NextPVR/config.ini:/app/config/config.ini
|
||||
```
|
||||
* run the following command to build and launch the container
|
||||
```
|
||||
docker-compose up --build -d nextpvr
|
||||
```
|
||||
|
||||
After a short period of time (during which docker will build your new fHDHR container), you should now have a working build of fHDHR running inside a docker container.
|
||||
|
||||
As the code changes and new versions / bug fixes are released, at any point you can pull the latest version of the code and rebuild your container with the following commands:
|
||||
```
|
||||
cd ~/fhdhr/fHDHR_NextPVR
|
||||
git checkout master
|
||||
git pull
|
||||
cd ~/fhdhr
|
||||
docker-compose up --build -d nextpvr
|
||||
```
|
||||
<hr />
|
||||
|
||||
You can also run multiple instances of fHDHR to support additional sources by cloning the appropriate repo into your `~/fhdhr` directory and adding the necessary services to the docker-compose file we created above.
|
||||
|
||||
* for example, if we also wanted PlutoTV support, you would clone the PlutoTV repository:
|
||||
```
|
||||
cd ~/fhdhr
|
||||
git clone https://github.com/fHDHR/fHDHR_PlutoTV.git
|
||||
```
|
||||
* **NOTE**: if you are running multiple services on the same machine, you must change the port in your config.ini file for each one. For example, if NextPVR was using the default port of 5004, PlutoTV cannot also use that port. You must change the port in your PlutoTV config.ini file to something else (5005, for example).
|
||||
* add plutotv as a service in your `docker-compose.yml` file
|
||||
```
|
||||
version: '3'
|
||||
|
||||
services:
|
||||
nextpvr:
|
||||
build: ./fHDHR_NextPVR
|
||||
container_name: nextpvr
|
||||
network_mode: host
|
||||
volumes:
|
||||
- ./fHDHR_NextPVR/config.ini:/app/config/config.ini
|
||||
|
||||
plutotv:
|
||||
build: ./fHDHR_PlutoTV
|
||||
container_name: plutotv
|
||||
network_mode: host
|
||||
volumes:
|
||||
- ./fHDHR_PlutoTV/config.ini:/app/config/config.ini
|
||||
```
|
||||
* run the following command to build and launch the container
|
||||
```
|
||||
docker-compose up --build -d plutotv
|
||||
```
|
||||
|
||||
You can repeat these instructions for as many fHDHR containers as your system resources will allow.
|
||||
|
||||
# Setup
|
||||
|
||||
Now that you have fHDHR running, You can navigate (in a web browser) to the IP:Port from the configuration step above.
|
||||
|
||||
If you did not setup a `discovery_address` in your config, SSDP will be disabled. This is not a problem as clients like Plex can have the IP:Port entered manually!
|
||||
|
||||
You can copy the xmltv link from the webUI and use that in your client software to provide Channel Guide information.
|
||||
@ -1,98 +0,0 @@
|
||||
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>
|
||||
|
||||
---
|
||||
[Main](README.md) | [Setup and Usage](Usage.md) | [NextPVR](Origin.md) | [Credits/Related Projects](Related-Projects.md)
|
||||
---
|
||||
**f**un
|
||||
**H**ome
|
||||
**D**istribution
|
||||
**H**iatus
|
||||
**R**ecreation
|
||||
|
||||
---
|
||||
|
||||
[Basic Configuration](Config.md) | [Advanced Configuration](ADV_Config.md) | [WebUI](WebUI.md)
|
||||
|
||||
---
|
||||
|
||||
|
||||
This Page will introduce basic handling of the script from the Web Interface provided at IP:Port
|
||||
|
||||
The Pages are available in the buttons at the top, links to xmltv and m3u are provided at the top for ease of access.
|
||||
|
||||
|
||||
# Main Landing Page
|
||||
|
||||
Below is the main landing page with basic information.
|
||||
|
||||
<img src="screenshots/webui_main.PNG" alt="Main Page"/>
|
||||
|
||||
# NextPVR
|
||||
|
||||
Here you will have access to some basic information about the service we are proxying.
|
||||
|
||||
The webUI will still work, even if setup didn't go smoothly.
|
||||
|
||||
<img src="screenshots/webui_origin.PNG" alt="Origin Page"/>
|
||||
|
||||
# Guide
|
||||
|
||||
This Page give you information about what is currently playing on all stations. It will also show the time remaining for each item.
|
||||
|
||||
* Note: The Play link in the left hand column can be copied to play a channel in VLC media player!
|
||||
|
||||
<img src="screenshots/webui_guide.PNG" alt="Guide Page"/>
|
||||
|
||||
|
||||
# Cluster
|
||||
|
||||
Since SSDP is used for service discovery, I decided to also use it for ease of management.
|
||||
|
||||
This tab will not have the below options if SSDP isn't running.
|
||||
|
||||
Joining a cluster will provide a second row of buttons for the clustered servers.
|
||||
|
||||
Unjoined:
|
||||
|
||||
<img src="screenshots/webui_cluster_unjoined.PNG" alt="Cluster Page, UnJoined"/>
|
||||
|
||||
Joined:
|
||||
|
||||
<img src="screenshots/webui_cluster_joined.PNG" alt="Cluster Page, Joined"/>
|
||||
|
||||
|
||||
# Streams
|
||||
|
||||
This Page will show all active streams, and tuner information. You can also terminate a stream from here.
|
||||
|
||||
* Note: Clients will often have an amount buffered, and the connection termination is not immediate from a viewing perspective. However, the connection to the source is indeed cut off.
|
||||
|
||||
<img src="screenshots/webui_streams.PNG" alt="Streams Page"/>
|
||||
|
||||
# xmltv
|
||||
|
||||
This page will give you access to all the xmltv formats provided by this varient.
|
||||
|
||||
From here, you can manually update or even clear the cached epg, and then update.
|
||||
|
||||
<img src="screenshots/webui_xmltv.PNG" alt="xmltv Page"/>
|
||||
|
||||
# Version
|
||||
|
||||
This page will give valuable information about the environment the script is being run in.
|
||||
|
||||
<img src="screenshots/webui_version.PNG" alt="Version Page"/>
|
||||
|
||||
# Diganostics
|
||||
|
||||
This page has various links to json/xml files that make the magic work, as well as debug and cluster information.
|
||||
|
||||
<img src="screenshots/webui_diagnostics.PNG" alt="Diagnostics Page"/>
|
||||
|
||||
# Settings
|
||||
|
||||
This page allows viewing/changing all possible configuration options.
|
||||
|
||||
* Note: This will require a restart of the script to have any effect.
|
||||
|
||||
<img src="screenshots/webui_diagnostics.PNG" alt="Diagnostics Page"/>
|
||||
|
Before Width: | Height: | Size: 15 KiB |
|
Before Width: | Height: | Size: 30 KiB |
|
Before Width: | Height: | Size: 27 KiB |
|
Before Width: | Height: | Size: 23 KiB |
|
Before Width: | Height: | Size: 137 KiB |
|
Before Width: | Height: | Size: 23 KiB |
|
Before Width: | Height: | Size: 30 KiB |
|
Before Width: | Height: | Size: 53 KiB |
|
Before Width: | Height: | Size: 39 KiB |
|
Before Width: | Height: | Size: 26 KiB |
|
Before Width: | Height: | Size: 27 KiB |
@ -1,42 +1,23 @@
|
||||
# coding=utf-8
|
||||
|
||||
from .origin import OriginServiceWrapper
|
||||
from .device import fHDHR_Device
|
||||
from .api import fHDHR_API_URLs
|
||||
|
||||
import fHDHR.tools
|
||||
fHDHR_VERSION = "v0.6.0-beta"
|
||||
|
||||
|
||||
class fHDHR_INT_OBJ():
|
||||
|
||||
def __init__(self, settings, logger, db, plugins):
|
||||
self.version = fHDHR_VERSION
|
||||
self.config = settings
|
||||
self.logger = logger
|
||||
self.db = db
|
||||
self.plugins = plugins
|
||||
|
||||
self.web = fHDHR.tools.WebReq()
|
||||
for plugin_name in list(self.plugins.plugins.keys()):
|
||||
self.plugins.plugins[plugin_name].plugin_utils.web = self.web
|
||||
|
||||
self.api = fHDHR_API_URLs(settings, self.web)
|
||||
for plugin_name in list(self.plugins.plugins.keys()):
|
||||
self.plugins.plugins[plugin_name].plugin_utils.api = self.api
|
||||
|
||||
self.threads = {}
|
||||
fHDHR_VERSION = "v0.4.0-beta"
|
||||
|
||||
|
||||
class fHDHR_OBJ():
|
||||
|
||||
def __init__(self, settings, logger, db, plugins):
|
||||
self.fhdhr = fHDHR_INT_OBJ(settings, logger, db, plugins)
|
||||
def __init__(self, settings, logger, db):
|
||||
self.version = fHDHR_VERSION
|
||||
self.config = settings
|
||||
self.logger = logger
|
||||
self.db = db
|
||||
|
||||
self.fhdhr.origins = fHDHR.origins.Origins(self.fhdhr)
|
||||
self.web = fHDHR.tools.WebReq()
|
||||
|
||||
self.device = fHDHR_Device(self.fhdhr, self.fhdhr.origins)
|
||||
self.origin = OriginServiceWrapper(settings, logger, self.web, db)
|
||||
|
||||
def __getattr__(self, name):
|
||||
''' will only get called for undefined attributes '''
|
||||
if hasattr(self.fhdhr, name):
|
||||
return eval("self.fhdhr.%s" % name)
|
||||
self.device = fHDHR_Device(settings, self.version, self.origin, logger, self.web, db)
|
||||
|
||||
@ -1,82 +0,0 @@
|
||||
import urllib.parse
|
||||
|
||||
|
||||
class Fillin_Client():
|
||||
|
||||
def __init__(self, settings, web):
|
||||
self.config = settings
|
||||
self.web = web
|
||||
|
||||
def __getattr__(self, name):
|
||||
''' will only get called for undefined attributes '''
|
||||
if hasattr(self.web.session, name):
|
||||
return eval("self.web.session.%s" % name)
|
||||
|
||||
|
||||
class fHDHR_API_URLs():
|
||||
|
||||
def __init__(self, settings, web):
|
||||
self.config = settings
|
||||
self.web = web
|
||||
|
||||
self.headers = {'User-Agent': "fHDHR/%s" % self.config.internal["versions"]["fHDHR"]}
|
||||
|
||||
# Replaced later
|
||||
self.client = Fillin_Client(settings, web)
|
||||
|
||||
self.address = self.config.dict["fhdhr"]["address"]
|
||||
self.discovery_address = self.config.dict["fhdhr"]["discovery_address"]
|
||||
self.port = self.config.dict["fhdhr"]["port"]
|
||||
|
||||
def get(self, url, *args):
|
||||
|
||||
req_method = type(self.client).__name__
|
||||
|
||||
if not url.startswith("http"):
|
||||
if not url.startswith("/"):
|
||||
url = "/%s" % url
|
||||
url = "%s%s" % (self.base, url)
|
||||
|
||||
if req_method == "FlaskClient":
|
||||
self.client.get(url, headers=self.headers, *args)
|
||||
else:
|
||||
self.client.get(url, headers=self.headers, *args)
|
||||
|
||||
def post(self, url, *args):
|
||||
|
||||
req_method = type(self.client).__name__
|
||||
|
||||
if not url.startswith("http"):
|
||||
if not url.startswith("/"):
|
||||
url = "/%s" % url
|
||||
url = "%s%s" % (self.base, url)
|
||||
|
||||
if req_method == "FlaskClient":
|
||||
self.client.post(url, headers=self.headers, *args)
|
||||
else:
|
||||
self.client.post(url, headers=self.headers, *args)
|
||||
|
||||
@property
|
||||
def base(self):
|
||||
if self.discovery_address:
|
||||
return ('http://%s:%s' % self.discovery_address_tuple)
|
||||
elif self.address == "0.0.0.0":
|
||||
return ('http://%s:%s' % self.address_tuple)
|
||||
else:
|
||||
return ('http://%s:%s' % self.address_tuple)
|
||||
|
||||
@property
|
||||
def base_quoted(self):
|
||||
return urllib.parse.quote(self.base)
|
||||
|
||||
@property
|
||||
def discovery_address_tuple(self):
|
||||
return (self.discovery_address, int(self.port))
|
||||
|
||||
@property
|
||||
def localhost_address_tuple(self):
|
||||
return ("127.0.0.1", int(self.port))
|
||||
|
||||
@property
|
||||
def address_tuple(self):
|
||||
return (self.address, int(self.port))
|
||||
@ -2,21 +2,20 @@ import os
|
||||
import sys
|
||||
import argparse
|
||||
import time
|
||||
import multiprocessing
|
||||
|
||||
from fHDHR import fHDHR_VERSION, fHDHR_OBJ
|
||||
import fHDHR.exceptions
|
||||
import fHDHR.config
|
||||
import fHDHR.logger
|
||||
import fHDHR.plugins
|
||||
import fHDHR.origins
|
||||
from fHDHR.http import fHDHR_HTTP_Server
|
||||
from fHDHR.db import fHDHRdb
|
||||
|
||||
ERR_CODE = 1
|
||||
ERR_CODE_NO_RESTART = 2
|
||||
|
||||
|
||||
if sys.version_info.major == 2 or sys.version_info < (3, 7):
|
||||
print('Error: fHDHR requires python 3.7+.')
|
||||
if sys.version_info.major == 2 or sys.version_info < (3, 3):
|
||||
print('Error: fHDHR requires python 3.3+.')
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
@ -27,38 +26,36 @@ def build_args_parser():
|
||||
return parser.parse_args()
|
||||
|
||||
|
||||
def get_configuration(args, script_dir, fHDHR_web):
|
||||
def get_configuration(args, script_dir):
|
||||
if not os.path.isfile(args.cfg):
|
||||
raise fHDHR.exceptions.ConfigurationNotFound(filename=args.cfg)
|
||||
return fHDHR.config.Config(args.cfg, script_dir, fHDHR_web)
|
||||
return fHDHR.config.Config(args.cfg, script_dir)
|
||||
|
||||
|
||||
def run(settings, logger, db, script_dir, fHDHR_web, plugins):
|
||||
def run(settings, logger, db):
|
||||
|
||||
fhdhr = fHDHR_OBJ(settings, logger, db, plugins)
|
||||
fhdhrweb = fHDHR_web.fHDHR_HTTP_Server(fhdhr)
|
||||
fhdhr = fHDHR_OBJ(settings, logger, db)
|
||||
fhdhrweb = fHDHR_HTTP_Server(fhdhr)
|
||||
|
||||
try:
|
||||
|
||||
# Start Flask Thread
|
||||
fhdhrweb.start()
|
||||
print("HTTP Server Starting")
|
||||
fhdhr_web = multiprocessing.Process(target=fhdhrweb.run)
|
||||
fhdhr_web.start()
|
||||
|
||||
# Start SSDP Thread
|
||||
if settings.dict["fhdhr"]["discovery_address"]:
|
||||
fhdhr.device.ssdp.start()
|
||||
print("SSDP Server Starting")
|
||||
fhdhr_ssdp = multiprocessing.Process(target=fhdhr.device.ssdp.run)
|
||||
fhdhr_ssdp.start()
|
||||
|
||||
# Start EPG Thread
|
||||
if settings.dict["epg"]["method"]:
|
||||
fhdhr.device.epg.start()
|
||||
|
||||
# Perform some actions now that HTTP Server is running
|
||||
fhdhr.api.get("/api/startup_tasks")
|
||||
print("EPG Update Starting")
|
||||
fhdhr_epg = multiprocessing.Process(target=fhdhr.device.epg.run)
|
||||
fhdhr_epg.start()
|
||||
|
||||
# wait forever
|
||||
restart_code = "restart"
|
||||
while fhdhr.threads["flask"].is_alive():
|
||||
time.sleep(1)
|
||||
return restart_code
|
||||
while True:
|
||||
time.sleep(3600)
|
||||
|
||||
except KeyboardInterrupt:
|
||||
return ERR_CODE_NO_RESTART
|
||||
@ -66,52 +63,30 @@ def run(settings, logger, db, script_dir, fHDHR_web, plugins):
|
||||
return ERR_CODE
|
||||
|
||||
|
||||
def start(args, script_dir, fHDHR_web):
|
||||
def start(args, script_dir):
|
||||
"""Get Configuration for fHDHR and start"""
|
||||
|
||||
try:
|
||||
settings = get_configuration(args, script_dir, fHDHR_web)
|
||||
settings = get_configuration(args, script_dir)
|
||||
except fHDHR.exceptions.ConfigurationError as e:
|
||||
print(e)
|
||||
return ERR_CODE_NO_RESTART
|
||||
|
||||
# Find Plugins and import their default configs
|
||||
plugins = fHDHR.plugins.PluginsHandler(settings)
|
||||
logger = settings.logging_setup()
|
||||
|
||||
# Apply User Configuration
|
||||
settings.user_config()
|
||||
settings.config_verification()
|
||||
|
||||
# Setup Logging
|
||||
logger = fHDHR.logger.Logger(settings)
|
||||
|
||||
# Setup Database
|
||||
db = fHDHRdb(settings)
|
||||
|
||||
# Setup Plugins
|
||||
plugins.load_plugins(logger, db)
|
||||
plugins.setup()
|
||||
settings.config_verification_plugins()
|
||||
|
||||
if not len([x for x in list(plugins.plugins.keys()) if plugins.plugins[x].type == "origin"]):
|
||||
print("No Origin Plugins found.")
|
||||
return ERR_CODE
|
||||
|
||||
return run(settings, logger, db, script_dir, fHDHR_web, plugins)
|
||||
return run(settings, logger, db)
|
||||
|
||||
|
||||
def main(script_dir, fHDHR_web):
|
||||
def main(script_dir):
|
||||
"""fHDHR run script entry point"""
|
||||
|
||||
print("Loading fHDHR %s" % fHDHR_VERSION)
|
||||
print("Loading fHDHR_web %s" % fHDHR_web.fHDHR_web_VERSION)
|
||||
print("Loading fHDHR " + fHDHR_VERSION)
|
||||
|
||||
try:
|
||||
args = build_args_parser()
|
||||
while True:
|
||||
returned_code = start(args, script_dir, fHDHR_web)
|
||||
if returned_code not in ["restart"]:
|
||||
return returned_code
|
||||
return start(args, script_dir)
|
||||
except KeyboardInterrupt:
|
||||
print("\n\nInterrupted")
|
||||
return ERR_CODE
|
||||
|
||||
@ -1,281 +1,79 @@
|
||||
import os
|
||||
import sys
|
||||
import random
|
||||
import configparser
|
||||
import pathlib
|
||||
import platform
|
||||
import json
|
||||
import logging
|
||||
import subprocess
|
||||
|
||||
import fHDHR.exceptions
|
||||
from fHDHR import fHDHR_VERSION
|
||||
from fHDHR.tools import isint, isfloat, is_arithmetic, is_docker
|
||||
from fHDHR.tools import isint, isfloat, is_arithmetic
|
||||
|
||||
|
||||
class Config():
|
||||
|
||||
def __init__(self, filename, script_dir, fHDHR_web):
|
||||
self.fHDHR_web = fHDHR_web
|
||||
|
||||
self.internal = {}
|
||||
self.conf_default = {}
|
||||
def __init__(self, filename, script_dir):
|
||||
self.dict = {}
|
||||
self.internal["versions"] = {}
|
||||
self.config_file = filename
|
||||
self.parser = configparser.RawConfigParser(allow_no_value=True)
|
||||
|
||||
self.core_setup(script_dir)
|
||||
self.load_defaults(script_dir)
|
||||
|
||||
def core_setup(self, script_dir):
|
||||
print("Loading Configuration File: " + str(self.config_file))
|
||||
self.read_config(self.config_file)
|
||||
|
||||
self.config_verification()
|
||||
|
||||
def load_defaults(self, script_dir):
|
||||
|
||||
data_dir = pathlib.Path(script_dir).joinpath('data')
|
||||
internal_plugins_dir = pathlib.Path(script_dir).joinpath('plugins')
|
||||
fHDHR_web_dir = pathlib.Path(script_dir).joinpath('fHDHR_web')
|
||||
www_dir = pathlib.Path(fHDHR_web_dir).joinpath('www_dir')
|
||||
www_dir = pathlib.Path(data_dir).joinpath('www')
|
||||
www_images_dir = pathlib.Path(www_dir).joinpath('images')
|
||||
|
||||
self.internal["paths"] = {
|
||||
self.dict["filedir"] = {
|
||||
"script_dir": script_dir,
|
||||
"data_dir": data_dir,
|
||||
"plugins_dir": [internal_plugins_dir],
|
||||
|
||||
"cache_dir": pathlib.Path(data_dir).joinpath('cache'),
|
||||
"internal_config": pathlib.Path(data_dir).joinpath('internal_config'),
|
||||
"fHDHR_web_dir": fHDHR_web_dir,
|
||||
"www_dir": www_dir,
|
||||
"www_templates_dir": pathlib.Path(fHDHR_web_dir).joinpath('templates'),
|
||||
"www_images_dir": www_images_dir,
|
||||
"font": pathlib.Path(data_dir).joinpath('garamond.ttf'),
|
||||
"favicon": pathlib.Path(data_dir).joinpath('favicon.ico'),
|
||||
"epg_cache": {},
|
||||
}
|
||||
|
||||
for conffile in os.listdir(self.internal["paths"]["internal_config"]):
|
||||
conffilepath = os.path.join(self.internal["paths"]["internal_config"], conffile)
|
||||
if str(conffilepath).endswith(".json"):
|
||||
self.read_json_config(conffilepath)
|
||||
for conffile in os.listdir(self.dict["filedir"]["internal_config"]):
|
||||
conffilepath = os.path.join(self.dict["filedir"]["internal_config"], conffile)
|
||||
self.read_config(conffilepath)
|
||||
|
||||
for file_item in os.listdir(self.internal["paths"]["fHDHR_web_dir"]):
|
||||
file_item_path = pathlib.Path(self.internal["paths"]["fHDHR_web_dir"]).joinpath(file_item)
|
||||
if str(file_item_path).endswith("_conf.json"):
|
||||
self.read_json_config(file_item_path)
|
||||
|
||||
self.dict["epg"]["valid_methods"] = {None: {}}
|
||||
self.dict["origins"] = {}
|
||||
self.dict["origins"]["valid_methods"] = {}
|
||||
self.dict["streaming"]["valid_methods"] = {"direct": {}}
|
||||
self.dict["plugin_web_paths"] = {}
|
||||
|
||||
self.load_versions()
|
||||
|
||||
def register_web_path(self, name, path, plugin_dict_name):
|
||||
self.dict["plugin_web_paths"][name.lower()] = {
|
||||
"name": name,
|
||||
"namespace": name.lower(),
|
||||
"path": path,
|
||||
"plugin": plugin_dict_name
|
||||
}
|
||||
|
||||
def register_valid_origin_method(self, method_item):
|
||||
self.dict["origins"]["valid_methods"][method_item.lower()] = {
|
||||
"name": method_item,
|
||||
"namespace": method_item.lower(),
|
||||
}
|
||||
|
||||
def register_valid_streaming_method(self, method_item, plugin_dict_name):
|
||||
self.dict["streaming"]["valid_methods"][method_item.lower()] = {
|
||||
"name": method_item,
|
||||
"namespace": method_item.lower(),
|
||||
"plugin": plugin_dict_name
|
||||
}
|
||||
|
||||
def register_valid_epg_method(self, method_item, plugin_dict_name):
|
||||
self.dict["epg"]["valid_methods"][method_item.lower()] = {
|
||||
"name": method_item,
|
||||
"namespace": method_item.lower(),
|
||||
"plugin": plugin_dict_name
|
||||
}
|
||||
|
||||
def register_version(self, item_name, item_version, item_type):
|
||||
self.internal["versions"][item_name] = {
|
||||
"name": item_name,
|
||||
"version": item_version,
|
||||
"type": item_type
|
||||
}
|
||||
|
||||
def import_conf_json(self, file_item_path):
|
||||
self.read_json_config(file_item_path)
|
||||
|
||||
def load_versions(self):
|
||||
|
||||
self.register_version("fHDHR", fHDHR_VERSION, "fHDHR")
|
||||
self.register_version("fHDHR_web", self.fHDHR_web.fHDHR_web_VERSION, "fHDHR")
|
||||
|
||||
self.register_version("Python", sys.version, "env")
|
||||
|
||||
opersystem = platform.system()
|
||||
self.register_version("Operating System", opersystem, "env")
|
||||
if opersystem in ["Linux", "Darwin"]:
|
||||
# Linux/Mac
|
||||
if os.getuid() == 0 or os.geteuid() == 0:
|
||||
print('Warning: Do not run fHDHR with root privileges.')
|
||||
elif opersystem in ["Windows"]:
|
||||
# Windows
|
||||
if os.environ.get("USERNAME") == "Administrator":
|
||||
print('Warning: Do not run fHDHR as Administrator.')
|
||||
else:
|
||||
print("Uncommon Operating System, use at your own risk.")
|
||||
|
||||
isdocker = is_docker()
|
||||
self.register_version("Docker", isdocker, "env")
|
||||
|
||||
def user_config(self):
|
||||
print("Loading Configuration File: %s" % self.config_file)
|
||||
self.read_ini_config(self.config_file)
|
||||
|
||||
def config_verification_plugins(self):
|
||||
required_missing = {}
|
||||
# create dict and combine items
|
||||
for config_section in list(self.conf_default.keys()):
|
||||
for config_item in list(self.conf_default[config_section].keys()):
|
||||
if self.conf_default[config_section][config_item]["required"]:
|
||||
if not self.dict[config_section][config_item]:
|
||||
if config_section not in list(required_missing.keys()):
|
||||
required_missing[config_section] = []
|
||||
required_missing[config_section].append(config_item)
|
||||
for config_section in list(required_missing.keys()):
|
||||
print("Warning! Required configuration options missing: [%s]%s" % (config_section, ", ".join(required_missing[config_section])))
|
||||
|
||||
if self.dict["epg"]["method"] and self.dict["epg"]["method"] not in ["None"]:
|
||||
if isinstance(self.dict["epg"]["method"], str):
|
||||
self.dict["epg"]["method"] = [self.dict["epg"]["method"]]
|
||||
epg_methods = []
|
||||
for epg_method in self.dict["epg"]["method"]:
|
||||
if epg_method in list(self.dict["epg"]["valid_methods"].keys()):
|
||||
epg_methods.append(epg_method)
|
||||
elif epg_method in list(self.dict["origins"]["valid_methods"].keys()):
|
||||
epg_methods.append(epg_method)
|
||||
else:
|
||||
raise fHDHR.exceptions.ConfigurationError("Invalid EPG Method. Exiting...")
|
||||
if self.dict["epg"]["method"]:
|
||||
self.dict["epg"]["def_method"] = self.dict["epg"]["method"][0]
|
||||
else:
|
||||
self.dict["epg"]["def_method"] = None
|
||||
|
||||
if self.dict["streaming"]["method"] not in self.dict["streaming"]["valid_methods"]:
|
||||
raise fHDHR.exceptions.ConfigurationError("Invalid stream type. Exiting...")
|
||||
|
||||
def config_verification(self):
|
||||
|
||||
if not self.dict["main"]["uuid"]:
|
||||
self.dict["main"]["uuid"] = ''.join(random.choice("hijklmnopqrstuvwxyz") for i in range(8))
|
||||
self.write('uuid', self.dict["main"]["uuid"], 'main')
|
||||
|
||||
if self.dict["main"]["cache_dir"]:
|
||||
if not pathlib.Path(self.dict["main"]["cache_dir"]).is_dir():
|
||||
raise fHDHR.exceptions.ConfigurationError("Invalid Cache Directory. Exiting...")
|
||||
self.internal["paths"]["cache_dir"] = pathlib.Path(self.dict["main"]["cache_dir"])
|
||||
cache_dir = self.internal["paths"]["cache_dir"]
|
||||
|
||||
logs_dir = pathlib.Path(cache_dir).joinpath('logs')
|
||||
self.internal["paths"]["logs_dir"] = logs_dir
|
||||
if not logs_dir.is_dir():
|
||||
logs_dir.mkdir()
|
||||
|
||||
self.dict["database"]["path"] = pathlib.Path(cache_dir).joinpath('fhdhr.db')
|
||||
|
||||
if not self.dict["fhdhr"]["discovery_address"] and self.dict["fhdhr"]["address"] != "0.0.0.0":
|
||||
self.dict["fhdhr"]["discovery_address"] = self.dict["fhdhr"]["address"]
|
||||
if not self.dict["fhdhr"]["discovery_address"] or self.dict["fhdhr"]["discovery_address"] == "0.0.0.0":
|
||||
self.dict["fhdhr"]["discovery_address"] = None
|
||||
|
||||
def get_real_conf_value(self, key, confvalue):
|
||||
if not confvalue:
|
||||
confvalue = None
|
||||
elif key == "xmltv_offset":
|
||||
confvalue = str(confvalue)
|
||||
elif str(confvalue) in ["0"]:
|
||||
confvalue = 0
|
||||
elif isint(confvalue):
|
||||
confvalue = int(confvalue)
|
||||
elif isfloat(confvalue):
|
||||
confvalue = float(confvalue)
|
||||
elif is_arithmetic(confvalue):
|
||||
confvalue = eval(confvalue)
|
||||
elif "," in confvalue:
|
||||
confvalue = confvalue.split(",")
|
||||
elif str(confvalue).lower() in ["none", ""]:
|
||||
confvalue = None
|
||||
elif str(confvalue).lower() in ["false"]:
|
||||
confvalue = False
|
||||
elif str(confvalue).lower() in ["true"]:
|
||||
confvalue = True
|
||||
return confvalue
|
||||
|
||||
def read_json_config(self, conffilepath):
|
||||
with open(conffilepath, 'r') as jsonconf:
|
||||
confimport = json.load(jsonconf)
|
||||
for section in list(confimport.keys()):
|
||||
|
||||
if section not in self.dict.keys():
|
||||
self.dict[section] = {}
|
||||
|
||||
if section not in self.conf_default.keys():
|
||||
self.conf_default[section] = {}
|
||||
|
||||
for key in list(confimport[section].keys()):
|
||||
|
||||
if key not in list(self.conf_default[section].keys()):
|
||||
self.conf_default[section][key] = {}
|
||||
|
||||
confvalue = self.get_real_conf_value(key, confimport[section][key]["value"])
|
||||
|
||||
self.dict[section][key] = confvalue
|
||||
|
||||
self.conf_default[section][key]["value"] = confvalue
|
||||
|
||||
for config_option in ["config_web_hidden", "config_file", "config_web", "required"]:
|
||||
if config_option not in list(confimport[section][key].keys()):
|
||||
config_option_value = False
|
||||
else:
|
||||
config_option_value = confimport[section][key][config_option]
|
||||
if str(config_option_value).lower() in ["none"]:
|
||||
config_option_value = None
|
||||
elif str(config_option_value).lower() in ["false"]:
|
||||
config_option_value = False
|
||||
elif str(config_option_value).lower() in ["true"]:
|
||||
config_option_value = True
|
||||
self.conf_default[section][key][config_option] = config_option_value
|
||||
|
||||
def read_ini_config(self, conffilepath):
|
||||
def read_config(self, conffilepath):
|
||||
config_handler = configparser.ConfigParser()
|
||||
config_handler.read(conffilepath)
|
||||
for each_section in config_handler.sections():
|
||||
if each_section.lower() not in list(self.dict.keys()):
|
||||
self.dict[each_section.lower()] = {}
|
||||
for (each_key, each_val) in config_handler.items(each_section):
|
||||
each_val = self.get_real_conf_value(each_key, each_val)
|
||||
if not each_val:
|
||||
each_val = None
|
||||
elif each_val.lower() in ["none", "false"]:
|
||||
each_val = False
|
||||
elif each_val.lower() in ["true"]:
|
||||
each_val = True
|
||||
elif isint(each_val):
|
||||
each_val = int(each_val)
|
||||
elif isfloat(each_val):
|
||||
each_val = float(each_val)
|
||||
elif is_arithmetic(each_val):
|
||||
each_val = eval(each_val)
|
||||
elif "," in each_val:
|
||||
each_val = each_val.split(",")
|
||||
self.dict[each_section.lower()][each_key.lower()] = each_val
|
||||
|
||||
import_val = True
|
||||
if each_section in list(self.conf_default.keys()):
|
||||
if each_key in list(self.conf_default[each_section].keys()):
|
||||
if not self.conf_default[each_section][each_key]["config_file"]:
|
||||
import_val = False
|
||||
|
||||
if import_val:
|
||||
self.dict[each_section.lower()][each_key.lower()] = each_val
|
||||
|
||||
def write(self, key, value, section):
|
||||
|
||||
if not value:
|
||||
value = None
|
||||
if value.lower() in ["none"]:
|
||||
value = None
|
||||
elif value.lower() in ["false"]:
|
||||
value = False
|
||||
elif value.lower() in ["true"]:
|
||||
value = True
|
||||
elif isint(value):
|
||||
value = int(value)
|
||||
elif isfloat(value):
|
||||
value = float(value)
|
||||
elif isinstance(value, list):
|
||||
",".join(value)
|
||||
|
||||
self.dict[section][key] = value
|
||||
def write(self, section, key, value):
|
||||
if section == self.dict["main"]["dictpopname"]:
|
||||
self.dict["origin"][key] = value
|
||||
else:
|
||||
self.dict[section][key] = value
|
||||
|
||||
config_handler = configparser.ConfigParser()
|
||||
config_handler.read(self.config_file)
|
||||
@ -283,12 +81,126 @@ class Config():
|
||||
if not config_handler.has_section(section):
|
||||
config_handler.add_section(section)
|
||||
|
||||
config_handler.set(section, key, str(value))
|
||||
config_handler.set(section, key, value)
|
||||
|
||||
with open(self.config_file, 'w') as config_file:
|
||||
config_handler.write(config_file)
|
||||
|
||||
def __getattr__(self, name):
|
||||
''' will only get called for undefined attributes '''
|
||||
if name in list(self.dict.keys()):
|
||||
return self.dict[name]
|
||||
def config_verification(self):
|
||||
|
||||
if self.dict["main"]["required"]:
|
||||
required_missing = []
|
||||
if isinstance(self.dict["main"]["required"], str):
|
||||
self.dict["main"]["required"] = [self.dict["main"]["required"]]
|
||||
if len(self.dict["main"]["required"]):
|
||||
for req_item in self.dict["main"]["required"]:
|
||||
req_section = req_item.split("/")[0]
|
||||
req_key = req_item.split("/")[1]
|
||||
if not self.dict[req_section][req_key]:
|
||||
required_missing.append(req_item)
|
||||
if len(required_missing):
|
||||
raise fHDHR.exceptions.ConfigurationError("Required configuration options missing: " + ", ".join(required_missing))
|
||||
|
||||
self.dict["origin"] = self.dict.pop(self.dict["main"]["dictpopname"])
|
||||
|
||||
if isinstance(self.dict["main"]["valid_epg_methods"], str):
|
||||
self.dict["main"]["valid_epg_methods"] = [self.dict["main"]["valid_epg_methods"]]
|
||||
|
||||
if self.dict["epg"]["method"] and self.dict["epg"]["method"] not in ["None"]:
|
||||
if isinstance(self.dict["epg"]["method"], str):
|
||||
self.dict["epg"]["method"] = [self.dict["epg"]["method"]]
|
||||
epg_methods = []
|
||||
for epg_method in self.dict["epg"]["method"]:
|
||||
if epg_method == self.dict["main"]["dictpopname"] or epg_method == "origin":
|
||||
epg_methods.append("origin")
|
||||
elif epg_method in ["None"]:
|
||||
raise fHDHR.exceptions.ConfigurationError("Invalid EPG Method. Exiting...")
|
||||
elif epg_method in self.dict["main"]["valid_epg_methods"]:
|
||||
epg_methods.append(epg_method)
|
||||
else:
|
||||
raise fHDHR.exceptions.ConfigurationError("Invalid EPG Method. Exiting...")
|
||||
self.dict["epg"]["def_method"] = self.dict["epg"]["method"][0]
|
||||
|
||||
# generate UUID here for when we are not using docker
|
||||
if not self.dict["main"]["uuid"]:
|
||||
# from https://pynative.com/python-generate-random-string/
|
||||
# create a string that wouldn't be a real device uuid for
|
||||
self.dict["main"]["uuid"] = ''.join(random.choice("hijklmnopqrstuvwxyz") for i in range(8))
|
||||
self.write('main', 'uuid', self.dict["main"]["uuid"])
|
||||
|
||||
if self.dict["main"]["cache_dir"]:
|
||||
if not pathlib.Path(self.dict["main"]["cache_dir"]).is_dir():
|
||||
raise fHDHR.exceptions.ConfigurationError("Invalid Cache Directory. Exiting...")
|
||||
self.dict["filedir"]["cache_dir"] = pathlib.Path(self.dict["main"]["cache_dir"])
|
||||
cache_dir = self.dict["filedir"]["cache_dir"]
|
||||
|
||||
logs_dir = pathlib.Path(cache_dir).joinpath('logs')
|
||||
self.dict["filedir"]["logs_dir"] = logs_dir
|
||||
if not logs_dir.is_dir():
|
||||
logs_dir.mkdir()
|
||||
|
||||
self.dict["database"]["path"] = pathlib.Path(cache_dir).joinpath('fhdhr.db')
|
||||
|
||||
for epg_method in self.dict["main"]["valid_epg_methods"]:
|
||||
if epg_method and epg_method != "None":
|
||||
epg_cache_dir = pathlib.Path(cache_dir).joinpath(epg_method)
|
||||
if not epg_cache_dir.is_dir():
|
||||
epg_cache_dir.mkdir()
|
||||
if epg_method not in list(self.dict["filedir"]["epg_cache"].keys()):
|
||||
self.dict["filedir"]["epg_cache"][epg_method] = {}
|
||||
self.dict["filedir"]["epg_cache"][epg_method]["top"] = epg_cache_dir
|
||||
epg_web_cache_dir = pathlib.Path(epg_cache_dir).joinpath("web_cache")
|
||||
if not epg_web_cache_dir.is_dir():
|
||||
epg_web_cache_dir.mkdir()
|
||||
self.dict["filedir"]["epg_cache"][epg_method]["web_cache"] = epg_web_cache_dir
|
||||
self.dict["filedir"]["epg_cache"][epg_method]["epg_json"] = pathlib.Path(epg_cache_dir).joinpath('epg.json')
|
||||
|
||||
if self.dict["fhdhr"]["stream_type"] not in ["direct", "ffmpeg"]:
|
||||
raise fHDHR.exceptions.ConfigurationError("Invalid stream type. Exiting...")
|
||||
|
||||
if self.dict["fhdhr"]["stream_type"] == "ffmpeg":
|
||||
try:
|
||||
ffmpeg_command = [self.dict["ffmpeg"]["ffmpeg_path"],
|
||||
"-version",
|
||||
"pipe:stdout"
|
||||
]
|
||||
|
||||
ffmpeg_proc = subprocess.Popen(ffmpeg_command, stdout=subprocess.PIPE)
|
||||
ffmpeg_version = ffmpeg_proc.stdout.read()
|
||||
ffmpeg_proc.terminate()
|
||||
ffmpeg_proc.communicate()
|
||||
ffmpeg_version = ffmpeg_version.decode().split("version ")[1].split(" ")[0]
|
||||
except FileNotFoundError:
|
||||
ffmpeg_version = None
|
||||
self.dict["ffmpeg"]["version"] = ffmpeg_version
|
||||
|
||||
if not self.dict["fhdhr"]["discovery_address"] and self.dict["fhdhr"]["address"] != "0.0.0.0":
|
||||
self.dict["fhdhr"]["discovery_address"] = self.dict["fhdhr"]["address"]
|
||||
if not self.dict["fhdhr"]["discovery_address"] or self.dict["fhdhr"]["discovery_address"] == "0.0.0.0":
|
||||
self.dict["fhdhr"]["discovery_address"] = None
|
||||
|
||||
def logging_setup(self):
|
||||
|
||||
log_level = self.dict["logging"]["level"].upper()
|
||||
|
||||
# Create a custom logger
|
||||
logging.basicConfig(format='%(name)s - %(levelname)s - %(message)s', level=log_level)
|
||||
logger = logging.getLogger('fHDHR')
|
||||
log_file = os.path.join(self.dict["filedir"]["logs_dir"], 'fHDHR.log')
|
||||
|
||||
# Create handlers
|
||||
# c_handler = logging.StreamHandler()
|
||||
f_handler = logging.FileHandler(log_file)
|
||||
# c_handler.setLevel(log_level)
|
||||
f_handler.setLevel(log_level)
|
||||
|
||||
# Create formatters and add it to handlers
|
||||
# c_format = logging.Formatter('%(name)s - %(levelname)s - %(message)s')
|
||||
f_format = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
|
||||
# c_handler.setFormatter(c_format)
|
||||
f_handler.setFormatter(f_format)
|
||||
|
||||
# Add handlers to the logger
|
||||
# logger.addHandler(c_handler)
|
||||
logger.addHandler(f_handler)
|
||||
return logger
|
||||
|
||||
@ -32,10 +32,28 @@ MYSQL_TABLE_ARGS = {'mysql_engine': 'InnoDB',
|
||||
'mysql_collate': 'utf8mb4_unicode_ci'}
|
||||
|
||||
|
||||
class PluginValues(BASE):
|
||||
__tablename__ = 'plugin_values'
|
||||
class ChannelValues(BASE):
|
||||
__tablename__ = 'channel_values'
|
||||
__table_args__ = MYSQL_TABLE_ARGS
|
||||
pluginitem = Column(String(255), primary_key=True)
|
||||
channel = Column(String(255), primary_key=True)
|
||||
namespace = Column(String(255), primary_key=True)
|
||||
key = Column(String(255), primary_key=True)
|
||||
value = Column(Text())
|
||||
|
||||
|
||||
class ProgramValues(BASE):
|
||||
__tablename__ = 'program_values'
|
||||
__table_args__ = MYSQL_TABLE_ARGS
|
||||
program = Column(String(255), primary_key=True)
|
||||
namespace = Column(String(255), primary_key=True)
|
||||
key = Column(String(255), primary_key=True)
|
||||
value = Column(Text())
|
||||
|
||||
|
||||
class CacheValues(BASE):
|
||||
__tablename__ = 'cache_values'
|
||||
__table_args__ = MYSQL_TABLE_ARGS
|
||||
cacheitem = Column(String(255), primary_key=True)
|
||||
namespace = Column(String(255), primary_key=True)
|
||||
key = Column(String(255), primary_key=True)
|
||||
value = Column(Text())
|
||||
@ -86,7 +104,7 @@ class fHDHRdb(object):
|
||||
db_user = self.config.dict["database"]["user"]
|
||||
db_pass = self.config.dict["database"]["pass"]
|
||||
db_host = self.config.dict["database"]["host"]
|
||||
db_port = self.config.dict["database"]["port"] # Optional
|
||||
db_port = self.config.dict["database"]["prt"] # Optional
|
||||
db_name = self.config.dict["database"]["name"] # Optional, depending on DB
|
||||
|
||||
# Ensure we have all our variables defined
|
||||
@ -130,6 +148,198 @@ class fHDHRdb(object):
|
||||
def get_uri(self):
|
||||
return self.url
|
||||
|
||||
# Channel Values
|
||||
|
||||
def set_channel_value(self, channel, key, value, namespace='default'):
|
||||
channel = channel.lower()
|
||||
value = json.dumps(value, ensure_ascii=False)
|
||||
session = self.ssession()
|
||||
try:
|
||||
result = session.query(ChannelValues) \
|
||||
.filter(ChannelValues.channel == channel)\
|
||||
.filter(ChannelValues.namespace == namespace)\
|
||||
.filter(ChannelValues.key == key) \
|
||||
.one_or_none()
|
||||
# ChannelValues exists, update
|
||||
if result:
|
||||
result.value = value
|
||||
session.commit()
|
||||
# DNE - Insert
|
||||
else:
|
||||
new_channelvalue = ChannelValues(channel=channel, namespace=namespace, key=key, value=value)
|
||||
session.add(new_channelvalue)
|
||||
session.commit()
|
||||
except SQLAlchemyError:
|
||||
session.rollback()
|
||||
raise
|
||||
finally:
|
||||
session.close()
|
||||
|
||||
def get_channel_value(self, channel, key, namespace='default'):
|
||||
channel = channel.lower()
|
||||
session = self.ssession()
|
||||
try:
|
||||
result = session.query(ChannelValues) \
|
||||
.filter(ChannelValues.channel == channel)\
|
||||
.filter(ChannelValues.namespace == namespace)\
|
||||
.filter(ChannelValues.key == key) \
|
||||
.one_or_none()
|
||||
if result is not None:
|
||||
result = result.value
|
||||
return _deserialize(result)
|
||||
except SQLAlchemyError:
|
||||
session.rollback()
|
||||
raise
|
||||
finally:
|
||||
session.close()
|
||||
|
||||
def delete_channel_value(self, channel, key, namespace='default'):
|
||||
channel = channel.lower()
|
||||
session = self.ssession()
|
||||
try:
|
||||
result = session.query(ChannelValues) \
|
||||
.filter(ChannelValues.channel == channel)\
|
||||
.filter(ChannelValues.namespace == namespace)\
|
||||
.filter(ChannelValues.key == key) \
|
||||
.one_or_none()
|
||||
# ChannelValues exists, delete
|
||||
if result:
|
||||
session.delete(result)
|
||||
session.commit()
|
||||
except SQLAlchemyError:
|
||||
session.rollback()
|
||||
raise
|
||||
finally:
|
||||
session.close()
|
||||
|
||||
# Program Values
|
||||
|
||||
def set_program_value(self, program, key, value, namespace='default'):
    """Upsert a ProgramValues row keyed by (program, namespace, key).

    The value is JSON-serialized before storage; the program name is
    lowercased so lookups are case-insensitive.
    """
    program = program.lower()
    value = json.dumps(value, ensure_ascii=False)
    session = self.ssession()
    try:
        row = (session.query(ProgramValues)
               .filter(ProgramValues.program == program)
               .filter(ProgramValues.namespace == namespace)
               .filter(ProgramValues.key == key)
               .one_or_none())
        if row is None:
            # No existing row: insert a fresh one.
            session.add(ProgramValues(program=program, namespace=namespace, key=key, value=value))
        else:
            # Row already present: overwrite the serialized value.
            row.value = value
        session.commit()
    except SQLAlchemyError:
        session.rollback()
        raise
    finally:
        session.close()
|
||||
|
||||
def get_program_value(self, program, key, namespace='default'):
    """Fetch and deserialize the value stored for (program, namespace, key).

    When no row exists, the raw value passed to ``_deserialize`` is None.
    """
    program = program.lower()
    session = self.ssession()
    try:
        row = (session.query(ProgramValues)
               .filter(ProgramValues.program == program)
               .filter(ProgramValues.namespace == namespace)
               .filter(ProgramValues.key == key)
               .one_or_none())
        raw = row.value if row is not None else None
        return _deserialize(raw)
    except SQLAlchemyError:
        session.rollback()
        raise
    finally:
        session.close()
|
||||
|
||||
def delete_program_value(self, program, key, namespace='default'):
    """Delete the row for (program, namespace, key); no-op when absent."""
    program = program.lower()
    session = self.ssession()
    try:
        row = (session.query(ProgramValues)
               .filter(ProgramValues.program == program)
               .filter(ProgramValues.namespace == namespace)
               .filter(ProgramValues.key == key)
               .one_or_none())
        if row is not None:
            session.delete(row)
            session.commit()
    except SQLAlchemyError:
        session.rollback()
        raise
    finally:
        session.close()
|
||||
|
||||
# Cache Values
|
||||
|
||||
def set_cacheitem_value(self, cacheitem, key, value, namespace='default'):
    """Upsert a CacheValues row keyed by (cacheitem, namespace, key).

    The value is JSON-serialized before storage; the cache item name is
    lowercased so lookups are case-insensitive.
    """
    cacheitem = cacheitem.lower()
    value = json.dumps(value, ensure_ascii=False)
    session = self.ssession()
    try:
        row = (session.query(CacheValues)
               .filter(CacheValues.cacheitem == cacheitem)
               .filter(CacheValues.namespace == namespace)
               .filter(CacheValues.key == key)
               .one_or_none())
        if row is None:
            # No existing row: insert a fresh one.
            session.add(CacheValues(cacheitem=cacheitem, namespace=namespace, key=key, value=value))
        else:
            # Row already present: overwrite the serialized value.
            row.value = value
        session.commit()
    except SQLAlchemyError:
        session.rollback()
        raise
    finally:
        session.close()
|
||||
|
||||
def get_cacheitem_value(self, cacheitem, key, namespace='default'):
    """Fetch and deserialize the value stored for (cacheitem, namespace, key).

    When no row exists, the raw value passed to ``_deserialize`` is None.
    """
    cacheitem = cacheitem.lower()
    session = self.ssession()
    try:
        row = (session.query(CacheValues)
               .filter(CacheValues.cacheitem == cacheitem)
               .filter(CacheValues.namespace == namespace)
               .filter(CacheValues.key == key)
               .one_or_none())
        raw = row.value if row is not None else None
        return _deserialize(raw)
    except SQLAlchemyError:
        session.rollback()
        raise
    finally:
        session.close()
|
||||
|
||||
def delete_cacheitem_value(self, cacheitem, key, namespace='default'):
    """Delete the row for (cacheitem, namespace, key); no-op when absent."""
    cacheitem = cacheitem.lower()
    session = self.ssession()
    try:
        row = (session.query(CacheValues)
               .filter(CacheValues.cacheitem == cacheitem)
               .filter(CacheValues.namespace == namespace)
               .filter(CacheValues.key == key)
               .one_or_none())
        if row is not None:
            session.delete(row)
            session.commit()
    except SQLAlchemyError:
        session.rollback()
        raise
    finally:
        session.close()
|
||||
|
||||
# fHDHR Values
|
||||
|
||||
def set_fhdhr_value(self, item, key, value, namespace='default'):
|
||||
@ -148,8 +358,8 @@ class fHDHRdb(object):
|
||||
session.commit()
|
||||
# DNE - Insert
|
||||
else:
|
||||
new_pluginitemvalue = fHDHRValues(item=item, namespace=namespace, key=key, value=value)
|
||||
session.add(new_pluginitemvalue)
|
||||
new_cacheitemvalue = fHDHRValues(item=item, namespace=namespace, key=key, value=value)
|
||||
session.add(new_cacheitemvalue)
|
||||
session.commit()
|
||||
except SQLAlchemyError:
|
||||
session.rollback()
|
||||
@ -193,67 +403,3 @@ class fHDHRdb(object):
|
||||
raise
|
||||
finally:
|
||||
session.close()
|
||||
|
||||
# Plugin Values
|
||||
|
||||
def set_plugin_value(self, pluginitem, key, value, namespace='default'):
    """Upsert a PluginValues row keyed by (pluginitem, namespace, key).

    The value is JSON-serialized before storage; the plugin item name is
    lowercased so lookups are case-insensitive.
    """
    pluginitem = pluginitem.lower()
    value = json.dumps(value, ensure_ascii=False)
    session = self.ssession()
    try:
        row = (session.query(PluginValues)
               .filter(PluginValues.pluginitem == pluginitem)
               .filter(PluginValues.namespace == namespace)
               .filter(PluginValues.key == key)
               .one_or_none())
        if row is None:
            # No existing row: insert a fresh one.
            session.add(PluginValues(pluginitem=pluginitem, namespace=namespace, key=key, value=value))
        else:
            # Row already present: overwrite the serialized value.
            row.value = value
        session.commit()
    except SQLAlchemyError:
        session.rollback()
        raise
    finally:
        session.close()
|
||||
|
||||
def get_plugin_value(self, pluginitem, key, namespace='default'):
    """Fetch and deserialize the value stored for (pluginitem, namespace, key).

    When no row exists, the raw value passed to ``_deserialize`` is None.
    """
    pluginitem = pluginitem.lower()
    session = self.ssession()
    try:
        row = (session.query(PluginValues)
               .filter(PluginValues.pluginitem == pluginitem)
               .filter(PluginValues.namespace == namespace)
               .filter(PluginValues.key == key)
               .one_or_none())
        raw = row.value if row is not None else None
        return _deserialize(raw)
    except SQLAlchemyError:
        session.rollback()
        raise
    finally:
        session.close()
|
||||
|
||||
def delete_plugin_value(self, pluginitem, key, namespace='default'):
    """Delete the row for (pluginitem, namespace, key); no-op when absent."""
    pluginitem = pluginitem.lower()
    session = self.ssession()
    try:
        row = (session.query(PluginValues)
               .filter(PluginValues.pluginitem == pluginitem)
               .filter(PluginValues.namespace == namespace)
               .filter(PluginValues.key == key)
               .one_or_none())
        if row is not None:
            session.delete(row)
            session.commit()
    except SQLAlchemyError:
        session.rollback()
        raise
    finally:
        session.close()
|
||||
|
||||
@ -1,34 +1,29 @@
|
||||
from .channels import Channels
|
||||
from .epg import EPG
|
||||
from .tuners import Tuners
|
||||
from .watch import WatchStream
|
||||
from .images import imageHandler
|
||||
from .station_scan import Station_Scan
|
||||
from .ssdp import SSDPServer
|
||||
from .cluster import fHDHR_Cluster
|
||||
|
||||
|
||||
class fHDHR_Device():
|
||||
|
||||
def __init__(self, fhdhr, origins):
|
||||
self.fhdhr = fhdhr
|
||||
def __init__(self, settings, fhdhr_version, origin, logger, web, db):
|
||||
|
||||
self.channels = Channels(fhdhr, origins)
|
||||
self.channels = Channels(settings, origin, logger, db)
|
||||
|
||||
self.epg = EPG(fhdhr, self.channels, origins)
|
||||
self.epg = EPG(settings, self.channels, origin, logger, web, db)
|
||||
|
||||
self.tuners = Tuners(fhdhr, self.epg, self.channels)
|
||||
self.tuners = Tuners(settings, self.epg, logger)
|
||||
|
||||
self.images = imageHandler(fhdhr, self.epg)
|
||||
self.watch = WatchStream(settings, self.channels, self.tuners, logger, web)
|
||||
|
||||
self.ssdp = SSDPServer(fhdhr)
|
||||
self.images = imageHandler(settings, self.epg, logger, web)
|
||||
|
||||
self.interfaces = {}
|
||||
self.station_scan = Station_Scan(settings, self.channels, logger, db)
|
||||
|
||||
for plugin_name in list(self.fhdhr.plugins.plugins.keys()):
|
||||
if self.fhdhr.plugins.plugins[plugin_name].manifest["type"] == "interface":
|
||||
method = self.fhdhr.plugins.plugins[plugin_name].name.lower()
|
||||
plugin_utils = self.fhdhr.plugins.plugins[plugin_name].plugin_utils
|
||||
plugin_utils.channels = self.channels
|
||||
plugin_utils.epg = self.epg
|
||||
plugin_utils.tuners = self.tuners
|
||||
plugin_utils.images = self.images
|
||||
plugin_utils.ssdp = self.ssdp
|
||||
self.interfaces[method] = self.fhdhr.plugins.plugins[plugin_name].Plugin_OBJ(fhdhr, plugin_utils)
|
||||
self.ssdp = SSDPServer(settings, fhdhr_version, logger, db)
|
||||
|
||||
self.cluster = fHDHR_Cluster(settings, self.ssdp, logger, db, web)
|
||||
|
||||
152
fHDHR/device/channels.py
Normal file
@ -0,0 +1,152 @@
|
||||
import datetime
|
||||
from collections import OrderedDict
|
||||
|
||||
from fHDHR.tools import hours_between_datetime
|
||||
|
||||
|
||||
class ChannelNumbers():
    """Persistent channel-number registry.

    Maps a channel id to a "x.0"-style number string, stored in the db under
    ("channel_numbers", "list") as a dict of channel_id -> number string.
    """

    def __init__(self, settings, logger, db):
        self.config = settings
        self.logger = logger
        self.db = db

    def get_number(self, channel_id):
        """Return the stored number for channel_id, or the lowest free number.

        Free numbers are searched as str(float(i)) for i in 1..999; if every
        candidate is taken, "999.0" is returned (same fallthrough behavior as
        before).
        """
        cnumbers = self.db.get_fhdhr_value("channel_numbers", "list") or {}
        if channel_id in cnumbers:
            return cnumbers[channel_id]

        # Bug fix: the original reused `channel_id` as the loop variable while
        # collecting used numbers, clobbering the parameter. A set also makes
        # the membership test O(1).
        used_numbers = set(cnumbers.values())

        for i in range(1, 1000):
            if str(float(i)) not in used_numbers:
                break
        return str(float(i))

    def set_number(self, channel_id, channel_number):
        """Record channel_number (normalized to str(float(...))) for channel_id."""
        cnumbers = self.db.get_fhdhr_value("channel_numbers", "list") or {}
        cnumbers[channel_id] = str(float(channel_number))
        self.db.set_fhdhr_value("channel_numbers", "list", cnumbers)
|
||||
|
||||
|
||||
class Channels():
    """Channel list manager for a single origin service.

    Pulls channel dicts from the origin, assigns/normalizes channel numbers
    via ChannelNumbers, and caches everything in ``self.list`` keyed by
    channel number string.
    """

    def __init__(self, settings, origin, logger, db):
        self.config = settings
        self.logger = logger
        self.origin = origin
        self.db = db

        # Persistent number assignments shared across restarts.
        self.channel_numbers = ChannelNumbers(settings, logger, db)

        # number-string -> channel info dict; populated by get_channels().
        self.list = {}
        self.list_update_time = None
        self.get_channels()

    def get_origin_status(self):
        """Return the origin's status dict, or {} when the origin lacks one."""
        try:
            return self.origin.get_status_dict()
        except AttributeError:
            return {}

    def get_channels(self, forceupdate=False):
        """Pull Channels from origin.

        Output a list.

        Don't pull more often than 12 hours.
        """

        # Refresh when never fetched, stale (>12h), or explicitly forced.
        updatelist = False
        if not self.list_update_time:
            updatelist = True
        elif hours_between_datetime(self.list_update_time, datetime.datetime.now()) > 12:
            updatelist = True
        elif forceupdate:
            updatelist = True

        if updatelist:
            channel_dict_list = self.origin.get_channels()
            channel_dict_list = self.verify_channel_info(channel_dict_list)
            self.append_channel_info(channel_dict_list)
            # Only log the count on the very first population.
            if not self.list_update_time:
                self.logger.info("Found " + str(len(self.list)) + " channels for " + str(self.config.dict["main"]["servicename"]))
            self.list_update_time = datetime.datetime.now()

        # Return the cached channel dicts as a plain list.
        channel_list = []
        for chandict in list(self.list.keys()):
            channel_list.append(self.list[chandict])
        return channel_list

    def get_station_list(self, base_url):
        """Build the HDHomeRun-style lineup list for the given base URL."""
        station_list = []

        for c in self.get_channels():
            station_list.append({
                                 'GuideNumber': c['number'],
                                 'GuideName': c['name'],
                                 'URL': self.get_fhdhr_stream_url(base_url, c['number']),
                                })
        return station_list

    def get_channel_stream(self, channel_number):
        """Resolve the stream URL for a channel number, or None if unknown.

        Asks the origin when no stream_url is cached; when the origin allows
        caching, the returned stream info is merged back into self.list.
        """
        if channel_number not in list(self.list.keys()):
            # Unknown number: refresh once, then give up.
            self.get_channels()
            if channel_number not in list(self.list.keys()):
                return None
        if "stream_url" not in list(self.list[channel_number].keys()):
            chandict = self.get_channel_dict("number", channel_number)
            streamlist, caching = self.origin.get_channel_stream(chandict, self.list)
            if caching:
                self.append_channel_info(streamlist)
                return self.list[channel_number]["stream_url"]
            else:
                # Non-cacheable: pull the URL straight from the origin's reply.
                chanstreamdict = next(item for item in streamlist if item["number"] == channel_number)
                return chanstreamdict["stream_url"]
        return self.list[channel_number]["stream_url"]

    def get_station_total(self):
        """Return the number of known channels."""
        return len(list(self.list.keys()))

    def get_channel_dict(self, keyfind, valfind):
        """Return the first channel dict whose keyfind entry equals valfind.

        NOTE(review): raises StopIteration when nothing matches — confirm
        callers only pass known values.
        """
        chanlist = self.get_channels()
        return next(item for item in chanlist if item[keyfind] == valfind)

    def get_fhdhr_stream_url(self, base_url, channel_number):
        """Build the fHDHR tuner URL for a channel number."""
        return ('%s/auto/v%s' %
                (base_url,
                 channel_number))

    def verify_channel_info(self, channel_dict_list):
        """Some Channel Information is Critical"""
        # Fill in callsign/id fallbacks and normalize channel numbers so every
        # channel has the keys the rest of the device layer relies on.
        cleaned_channel_dict_list = []
        for station_item in channel_dict_list:
            if "callsign" not in list(station_item.keys()):
                station_item["callsign"] = station_item["name"]
            if "id" not in list(station_item.keys()):
                station_item["id"] = station_item["name"]
            if "number" not in list(station_item.keys()):
                station_item["number"] = self.channel_numbers.get_number(station_item["id"])
            else:
                station_item["number"] = str(float(station_item["number"]))
            self.channel_numbers.set_number(station_item["id"], station_item["number"])
            cleaned_channel_dict_list.append(station_item)
        return cleaned_channel_dict_list

    def append_channel_info(self, channel_dict_list):
        """Update the list dict

        Take the channel dict list given.
        """
        for chan in channel_dict_list:
            if chan["number"] not in list(self.list.keys()):
                self.list[chan["number"]] = {}
            # Merge per-key so cached extras (e.g. stream_url) survive.
            for chankey in list(chan.keys()):
                self.list[chan["number"]][chankey] = chan[chankey]
        self.channel_order()

    def channel_order(self):
        """Verify the Channel Order"""
        # Re-sort by number string so iteration order is stable.
        self.list = OrderedDict(sorted(self.list.items()))
|
||||
@ -1,183 +0,0 @@
|
||||
import time
|
||||
|
||||
from fHDHR.tools import humanized_time
|
||||
|
||||
from .channel import Channel
|
||||
from .chan_ident import Channel_IDs
|
||||
|
||||
|
||||
class Channels():
    """Multi-origin channel registry.

    ``self.list`` is keyed origin -> fHDHR channel id -> Channel object.
    Channel ids are allocated via Channel_IDs and persisted through fhdhr.db.
    """

    def __init__(self, fhdhr, origins):
        self.fhdhr = fhdhr

        self.origins = origins

        self.id_system = Channel_IDs(fhdhr, origins)

        # origin name -> {channel_id: Channel}
        self.list = {}
        for origin in list(self.origins.origins_dict.keys()):
            self.list[origin] = {}

        self.get_db_channels()

    def get_channel_obj(self, keyfind, valfind, origin=None):
        """Return the first Channel whose keyfind attribute equals valfind.

        With an origin, only that origin is searched; otherwise all origins
        are scanned in order. Returns None when nothing matches.
        """
        if origin:
            origin = origin.lower()
            if keyfind == "number":
                matches = [self.list[origin][x].dict["id"] for x in list(self.list[origin].keys()) if self.list[origin][x].number == valfind]
            else:
                matches = [self.list[origin][x].dict["id"] for x in list(self.list[origin].keys()) if self.list[origin][x].dict[keyfind] == valfind]
            if len(matches):
                return self.list[origin][matches[0]]
        else:
            matches = []
            for origin in list(self.list.keys()):
                if keyfind == "number":
                    matches = [self.list[origin][x].dict["id"] for x in list(self.list[origin].keys()) if self.list[origin][x].number == valfind]
                else:
                    matches = [self.list[origin][x].dict["id"] for x in list(self.list[origin].keys()) if self.list[origin][x].dict[keyfind] == valfind]
                if len(matches):
                    return self.list[origin][matches[0]]
            # NOTE(review): this trailing check re-tests the last loop
            # iteration's `matches` and is redundant — the loop above already
            # returned for any non-empty match.
            if len(matches):
                return self.list[origin][matches[0]]
        return None

    def get_channel_list(self, keyfind, origin=None):
        """Return the keyfind attribute of every channel (one origin or all).

        NOTE(review): `matches[0]` raises IndexError when no origin produced a
        non-empty list, and only the first origin's values are returned —
        confirm this is intended.
        """
        if origin:
            if keyfind == "number":
                return [self.list[origin][x].number for x in [x["id"] for x in self.get_channels(origin)]]
            else:
                return [self.list[origin][x].dict[keyfind] for x in [x["id"] for x in self.get_channels(origin)]]
        else:
            matches = []
            for origin in list(self.list.keys()):
                if keyfind == "number":
                    next_match = [self.list[origin][x].number for x in [x["id"] for x in self.get_channels(origin)]]
                else:
                    next_match = [self.list[origin][x].dict[keyfind] for x in [x["id"] for x in self.get_channels(origin)]]
                if len(next_match):
                    matches.append(next_match)
            return matches[0]

    def get_channel_dict(self, keyfind, valfind, origin=None):
        """Return the matching channel's dict, or None when not found."""
        chan_obj = self.get_channel_obj(keyfind, valfind, origin)
        if chan_obj:
            return chan_obj.dict
        return None

    def set_channel_status(self, keyfind, valfind, updatedict, origin):
        # NOTE(review): raises AttributeError when get_channel_obj finds
        # nothing (returns None).
        self.get_channel_obj(keyfind, valfind, origin).set_status(updatedict)

    def set_channel_enablement_all(self, enablement, origin):
        # NOTE(review): self.list is keyed origin -> id, so
        # `self.list[fhdhr_id]` looks like it should be
        # `self.list[origin][fhdhr_id]`; set_enablement also takes a single
        # argument elsewhere in this file — this extra `origin` argument looks
        # like a bug. Confirm against callers.
        for fhdhr_id in [x["id"] for x in self.get_channels(origin)]:
            self.list[fhdhr_id].set_enablement(enablement, origin)

    def set_channel_enablement(self, keyfind, valfind, enablement, origin):
        self.get_channel_obj(keyfind, valfind, origin).set_enablement(enablement)

    def set_channel_favorite(self, keyfind, valfind, enablement, origin):
        self.get_channel_obj(keyfind, valfind, origin).set_favorite(enablement)

    def get_db_channels(self, origin=None):
        """Load persisted Channel objects for one origin (or all) from the db."""

        if not origin:
            origins_list = list(self.list.keys())
        else:
            origins_list = origin.lower()

        if isinstance(origins_list, str):
            origins_list = [origins_list]

        for origin in origins_list:
            self.fhdhr.logger.info("Checking for %s Channel information stored in the database." % origin)
            channel_ids = self.fhdhr.db.get_fhdhr_value("channels", "list", origin) or []
            if len(channel_ids):
                self.fhdhr.logger.info("Found %s existing channels in the database." % str(len(channel_ids)))
            for channel_id in channel_ids:
                channel_obj = Channel(self.fhdhr, self.id_system, origin=origin, channel_id=channel_id)
                # Re-read the id from the object in case it was re-assigned.
                channel_id = channel_obj.dict["id"]
                self.list[origin][channel_id] = channel_obj

    def save_db_channels(self, origin=None):
        """Persist the current channel-id list for one origin (or all)."""
        if not origin:
            origins_list = list(self.list.keys())
        else:
            origins_list = origin.lower()

        if isinstance(origins_list, str):
            origins_list = [origins_list]

        for origin in origins_list:
            channel_ids = [self.list[origin][x].dict["id"] for x in list(self.list[origin].keys())]
            self.fhdhr.db.set_fhdhr_value("channels", "list", channel_ids, origin)

    def get_channels(self, origin=None, forceupdate=False):
        """Pull Channels from origin.

        Output a list.

        Don't pull more often than 12 hours.
        """

        if not origin:
            origins_list = list(self.list.keys())
        else:
            # NOTE(review): `.lower().lower()` — the second .lower() is
            # redundant (harmless, but looks like a paste error).
            origins_list = origin.lower().lower()

        if isinstance(origins_list, str):
            origins_list = [origins_list]

        return_chan_list = []
        for origin in origins_list:

            # Lazily hydrate from the db on first access.
            if not len(list(self.list[origin].keys())):
                self.get_db_channels(origin=origin)

            if not forceupdate:
                return_chan_list.extend([self.list[origin][x].dict for x in list(self.list[origin].keys())])

            else:

                # ids already known for this origin, as strings for comparison.
                channel_origin_id_list = [str(self.list[origin][x].dict["origin_id"]) for x in list(self.list[origin].keys())]

                self.fhdhr.logger.info("Performing Channel Scan for %s." % origin)

                channel_dict_list = self.origins.origins_dict[origin].get_channels()
                self.fhdhr.logger.info("Found %s channels for %s." % (len(channel_dict_list), origin))

                self.fhdhr.logger.info("Performing Channel Import, This can take some time, Please wait.")

                newchan = 0
                chan_scan_start = time.time()
                for channel_info in channel_dict_list:

                    chan_existing = str(channel_info["id"]) in channel_origin_id_list

                    if chan_existing:
                        channel_obj = self.get_channel_obj("origin_id", channel_info["id"], origin)
                    else:
                        channel_obj = Channel(self.fhdhr, self.id_system, origin, origin_id=channel_info["id"])

                    channel_id = channel_obj.dict["id"]
                    # basics() merges the freshly scanned info and persists it.
                    channel_obj.basics(channel_info)
                    if not chan_existing:
                        self.list[origin][channel_id] = channel_obj
                        newchan += 1

                self.fhdhr.logger.info("%s Channel Import took %s" % (origin, humanized_time(time.time() - chan_scan_start)))

                if not newchan:
                    newchan = "no"
                self.fhdhr.logger.info("Found %s NEW channels for %s." % (newchan, origin))

                self.fhdhr.logger.info("Total %s Channel Count: %s" % (origin, len(self.list[origin].keys())))
                self.save_db_channels(origin=origin)

                self.fhdhr.db.set_fhdhr_value("channels", "scanned_time", time.time(), origin)
                return_chan_list.extend([self.list[origin][x].dict for x in list(self.list[origin].keys())])

        return return_chan_list

    def get_channel_stream(self, stream_args, origin):
        """Delegate stream resolution for stream_args["channel"] to the origin."""
        return self.origins.origins_dict[origin].get_channel_stream(self.get_channel_dict("number", stream_args["channel"]), stream_args)
|
||||
@ -1,46 +0,0 @@
|
||||
import uuid
|
||||
|
||||
|
||||
class Channel_IDs():
    """Allocates stable fHDHR channel ids (UUIDs) and channel numbers.

    Backed by fhdhr.db: the id list lives under ("channels", "list", origin)
    and each channel's info under (channel_id, "dict", origin).
    """

    def __init__(self, fhdhr, origins):
        self.fhdhr = fhdhr
        self.origins = origins

    def get(self, origin_id, origin):
        """Return the existing fHDHR id mapped to origin_id, or assign a new one."""
        existing_ids = self.fhdhr.db.get_fhdhr_value("channels", "list", origin) or []
        existing_channel_info = [self.fhdhr.db.get_fhdhr_value(channel_id, "dict", origin) or {} for channel_id in existing_ids]
        for existing_channel in existing_channel_info:
            # .get() guards the `or {}` fallback above: a missing db entry
            # previously raised KeyError here.
            if existing_channel.get("origin_id") == origin_id:
                return existing_channel["id"]
        return self.assign(origin)

    def assign(self, origin):
        """Mint a new unique UUID id, persist it in the id list, and return it."""
        existing_ids = self.fhdhr.db.get_fhdhr_value("channels", "list", origin) or []
        channel_id = None
        while not channel_id:
            unique_id = str(uuid.uuid4())
            if unique_id not in existing_ids:
                channel_id = unique_id
        existing_ids.append(channel_id)
        self.fhdhr.db.set_fhdhr_value("channels", "list", existing_ids, origin)
        return channel_id

    def get_number(self, channel_id, origin):
        """Return the stored number string for channel_id, or the lowest free one.

        Free numbers are searched as str(float(i)) for i in 1000..1999.
        """
        existing_ids = self.fhdhr.db.get_fhdhr_value("channels", "list", origin) or []
        existing_channel_info = [self.fhdhr.db.get_fhdhr_value(channel_id, "dict", origin) or {} for channel_id in existing_ids]
        matches = [existing_channel["number"] for existing_channel in existing_channel_info if existing_channel.get("id") == channel_id]
        if matches:
            # Bug fix: the original returned the whole match LIST (e.g.
            # ["1000.0"]) instead of the number string itself.
            return matches[0]

        used_numbers = []
        for existing_channel in existing_channel_info:
            # .get() guards empty dicts from the `or {}` fallback above.
            if existing_channel.get("subnumber"):
                number = "%s.%s" % (existing_channel["number"], existing_channel["subnumber"])
            else:
                number = existing_channel.get("number")
            used_numbers.append(number)

        for i in range(1000, 2000):
            if str(float(i)) not in used_numbers:
                break
        return str(float(i))
|
||||
@ -1,197 +0,0 @@
|
||||
import time
|
||||
|
||||
|
||||
class Channel():
    """A single channel, persisted as a dict in fhdhr.db under (id, "dict", origin).

    Construction loads (or creates) the persisted dict, back-fills any missing
    keys, and writes it back immediately.
    """

    def __init__(self, fhdhr, id_system, origin, origin_id=None, channel_id=None):
        self.fhdhr = fhdhr
        self.origin = origin

        self.id_system = id_system

        # Resolve an fHDHR channel id: reuse a mapping for origin_id when
        # available, otherwise mint a fresh one.
        if not channel_id:
            if origin_id:
                channel_id = id_system.get(origin_id, origin)
            else:
                channel_id = id_system.assign(origin)
        self.channel_id = channel_id

        self.dict = self.fhdhr.db.get_fhdhr_value(str(channel_id), "dict", self.origin) or self.default_dict
        self.verify_dict()

        self.fhdhr.db.set_fhdhr_value(self.dict["id"], "dict", self.dict, self.origin)

    @property
    def number(self):
        # Display number: "number.subnumber" when a subnumber exists.
        if self.dict["subnumber"]:
            return "%s.%s" % (self.dict["number"], self.dict["subnumber"])
        else:
            return self.dict["number"]

    @property
    def thumbnail(self):
        # Preference order: explicit thumbnail, origin-supplied thumbnail,
        # generated placeholder. The string "none" (any case) is treated the
        # same as no thumbnail at all.
        if str(self.dict["thumbnail"]).lower() in ["none"]:
            return self.generic_image_url
        elif self.dict["thumbnail"]:
            return self.dict["thumbnail"]
        elif self.dict["origin_thumbnail"]:
            return self.dict["origin_thumbnail"]
        else:
            return self.generic_image_url

    @property
    def epgdict(self):
        # Minimal channel representation for the EPG layer; "listing" is
        # filled in elsewhere.
        return {
                 "callsign": self.dict["callsign"],
                 "name": self.dict["name"],
                 "number": self.number,
                 "id": self.dict["origin_id"],
                 "thumbnail": self.thumbnail,
                 "listing": [],
                 }

    def verify_dict(self):
        """Development Purposes
        Add new Channel dict keys
        """
        # Back-fill keys added in newer versions, and split a dotted number
        # into number/subnumber.
        default_dict = self.default_dict
        for key in list(default_dict.keys()):
            if key not in list(self.dict.keys()):
                self.dict[key] = default_dict[key]
        if self.dict["number"]:
            if "." in self.dict["number"]:
                self.dict["subnumber"] = self.dict["number"].split(".")[1]
                self.dict["number"] = self.dict["number"].split(".")[0]

    def basics(self, channel_info):
        """Some Channel Information is Critical"""
        # Merge a freshly scanned channel_info dict: origin_* keys always track
        # the scan, while the user-facing keys only take the origin value when
        # not already set. Persists the result at the end.

        if "name" not in list(channel_info.keys()):
            channel_info["name"] = self.dict["id"]
        elif not channel_info["name"]:
            channel_info["name"] = self.dict["id"]
        self.dict["origin_name"] = channel_info["name"]
        if not self.dict["name"]:
            self.dict["name"] = self.dict["origin_name"]

        if "id" not in list(channel_info.keys()):
            channel_info["id"] = channel_info["name"]
        elif not channel_info["id"]:
            channel_info["id"] = channel_info["name"]
        self.dict["origin_id"] = channel_info["id"]

        if "callsign" not in list(channel_info.keys()):
            channel_info["callsign"] = channel_info["name"]
        elif not channel_info["callsign"]:
            channel_info["callsign"] = channel_info["name"]
        self.dict["origin_callsign"] = channel_info["callsign"]
        if not self.dict["callsign"]:
            self.dict["callsign"] = self.dict["origin_callsign"]

        if "tags" not in list(channel_info.keys()):
            channel_info["tags"] = []
        elif not channel_info["tags"]:
            channel_info["tags"] = []
        self.dict["origin_tags"] = channel_info["tags"]
        if not self.dict["tags"]:
            self.dict["tags"] = self.dict["origin_tags"]

        if "number" not in list(channel_info.keys()):
            channel_info["number"] = self.id_system.get_number(channel_info["id"], self.origin)
        elif not channel_info["number"]:
            channel_info["number"] = self.id_system.get_number(channel_info["id"], self.origin)
        self.dict["origin_number"] = str(channel_info["number"])
        if not self.dict["number"]:
            self.dict["number"] = self.dict["origin_number"].split(".")[0]
            try:
                self.dict["subnumber"] = self.dict["origin_number"].split(".")[1]
            except IndexError:
                self.dict["subnumber"] = None
        else:
            if "." in self.dict["number"]:
                self.dict["subnumber"] = self.dict["number"].split(".")[1]
                self.dict["number"] = self.dict["number"].split(".")[0]

        if "thumbnail" not in list(channel_info.keys()):
            channel_info["thumbnail"] = None
        self.dict["origin_thumbnail"] = channel_info["thumbnail"]
        if not self.dict["thumbnail"]:
            self.dict["thumbnail"] = self.dict["origin_thumbnail"]

        if "HD" not in list(channel_info.keys()):
            channel_info["HD"] = 0
        self.dict["HD"] = channel_info["HD"]

        # An origin-supplied "enabled" only applies on first import (before
        # "created" is stamped), so it never overrides a user's choice.
        if "enabled" in list(channel_info.keys()):
            if "created" not in list(self.dict.keys()):
                self.dict["enabled"] = channel_info["enabled"]

        if "created" not in list(self.dict.keys()):
            self.dict["created"] = time.time()

        self.fhdhr.db.set_fhdhr_value(self.dict["id"], "dict", self.dict, self.origin)

    @property
    def default_dict(self):
        # Template for a brand-new channel dict.
        return {
                "id": str(self.channel_id), "origin_id": None,
                "name": None, "origin_name": None,
                "callsign": None, "origin_callsign": None,
                "number": None, "subnumber": None, "origin_number": None,
                "tags": [], "origin_tags": [],
                "thumbnail": None, "origin_thumbnail": None,
                "enabled": True, "favorite": 0,
                "HD": 0,
                }

    def destroy(self):
        """Remove this channel's persisted dict and drop its id from the list."""
        self.fhdhr.db.delete_fhdhr_value(self.dict["id"], "dict", self.origin)
        # NOTE(review): this read omits the origin argument that the matching
        # set below passes — it may read a different namespace than it writes.
        channel_ids = self.fhdhr.db.get_fhdhr_value("channels", "list") or []
        if self.dict["id"] in channel_ids:
            channel_ids.remove(self.dict["id"])
        self.fhdhr.db.set_fhdhr_value("channels", "list", channel_ids, self.origin)

    def set_status(self, updatedict):
        """Apply updatedict onto self.dict (numbers coerced to str) and persist."""
        for key in list(updatedict.keys()):
            if key == "number":
                updatedict[key] = str(updatedict[key])
            self.dict[key] = updatedict[key]
        self.fhdhr.db.set_fhdhr_value(self.dict["id"], "dict", self.dict, self.origin)

    @property
    def generic_image_url(self):
        # Placeholder image generated by the images API.
        return "/api/images?method=generate&type=channel&message=%s" % self.number

    @property
    def api_stream_url(self):
        return '/api/tuners?method=stream&stream_method=%s&channel=%s&origin=%s' % (self.fhdhr.origins.origins_dict[self.origin].stream_method, self.dict["id"], self.origin)

    @property
    def api_m3u_url(self):
        return '/api/m3u?method=get&channel=%s&origin=%s' % (self.dict["id"], self.origin)

    def set_favorite(self, enablement):
        """Set favorite from a "+" / "-" token and persist.

        NOTE(review): persists under the "info" key while __init__ loads from
        "dict" — this change may not survive a restart. Confirm intended key.
        """
        if enablement == "+":
            self.dict["favorite"] = 1
        elif enablement == "-":
            self.dict["favorite"] = 0
        self.fhdhr.db.set_fhdhr_value(self.dict["id"], "info", self.dict, self.origin)

    def set_enablement(self, enablement):
        """Set enabled from "enable"/"disable"/"toggle" and persist.

        NOTE(review): same "info" vs "dict" key mismatch as set_favorite.
        """
        if enablement == "disable":
            self.dict["enabled"] = False
        elif enablement == "enable":
            self.dict["enabled"] = True
        elif enablement == "toggle":
            if self.dict["enabled"]:
                self.dict["enabled"] = False
            else:
                self.dict["enabled"] = True
        self.fhdhr.db.set_fhdhr_value(self.dict["id"], "info", self.dict, self.origin)

    def __getattr__(self, name):
        ''' will only get called for undefined attributes '''
        # Fall back to the persisted dict for unknown attribute reads.
        if name in list(self.dict.keys()):
            return self.dict[name]
        else:
            return None
|
||||
140
fHDHR/device/cluster.py
Normal file
@ -0,0 +1,140 @@
|
||||
import urllib.parse
|
||||
from collections import OrderedDict
|
||||
|
||||
|
||||
class fHDHR_Cluster():
    """Coordinates a cluster of fHDHR instances that mirror each other's member list.

    Membership is persisted in the fHDHR database under ("cluster", "dict")
    and synchronized with peers via their /api/cluster HTTP endpoints.
    """

    def __init__(self, settings, ssdp, logger, db, web):
        self.config = settings
        self.logger = logger
        self.ssdp = ssdp
        self.db = db
        self.web = web

        self.friendlyname = self.config.dict["fhdhr"]["friendlyname"]
        # Advertised base URL of this instance; stays None when no
        # discovery_address is configured.
        self.location = None
        self.location_url = None
        if settings.dict["fhdhr"]["discovery_address"]:
            self.location = ('http://' + settings.dict["fhdhr"]["discovery_address"] + ':' +
                             str(settings.dict["fhdhr"]["port"]))
            # URL-quoted form, passed as a query-string parameter to peers.
            self.location_url = urllib.parse.quote(self.location)

        self.startup_sync()

    def cluster(self):
        """Return the persisted cluster dict, falling back to a single-member default."""
        return self.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()

    def get_list(self):
        """Return known locations (minus self) flagged Joined, plus unjoined SSDP-detected peers."""
        cluster = self.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        return_dict = {}
        for location in list(cluster.keys()):
            if location != self.location:
                return_dict[location] = {
                    "Joined": True
                    }

        # Locations seen via SSDP but not yet part of the cluster.
        detected_list = self.ssdp.detect_method.get()
        for location in detected_list:
            if location not in list(cluster.keys()):
                return_dict[location] = {
                    "Joined": False
                    }
        return_dict = OrderedDict(sorted(return_dict.items()))
        return return_dict

    def default_cluster(self):
        """Minimal cluster dict containing only this instance."""
        defdict = {}
        defdict[self.location] = {
            "base_url": self.location,
            "name": self.friendlyname
            }
        return defdict

    def startup_sync(self):
        """Verify at startup that peers still list us; leave if a reachable peer dropped us."""
        cluster = self.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        for location in list(cluster.keys()):
            if location != self.location:
                sync_url = location + "/api/cluster?method=get"
                try:
                    sync_open = self.web.session.get(sync_url)
                    retrieved_cluster = sync_open.json()
                    if self.location not in list(retrieved_cluster.keys()):
                        # A peer no longer lists us: reset to a solo cluster.
                        return self.leave()
                except self.web.exceptions.ConnectionError:
                    self.logger.error("Unreachable: " + location)

    def leave(self):
        """Reset local membership to a single-instance cluster."""
        self.db.set_fhdhr_value("cluster", "dict", self.default_cluster())

    def disconnect(self):
        """Ask every peer to remove us, then leave the cluster locally."""
        cluster = self.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        for location in list(cluster.keys()):
            if location != self.location:
                sync_url = location + "/api/cluster?method=del&location=" + self.location
                try:
                    self.web.session.get(sync_url)
                except self.web.exceptions.ConnectionError:
                    self.logger.error("Unreachable: " + location)
        self.leave()

    def sync(self, location):
        """Replace the local cluster dict with the one held by `location`."""
        sync_url = location + "/api/cluster?method=get"
        try:
            sync_open = self.web.session.get(sync_url)
            self.db.set_fhdhr_value("cluster", "dict", sync_open.json())
        except self.web.exceptions.ConnectionError:
            self.logger.error("Unreachable: " + location)

    def push_sync(self):
        """Tell every peer to pull the cluster dict from this instance."""
        cluster = self.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        for location in list(cluster.keys()):
            if location != self.location:
                sync_url = location + "/api/cluster?method=sync&location=" + self.location_url
                try:
                    self.web.session.get(sync_url)
                except self.web.exceptions.ConnectionError:
                    self.logger.error("Unreachable: " + location)

    def add(self, location):
        """Join `location` into the cluster, merge its member view, and propagate.

        The new location is rolled back out of the cluster dict if it is
        unreachable at any step.
        """
        cluster = self.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        if location not in list(cluster.keys()):
            cluster[location] = {"base_url": location}

            location_info_url = location + "/discover.json"
            try:
                location_info_req = self.web.session.get(location_info_url)
            except self.web.exceptions.ConnectionError:
                self.logger.error("Unreachable: " + location)
                del cluster[location]
                self.db.set_fhdhr_value("cluster", "dict", cluster)
                return
            location_info = location_info_req.json()
            cluster[location]["name"] = location_info["FriendlyName"]

            cluster_info_url = location + "/api/cluster?method=get"
            try:
                cluster_info_req = self.web.session.get(cluster_info_url)
            except self.web.exceptions.ConnectionError:
                self.logger.error("Unreachable: " + location)
                del cluster[location]
                self.db.set_fhdhr_value("cluster", "dict", cluster)
                return
            cluster_info = cluster_info_req.json()
            # Merge any members the new peer knows about that we do not.
            for cluster_key in list(cluster_info.keys()):
                if cluster_key not in list(cluster.keys()):
                    cluster[cluster_key] = cluster_info[cluster_key]

            self.db.set_fhdhr_value("cluster", "dict", cluster)
            self.push_sync()

    def remove(self, location):
        """Drop `location` from the cluster, notify it, and propagate the change."""
        cluster = self.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        if location in list(cluster.keys()):
            del cluster[location]
            sync_url = location + "/api/cluster?method=leave"
            try:
                self.web.session.get(sync_url)
            except self.web.exceptions.ConnectionError:
                self.logger.error("Unreachable: " + location)
            # NOTE(review): push_sync() runs before the pruned dict is saved,
            # so peers pulling immediately may still see the removed member —
            # confirm whether the save should happen first.
            self.push_sync()
            self.db.set_fhdhr_value("cluster", "dict", cluster)
|
||||
153
fHDHR/device/epg.py
Normal file
@ -0,0 +1,153 @@
|
||||
import os
|
||||
import time
|
||||
import datetime
|
||||
from collections import OrderedDict
|
||||
|
||||
# Discover the available EPG backends by scanning the sibling ``epgtypes``
# package: every non-underscore .py file is treated as one EPG method.
epgtype_list = []
device_dir = os.path.dirname(__file__)
for entry in os.scandir(device_dir + '/epgtypes'):
    if entry.is_file():
        if entry.name[0] != '_':
            # Strip the ".py" suffix to get the module/method name.
            epgtype_list.append(str(entry.name[:-3]))
            # The [:-3] slice trims ".py" from the END of the generated
            # statement, yielding e.g. "from .epgtypes import blocks".
            impstring = f'from .epgtypes import {entry.name}'[:-3]
            exec(impstring)
|
||||
|
||||
|
||||
class EPG():
    """Aggregates program-guide data from one or more EPG backends.

    Guide data is cached in memory (``self.epgdict``, keyed by method name)
    and persisted in the fHDHR database under ("epg_dict", method). The
    available backend modules come from the module-level ``epgtype_list``
    scan and are attached as attributes by ``epg_method_selfadd``.
    """

    def __init__(self, settings, channels, origin, logger, web, db):
        self.config = settings
        self.logger = logger
        self.origin = origin
        self.channels = channels
        self.web = web
        self.db = db

        # In-memory guide cache, keyed by EPG method name.
        self.epgdict = {}

        self.epg_method_selfadd()

        self.epg_methods = self.config.dict["epg"]["method"]
        self.def_method = self.config.dict["epg"]["def_method"]
        # Per-method refresh interval; a method-specific "update_frequency"
        # overrides the global [epg] update_frequency.
        self.sleeptime = {}
        for epg_method in self.epg_methods:
            if epg_method in list(self.config.dict.keys()):
                if "update_frequency" in list(self.config.dict[epg_method].keys()):
                    self.sleeptime[epg_method] = self.config.dict[epg_method]["update_frequency"]
            if epg_method not in list(self.sleeptime.keys()):
                self.sleeptime[epg_method] = self.config.dict["epg"]["update_frequency"]

    def whats_on_now(self, channel):
        """Return the channel's guide entry trimmed to the currently-airing listing, or None.

        Listing timestamps are expected in '%Y%m%d%H%M%S +0000' format.
        """
        epgdict = self.get_epg()
        listings = epgdict[channel]["listing"]
        for listing in listings:
            nowtime = datetime.datetime.utcnow()
            start_time = datetime.datetime.strptime(listing["time_start"], '%Y%m%d%H%M%S +0000')
            end_time = datetime.datetime.strptime(listing["time_end"], '%Y%m%d%H%M%S +0000')
            if start_time <= nowtime <= end_time:
                epgitem = epgdict[channel].copy()
                epgitem["listing"] = [listing]
                return epgitem
        return None

    def whats_on_allchans(self):
        """Return whats_on_now() results for every channel that has a current listing."""
        channel_guide_list = []
        for channel in self.channels.get_channels():
            whatson = self.whats_on_now(channel["number"])
            if whatson:
                channel_guide_list.append(whatson)
        return channel_guide_list

    def get_epg(self, method=None):
        """Return guide data for `method` (default: configured method), loading lazily.

        Falls back to the "origin" method when the requested one is invalid.
        Cache priority: in-memory dict, then database, then a fresh update().
        """
        if not method:
            method = self.def_method
        if (method == self.config.dict["main"]["dictpopname"] or
           method not in self.config.dict["main"]["valid_epg_methods"]):
            method = "origin"

        if method not in list(self.epgdict.keys()):
            epgdict = self.db.get_fhdhr_value("epg_dict", method) or None
            if not epgdict:
                self.update(method)
                self.epgdict[method] = self.db.get_fhdhr_value("epg_dict", method) or {}
            else:
                self.epgdict[method] = epgdict
            return self.epgdict[method]
        else:
            return self.epgdict[method]

    def get_thumbnail(self, itemtype, itemid):
        """Return the thumbnail URL for a channel or a content (program) item."""
        if itemtype == "channel":
            chandict = self.find_channel_dict(itemid)
            return chandict["thumbnail"]
        elif itemtype == "content":
            progdict = self.find_program_dict(itemid)
            return progdict["thumbnail"]
        return None

    def find_channel_dict(self, channel_id):
        """Return the guide entry whose "id" matches `channel_id` (raises StopIteration if absent)."""
        epgdict = self.get_epg()
        channel_list = []
        for channel in list(epgdict.keys()):
            channel_list.append(epgdict[channel])
        return next(item for item in channel_list if item["id"] == channel_id)

    def find_program_dict(self, event_id):
        """Return the listing whose "id" matches `event_id` (raises StopIteration if absent)."""
        epgdict = self.get_epg()
        event_list = []
        for channel in list(epgdict.keys()):
            event_list.extend(epgdict[channel]["listing"])
        return next(item for item in event_list if item["id"] == event_id)

    def epg_method_selfadd(self):
        """Instantiate each discovered epgtype module as an attribute of this object.

        e.g. the "blocks" module becomes self.blocks = blocks.blocksEPG(...).
        """
        for method in epgtype_list:
            exec("%s = %s" % ("self." + str(method), str(method) + "." + str(method) + "EPG(self.config, self.channels, self.logger, self.web, self.db)"))

    def update(self, method=None):
        """Rebuild and persist the guide cache for `method`.

        Channel numbers are normalized to float strings, channels are
        sorted, and each channel's listings are sorted by start time.
        """
        if not method:
            method = self.def_method
        if (method == self.config.dict["main"]["dictpopname"] or
           method not in self.config.dict["main"]["valid_epg_methods"]):
            method = "origin"

        epgtypename = method
        if method in [self.config.dict["main"]["dictpopname"], "origin"]:
            epgtypename = self.config.dict["main"]["dictpopname"]

        self.logger.info("Updating " + epgtypename + " EPG cache.")
        method_to_call = getattr(self, method)
        func_to_call = getattr(method_to_call, 'update_epg')
        # The origin backend needs the channel list; plugin backends do not.
        if method == 'origin':
            programguide = func_to_call(self.channels)
        else:
            programguide = func_to_call()

        # Normalize channel keys to float strings (e.g. "2" -> "2.0").
        for chan in list(programguide.keys()):
            floatnum = str(float(chan))
            programguide[floatnum] = programguide.pop(chan)
            programguide[floatnum]["number"] = floatnum

        programguide = OrderedDict(sorted(programguide.items()))

        for cnum in programguide:
            programguide[cnum]["listing"] = sorted(programguide[cnum]["listing"], key=lambda i: i['time_start'])

        # FIX: store under the method key; assigning the guide directly to
        # self.epgdict clobbered the per-method cache that get_epg() indexes
        # with self.epgdict[method].
        self.epgdict[method] = programguide
        self.db.set_fhdhr_value("epg_dict", method, programguide)
        self.db.set_fhdhr_value("update_time", method, time.time())
        self.logger.info("Wrote " + epgtypename + " EPG cache.")

    def run(self):
        """Update all configured methods, then refresh each on its own interval.

        Wakes hourly; a method is refreshed once its update_frequency has
        elapsed. Exits quietly on KeyboardInterrupt.
        """
        for epg_method in self.epg_methods:
            self.update(epg_method)
        try:
            while True:
                for epg_method in self.epg_methods:
                    if time.time() >= (self.db.get_fhdhr_value("update_time", epg_method) + self.sleeptime[epg_method]):
                        self.update(epg_method)
                time.sleep(3600)
        except KeyboardInterrupt:
            pass
|
||||
@ -1,318 +0,0 @@
|
||||
import time
|
||||
import datetime
|
||||
import threading
|
||||
|
||||
from fHDHR.tools import channel_sort
|
||||
|
||||
from .blocks import blocksEPG
|
||||
|
||||
|
||||
class EPG():
    """Aggregates guide data from plugin EPG backends and per-origin Blocks backends.

    Guide data is cached in memory (``self.epgdict``, keyed by method) and
    persisted in the fHDHR database under ("epg_dict", method). A background
    thread (``run``) triggers periodic refreshes via the web API.
    """

    def __init__(self, fhdhr, channels, origins):
        self.fhdhr = fhdhr

        self.origins = origins
        self.channels = channels

        # In-memory guide cache, keyed by EPG method name.
        self.epgdict = {}

        self.epg_methods = self.fhdhr.config.dict["epg"]["method"] or []
        self.valid_epg_methods = [x for x in list(self.fhdhr.config.dict["epg"]["valid_methods"].keys()) if x and x not in [None, "None"]]

        # Shared Blocks helper (origin=None) used for gap-filling.
        self.blocks = blocksEPG(self.fhdhr, self.channels, self.origins, None)
        self.epg_handling = {}
        self.epg_method_selfadd()

        self.def_method = self.fhdhr.config.dict["epg"]["def_method"]
        # Per-method refresh interval; method-specific config overrides the
        # global [epg] update_frequency.
        self.sleeptime = {}
        for epg_method in self.epg_methods:
            if epg_method in list(self.fhdhr.config.dict.keys()):
                if "update_frequency" in list(self.fhdhr.config.dict[epg_method].keys()):
                    self.sleeptime[epg_method] = self.fhdhr.config.dict[epg_method]["update_frequency"]
            if epg_method not in list(self.sleeptime.keys()):
                self.sleeptime[epg_method] = self.fhdhr.config.dict["epg"]["update_frequency"]

        self.epg_update_url = "/api/epg?method=update"

        self.fhdhr.threads["epg"] = threading.Thread(target=self.run)

    def clear_epg_cache(self, method=None):
        """Clear in-memory, backend, and database caches for `method` (default method if unset)."""
        if not method:
            # NOTE(review): unlike the sibling methods, this branch never
            # assigns method = self.def_method; it only works because the
            # next check catches method=None — confirm and align.
            if not self.def_method:
                return
        if method not in self.valid_epg_methods:
            if not self.def_method:
                return
            method = self.def_method

        self.fhdhr.logger.info("Clearing %s EPG cache." % method)

        if hasattr(self.epg_handling[method], 'clear_cache'):
            self.epg_handling[method].clear_cache()

        if method in list(self.epgdict.keys()):
            del self.epgdict[method]

        self.fhdhr.db.delete_fhdhr_value("epg_dict", method)

    def whats_on_now(self, channel_number, method=None, chan_obj=None, chan_dict=None):
        """Return the channel's guide entry trimmed to the currently-airing listing.

        Listing timestamps are normalized in place from xmltv-style strings
        to epoch floats. Falls back to an "Unavailable" placeholder listing
        when nothing matches the current time.
        """
        nowtime = time.time()
        epgdict = self.get_epg(method)
        if channel_number not in list(epgdict.keys()):
            # Unknown channel: fabricate an empty guide entry.
            epgdict[channel_number] = {
                "callsign": "",
                "name": "",
                "number": str(channel_number),
                "id": "",
                "thumbnail": "",
                "listing": []
                }

        for listing in epgdict[channel_number]["listing"]:
            # Normalize legacy xmltv timestamp strings to epoch floats.
            for time_item in ["time_start", "time_end"]:
                time_value = listing[time_item]
                if str(time_value).endswith("+00:00"):
                    listing[time_item] = datetime.datetime.strptime(time_value, '%Y%m%d%H%M%S +00:00').timestamp()
                elif str(time_value).endswith("+0000"):
                    listing[time_item] = datetime.datetime.strptime(time_value, '%Y%m%d%H%M%S +0000').timestamp()
                else:
                    listing[time_item] = int(time_value)
            if int(listing["time_start"]) <= nowtime <= int(listing["time_end"]):
                epgitem = epgdict[channel_number].copy()
                epgitem["listing"] = [listing]
                return epgitem
        epgitem = epgdict[channel_number].copy()
        # NOTE(review): the chan_obj/chan_dict parameters are accepted but
        # hard-coded to None here, so the placeholder never gets the
        # channel's thumbnail — confirm whether they should be passed through.
        epgitem["listing"] = [self.blocks.empty_listing(chan_obj=None, chan_dict=None)]
        return epgitem

    def whats_on_allchans(self, method=None):
        """Return a dict of current listings for every channel in the guide.

        For origin-backed methods, channel metadata is refreshed from the
        channel objects before the lookup.
        """
        if not method:
            if not self.def_method:
                return
            method = self.def_method
        if method not in self.valid_epg_methods:
            if not self.def_method:
                return
            method = self.def_method

        channel_guide_dict = {}
        epgdict = self.get_epg(method)
        epgdict = epgdict.copy()
        for c in list(epgdict.keys()):
            if method in [origin for origin in list(self.origins.origins_dict.keys())]:
                # Re-key by channel number and refresh metadata from the
                # authoritative channel object.
                chan_obj = self.channels.get_channel_obj("origin_id", epgdict[c]["id"])
                channel_number = chan_obj.number
                epgdict[channel_number] = epgdict.pop(c)
                epgdict[channel_number]["name"] = chan_obj.dict["name"]
                epgdict[channel_number]["callsign"] = chan_obj.dict["callsign"]
                epgdict[channel_number]["number"] = chan_obj.number
                epgdict[channel_number]["id"] = chan_obj.dict["origin_id"]
                epgdict[channel_number]["thumbnail"] = chan_obj.thumbnail
            else:
                chan_obj = None
                channel_number = c
            whatson = self.whats_on_now(channel_number, method, chan_dict=epgdict, chan_obj=chan_obj)
            if whatson:
                channel_guide_dict[channel_number] = whatson
        return channel_guide_dict

    def get_epg(self, method=None):
        """Return guide data for `method`, updating from the backend when not cached."""
        if not method:
            if not self.def_method:
                return
            method = self.def_method
        if method not in self.valid_epg_methods:
            if not self.def_method:
                return
            method = self.def_method

        if method in list(self.epgdict.keys()):
            return self.epgdict[method]

        self.update(method)
        self.epgdict[method] = self.fhdhr.db.get_fhdhr_value("epg_dict", method) or {}
        return self.epgdict[method]

    def get_thumbnail(self, itemtype, itemid):
        """Return the thumbnail URL for a channel or a content (program) item."""
        if itemtype == "channel":
            chandict = self.find_channel_dict(itemid)
            return chandict["thumbnail"]
        elif itemtype == "content":
            progdict = self.find_program_dict(itemid)
            return progdict["thumbnail"]
        return None

    def find_channel_dict(self, channel_id):
        """Return the guide entry whose "id" matches (raises StopIteration if absent)."""
        epgdict = self.get_epg()
        channel_list = [epgdict[x] for x in list(epgdict.keys())]
        return next(item for item in channel_list if item["id"] == channel_id) or None

    def find_program_dict(self, event_id):
        """Return the listing whose "id" matches (raises StopIteration if absent)."""
        epgdict = self.get_epg()
        event_list = []
        for channel in list(epgdict.keys()):
            event_list.extend(epgdict[channel]["listing"])
        return next(item for item in event_list if item["id"] == event_id) or None

    def epg_method_selfadd(self):
        """Register alt_epg plugin backends, then a Blocks backend per origin."""
        for plugin_name in list(self.fhdhr.plugins.plugins.keys()):
            if self.fhdhr.plugins.plugins[plugin_name].type == "alt_epg":
                method = self.fhdhr.plugins.plugins[plugin_name].name.lower()
                self.epg_handling[method] = self.fhdhr.plugins.plugins[plugin_name].Plugin_OBJ(self.channels, self.fhdhr.plugins.plugins[plugin_name].plugin_utils)
        for origin in list(self.origins.origins_dict.keys()):
            if origin.lower() not in list(self.epg_handling.keys()):
                self.epg_handling[origin.lower()] = blocksEPG(self.fhdhr, self.channels, self.origins, origin)
                self.fhdhr.config.register_valid_epg_method(origin, "Blocks")
                self.valid_epg_methods.append(origin.lower())

    def update(self, method=None):
        """Rebuild guide data for `method`: fetch, gap-fill with Blocks, sort, persist."""
        if not method:
            if not self.def_method:
                return
            method = self.def_method
        if method not in self.valid_epg_methods:
            if not self.def_method:
                return
            method = self.def_method

        self.fhdhr.logger.info("Updating %s EPG cache." % method)
        programguide = self.epg_handling[method].update_epg()

        # sort the channel listings by time stamp
        for cnum in list(programguide.keys()):
            programguide[cnum]["listing"] = sorted(programguide[cnum]["listing"], key=lambda i: i['time_start'])

        # Generate Block periods for between EPG data, if missing
        clean_prog_guide = {}
        desired_start_time = (datetime.datetime.today() + datetime.timedelta(days=self.fhdhr.config.dict["epg"]["reverse_days"])).timestamp()
        desired_end_time = (datetime.datetime.today() + datetime.timedelta(days=self.fhdhr.config.dict["epg"]["forward_days"])).timestamp()
        for cnum in list(programguide.keys()):

            if cnum not in list(clean_prog_guide.keys()):
                clean_prog_guide[cnum] = programguide[cnum].copy()
                clean_prog_guide[cnum]["listing"] = []

            if method in [origin for origin in list(self.origins.origins_dict.keys())]:
                chan_obj = self.channels.get_channel_obj("origin_id", programguide[cnum]["id"])
            else:
                chan_obj = None

            # Generate Blocks for Channels containing No Listings
            if not len(programguide[cnum]["listing"]):
                timestamps = self.blocks.timestamps_between(desired_start_time, desired_end_time)
                clean_prog_dicts = self.blocks.empty_channel_epg(timestamps, chan_dict=programguide[cnum], chan_obj=chan_obj)
                clean_prog_guide[cnum]["listing"].extend(clean_prog_dicts)

            else:

                # Clean timestamps from the old xmltv format to epoch timestamps
                progindex = 0
                for program_item in programguide[cnum]["listing"]:
                    for time_item in ["time_start", "time_end"]:
                        time_value = programguide[cnum]["listing"][progindex][time_item]
                        if str(time_value).endswith("+00:00"):
                            programguide[cnum]["listing"][progindex][time_item] = datetime.datetime.strptime(time_value, '%Y%m%d%H%M%S +00:00').timestamp()
                        elif str(time_value).endswith("+0000"):
                            programguide[cnum]["listing"][progindex][time_item] = datetime.datetime.strptime(time_value, '%Y%m%d%H%M%S +0000').timestamp()
                        else:
                            programguide[cnum]["listing"][progindex][time_item] = int(time_value)
                    progindex += 1

                # Generate time before the listing actually starts
                first_prog_time = programguide[cnum]["listing"][0]['time_start']
                if desired_start_time < first_prog_time:
                    timestamps = self.blocks.timestamps_between(desired_start_time, first_prog_time)
                    clean_prog_dicts = self.blocks.empty_channel_epg(timestamps, chan_dict=programguide[cnum], chan_obj=chan_obj)
                    clean_prog_guide[cnum]["listing"].extend(clean_prog_dicts)

                # Generate time blocks between events if chunks of time are missing
                # NOTE(review): source formatting was lost; this nesting keeps
                # progindex valid for the lookup below (last iteration does not
                # increment). Also note the gap branch extends only the filler
                # blocks and drops program_item itself — verify against VCS.
                progindex = 0
                for program_item in programguide[cnum]["listing"]:
                    try:
                        nextprog_dict = programguide[cnum]["listing"][progindex + 1]
                    except IndexError:
                        nextprog_dict = None
                    if not nextprog_dict:
                        clean_prog_guide[cnum]["listing"].append(program_item)
                    else:
                        if nextprog_dict['time_start'] > program_item['time_end']:
                            timestamps = self.blocks.timestamps_between(program_item['time_end'], nextprog_dict['time_start'])
                            clean_prog_dicts = self.blocks.empty_channel_epg(timestamps, chan_dict=programguide[cnum], chan_obj=chan_obj)
                            clean_prog_guide[cnum]["listing"].extend(clean_prog_dicts)
                        else:
                            clean_prog_guide[cnum]["listing"].append(program_item)
                        progindex += 1

                # Generate time after the listing actually ends
                end_prog_time = programguide[cnum]["listing"][progindex]['time_end']
                if desired_end_time > end_prog_time:
                    timestamps = self.blocks.timestamps_between(end_prog_time, desired_end_time)
                    clean_prog_dicts = self.blocks.empty_channel_epg(timestamps, chan_dict=programguide[cnum], chan_obj=chan_obj)
                    clean_prog_guide[cnum]["listing"].extend(clean_prog_dicts)

        programguide = clean_prog_guide.copy()

        # if a stock method, generate Blocks EPG for missing channels
        if method in [origin for origin in list(self.origins.origins_dict.keys())]:
            timestamps = self.blocks.timestamps
            for fhdhr_id in [x["id"] for x in self.channels.get_channels(method)]:
                chan_obj = self.channels.get_channel_obj("id", fhdhr_id, method)
                if str(chan_obj.number) not in list(programguide.keys()):
                    programguide[str(chan_obj.number)] = chan_obj.epgdict
                    clean_prog_dicts = self.blocks.empty_channel_epg(timestamps, chan_obj=chan_obj)
                    programguide[str(chan_obj.number)]["listing"].extend(clean_prog_dicts)

        # Make Thumbnails for missing thumbnails
        for cnum in list(programguide.keys()):
            if not programguide[cnum]["thumbnail"]:
                programguide[cnum]["thumbnail"] = "/api/images?method=generate&type=channel&message=%s" % programguide[cnum]["number"]
            programguide[cnum]["listing"] = sorted(programguide[cnum]["listing"], key=lambda i: i['time_start'])
            prog_index = 0
            for program_item in programguide[cnum]["listing"]:
                if not programguide[cnum]["listing"][prog_index]["thumbnail"]:
                    programguide[cnum]["listing"][prog_index]["thumbnail"] = programguide[cnum]["thumbnail"]
                prog_index += 1

        # Get Totals
        total_channels = len(list(programguide.keys()))
        total_programs = 0

        # Sort the channels
        sorted_channel_list = channel_sort(list(programguide.keys()))
        sorted_chan_guide = {}
        for channel in sorted_channel_list:
            # NOTE(review): 'cnum' here is stale from the loop above — this
            # likely should be programguide[channel]; total_programs is
            # miscounted as-is.
            total_programs += len(programguide[cnum]["listing"])
            sorted_chan_guide[channel] = programguide[channel]

        self.epgdict[method] = sorted_chan_guide
        # NOTE(review): the database receives the unsorted `programguide`
        # while the in-memory cache holds `sorted_chan_guide` — confirm
        # this divergence is intended.
        self.fhdhr.db.set_fhdhr_value("epg_dict", method, programguide)
        self.fhdhr.db.set_fhdhr_value("update_time", method, time.time())
        self.fhdhr.logger.info("Wrote %s EPG cache. %s Programs for %s Channels" % (method, total_programs, total_channels))

    def start(self):
        """Start the background EPG refresh thread."""
        self.fhdhr.logger.info("EPG Update Thread Starting")
        self.fhdhr.threads["epg"].start()

    def stop(self):
        """Log thread shutdown (no actual cancellation is performed)."""
        self.fhdhr.logger.info("EPG Update Thread Stopping")

    def run(self):
        """Background loop: every 30 minutes, refresh any method past its interval.

        Refreshes go through the web API (epg_update_url) rather than
        calling update() directly.
        """
        time.sleep(1800)
        while True:
            for epg_method in self.epg_methods:
                last_update_time = self.fhdhr.db.get_fhdhr_value("update_time", epg_method)
                updatetheepg = False
                if not last_update_time:
                    updatetheepg = True
                elif time.time() >= (last_update_time + self.sleeptime[epg_method]):
                    updatetheepg = True
                if updatetheepg:
                    self.fhdhr.api.get("%s&source=%s" % (self.epg_update_url, epg_method))
            time.sleep(1800)

        # NOTE(review): unreachable — the while loop above never exits.
        self.stop()
|
||||
@ -1,120 +0,0 @@
|
||||
import datetime
|
||||
|
||||
|
||||
class blocksEPG():
    """EPG backend that fabricates "Unavailable" placeholder blocks.

    Used both as a standalone guide source for origins without real EPG
    data, and as a gap-filling helper by the EPG updater.
    """

    def __init__(self, fhdhr, channels, origins, origin):
        self.fhdhr = fhdhr
        self.channels = channels
        self.origins = origins
        # Origin this instance generates blocks for; None for the shared
        # gap-filling helper instance.
        self.origin = origin

    def update_epg(self):
        """Build placeholder listings for every channel of the configured origin."""
        programguide = {}

        timestamps = self.timestamps

        for fhdhr_id in [x["id"] for x in self.channels.get_channels(self.origin)]:
            chan_obj = self.channels.get_channel_obj("id", fhdhr_id, self.origin)

            if str(chan_obj.number) not in list(programguide.keys()):
                programguide[str(chan_obj.number)] = chan_obj.epgdict

            clean_prog_dicts = self.empty_channel_epg(timestamps, chan_obj=chan_obj)
            for clean_prog_dict in clean_prog_dicts:
                programguide[str(chan_obj.number)]["listing"].append(clean_prog_dict)

        return programguide

    @property
    def timestamps(self):
        """Block timestamps spanning the configured reverse_days..forward_days window."""
        desired_start_time = (datetime.datetime.today() + datetime.timedelta(days=self.fhdhr.config.dict["epg"]["reverse_days"])).timestamp()
        desired_end_time = (datetime.datetime.today() + datetime.timedelta(days=self.fhdhr.config.dict["epg"]["forward_days"])).timestamp()
        return self.timestamps_between(desired_start_time, desired_end_time)

    def timestamps_between(self, starttime, endtime):
        """Split [starttime, endtime] into block_size chunks (plus a final partial chunk)."""
        timestamps = []
        desired_blocksize = self.fhdhr.config.dict["epg"]["block_size"]
        current_time = starttime
        while (current_time + desired_blocksize) <= endtime:
            timestampdict = {
                "time_start": current_time,
                "time_end": current_time + desired_blocksize,
                }
            timestamps.append(timestampdict)
            current_time += desired_blocksize
        if current_time < endtime:
            # Remainder shorter than a full block.
            timestampdict = {
                "time_start": current_time,
                "time_end": endtime
                }
            timestamps.append(timestampdict)
        return timestamps

    def single_channel_epg(self, timestampdict, chan_obj=None, chan_dict=None):
        """One "Unavailable" placeholder listing for the given time block.

        NOTE(review): if BOTH chan_obj and chan_dict are None, content_id
        is never assigned and this raises UnboundLocalError — callers must
        pass one of them (cf. empty_listing, which handles the None/None case).
        """
        if chan_obj:
            content_id = "%s_%s" % (chan_obj.dict["origin_id"], timestampdict['time_start'])
        elif chan_dict:
            content_id = "%s_%s" % (chan_dict["id"], timestampdict['time_start'])

        clean_prog_dict = {
            "time_start": timestampdict['time_start'],
            "time_end": timestampdict['time_end'],
            "duration_minutes": (timestampdict['time_end'] - timestampdict['time_start']) / 60,
            "title": "Unavailable",
            "sub-title": "Unavailable",
            "description": "Unavailable",
            "rating": "N/A",
            "episodetitle": None,
            "releaseyear": None,
            "genres": [],
            "seasonnumber": None,
            "episodenumber": None,
            "isnew": False,
            "id": content_id,
            }
        if chan_obj:
            clean_prog_dict["thumbnail"] = chan_obj.thumbnail
        elif chan_dict:
            clean_prog_dict["thumbnail"] = chan_dict["thumbnail"]
        if not clean_prog_dict["thumbnail"]:
            # Fall back to a generated placeholder image.
            clean_prog_dict["thumbnail"] = "/api/images?method=generate&type=content&message=%s" % content_id

        return clean_prog_dict

    def empty_channel_epg(self, timestamps, chan_obj=None, chan_dict=None):
        """Placeholder listings for every block in `timestamps`."""
        clean_prog_dicts = []
        for timestampdict in timestamps:
            clean_prog_dict = self.single_channel_epg(timestampdict, chan_obj=chan_obj, chan_dict=chan_dict)
            clean_prog_dicts.append(clean_prog_dict)
        return clean_prog_dicts

    def empty_listing(self, chan_obj=None, chan_dict=None):
        """A single time-less "Unavailable" listing (used as a whats-on fallback)."""
        clean_prog_dict = {
            "time_start": None,
            "time_end": None,
            "duration_minutes": None,
            "title": "Unavailable",
            "sub-title": "Unavailable",
            "description": "Unavailable",
            "rating": "N/A",
            "episodetitle": None,
            "releaseyear": None,
            "genres": [],
            "seasonnumber": None,
            "episodenumber": None,
            "isnew": False,
            "id": "Unavailable",
            }

        if chan_obj:
            clean_prog_dict["thumbnail"] = chan_obj.thumbnail
        elif chan_dict:
            clean_prog_dict["thumbnail"] = chan_dict["thumbnail"]
        else:
            clean_prog_dict["thumbnail"] = None
        if not clean_prog_dict["thumbnail"]:
            clean_prog_dict["thumbnail"] = "/api/images?method=generate&type=content&message=Unavailable"

        return clean_prog_dict
|
||||
0
fHDHR/device/epgtypes/__init__.py
Normal file
65
fHDHR/device/epgtypes/blocks.py
Normal file
@ -0,0 +1,65 @@
|
||||
import datetime
|
||||
|
||||
|
||||
class blocksEPG():
    """Fallback EPG backend that fabricates hour-long "Unavailable" listings.

    Produces a guide covering today plus the next five days for every
    known channel, with one placeholder program per hour.
    """

    def __init__(self, settings, channels, logger, web, db):
        self.config = settings
        self.logger = logger
        self.channels = channels
        self.db = db
        # NOTE(review): the `web` parameter is accepted but never stored or
        # used — presumably kept for constructor-signature parity with the
        # other epgtype backends.

    def update_epg(self):
        """Build a guide of one-hour placeholder listings for ~6 days per channel."""
        programguide = {}

        # Build hour-aligned (start, end) timestamp pairs, formatted as
        # '%Y%m%d%H%M%S +0000', for today plus the next 5 days.
        timestamps = []
        todaydate = datetime.date.today()
        for x in range(0, 6):
            xdate = todaydate + datetime.timedelta(days=x)
            xtdate = xdate + datetime.timedelta(days=1)

            for hour in range(0, 24):
                time_start = datetime.datetime.combine(xdate, datetime.time(hour, 0))
                if hour + 1 < 24:
                    time_end = datetime.datetime.combine(xdate, datetime.time(hour + 1, 0))
                else:
                    # The final hour of the day ends at the next day's midnight.
                    time_end = datetime.datetime.combine(xtdate, datetime.time(0, 0))
                timestampdict = {
                    "time_start": str(time_start.strftime('%Y%m%d%H%M%S')) + " +0000",
                    "time_end": str(time_end.strftime('%Y%m%d%H%M%S')) + " +0000",
                    }
                timestamps.append(timestampdict)

        for c in self.channels.get_channels():
            if str(c["number"]) not in list(programguide.keys()):
                programguide[str(c["number"])] = {
                    "callsign": c["callsign"],
                    "name": c["name"],
                    "number": c["number"],
                    "id": c["id"],
                    "thumbnail": ("/api/images?method=generate&type=channel&message=%s" % (str(c['number']))),
                    "listing": [],
                    }

            # One placeholder listing per hourly block.
            for timestamp in timestamps:
                clean_prog_dict = {
                    "time_start": timestamp['time_start'],
                    "time_end": timestamp['time_end'],
                    "duration_minutes": 60,
                    "thumbnail": ("/api/images?method=generate&type=content&message=%s" % (str(c["id"]) + "_" + str(timestamp['time_start']).split(" ")[0])),
                    "title": "Unavailable",
                    "sub-title": "Unavailable",
                    "description": "Unavailable",
                    "rating": "N/A",
                    "episodetitle": None,
                    "releaseyear": None,
                    "genres": [],
                    "seasonnumber": None,
                    "episodenumber": None,
                    "isnew": False,
                    "id": str(c["id"]) + "_" + str(timestamp['time_start']).split(" ")[0],
                    }

                programguide[str(c["number"])]["listing"].append(clean_prog_dict)

        return programguide
|
||||
157
fHDHR/device/epgtypes/zap2it.py
Normal file
@ -0,0 +1,157 @@
|
||||
import json
|
||||
import time
|
||||
import datetime
|
||||
import urllib.parse
|
||||
|
||||
from fHDHR.tools import xmldictmaker
|
||||
from fHDHR.exceptions import EPGSetupError
|
||||
|
||||
|
||||
class zap2itEPG():
|
||||
|
||||
def __init__(self, settings, channels, logger, web, db):
    """Zap2it EPG backend.

    Pulls guide data from the tvlistings.zap2it.com grid API for the
    configured (or auto-detected) postal code.
    """
    self.config = settings
    self.logger = logger
    self.channels = channels
    self.web = web
    self.db = db

    # Postal code for the zap2it grid API; may be unset, in which case
    # get_location() attempts auto-detection on first use.
    self.postalcode = self.config.dict["zap2it"]["postalcode"]

    # Directory used to cache raw grid API responses.
    self.web_cache_dir = self.config.dict["filedir"]["epg_cache"]["zap2it"]["web_cache"]
|
||||
|
||||
def get_location(self):
    """Return the zap2it postal code, auto-detecting via ipinfo.io when unset.

    The detected value is cached on self.postalcode for subsequent calls.

    Raises:
        EPGSetupError: when no postal code is configured and auto-detection
            fails (network error, unexpected response shape, etc.).
    """
    if not self.postalcode:
        # Fix: previously this warning was logged unconditionally, even
        # when a postal code WAS configured.
        self.logger.warning("Zap2it postalcode not set, attempting to retrieve.")
        try:
            postalcode_url = 'http://ipinfo.io/json'
            postalcode_req = self.web.session.get(postalcode_url)
            data = postalcode_req.json()
            self.postalcode = data["postal"]
        except Exception as e:
            raise EPGSetupError("Unable to automatically obtain zap2it postalcode: " + str(e))
    return self.postalcode
|
||||
|
||||
def update_epg(self):
|
||||
programguide = {}
|
||||
|
||||
# Start time parameter is now rounded down to nearest `zap_timespan`, in s.
|
||||
zap_time = datetime.datetime.utcnow().timestamp()
|
||||
zap_time_window = int(self.config.dict["zap2it"]["timespan"]) * 3600
|
||||
zap_time = int(zap_time - (zap_time % zap_time_window))
|
||||
|
||||
self.remove_stale_cache(zap_time)
|
||||
|
||||
# Fetch data in `zap_timespan` chunks.
|
||||
for i in range(int(7 * 24 / int(self.config.dict["zap2it"]["timespan"]))):
|
||||
i_time = zap_time + (i * zap_time_window)
|
||||
|
||||
parameters = {
|
||||
'aid': self.config.dict["zap2it"]['affiliate_id'],
|
||||
'country': self.config.dict["zap2it"]['country'],
|
||||
'device': self.config.dict["zap2it"]['device'],
|
||||
'headendId': self.config.dict["zap2it"]['headendid'],
|
||||
'isoverride': "true",
|
||||
'languagecode': self.config.dict["zap2it"]['languagecode'],
|
||||
'pref': 'm,p',
|
||||
'timespan': self.config.dict["zap2it"]['timespan'],
|
||||
'timezone': self.config.dict["zap2it"]['timezone'],
|
||||
'userId': self.config.dict["zap2it"]['userid'],
|
||||
'postalCode': str(self.postalcode or self.get_location()),
|
||||
'lineupId': '%s-%s-DEFAULT' % (self.config.dict["zap2it"]['country'], self.config.dict["zap2it"]['device']),
|
||||
'time': i_time,
|
||||
'Activity_ID': 1,
|
||||
'FromPage': "TV%20Guide",
|
||||
}
|
||||
|
||||
url = 'https://tvlistings.zap2it.com/api/grid?'
|
||||
url += urllib.parse.urlencode(parameters)
|
||||
|
||||
result = self.get_cached(str(i_time), self.config.dict["zap2it"]['delay'], url)
|
||||
d = json.loads(result)
|
||||
|
||||
for c in d['channels']:
|
||||
|
||||
cdict = xmldictmaker(c, ["callSign", "name", "channelNo", "channelId", "thumbnail"])
|
||||
|
||||
if str(cdict['channelNo']) not in list(programguide.keys()):
|
||||
|
||||
programguide[str(cdict['channelNo'])] = {
|
||||
"callsign": cdict["callSign"],
|
||||
"name": cdict["name"] or cdict["callSign"], # TODO
|
||||
"number": str(cdict["channelNo"]),
|
||||
"id": str(cdict["channelId"]),
|
||||
"thumbnail": str(cdict['thumbnail']).replace("//", "https://").split("?")[0],
|
||||
"listing": [],
|
||||
}
|
||||
|
||||
for event in c['events']:
|
||||
|
||||
eventdict = xmldictmaker(event, ["startTime", "endTime", "duration", "rating", "flag"], list_items=["filter", "flag"])
|
||||
progdict = xmldictmaker(event['program'], ["title", "sub-title", "releaseYear", "episodeTitle", "shortDesc", "season", "episode", "id"])
|
||||
|
||||
clean_prog_dict = {
|
||||
"time_start": self.xmltimestamp_zap(eventdict['startTime']),
|
||||
"time_end": self.xmltimestamp_zap(eventdict['endTime']),
|
||||
"duration_minutes": eventdict['duration'],
|
||||
"thumbnail": str("https://zap2it.tmsimg.com/assets/" + str(eventdict['thumbnail']) + ".jpg"),
|
||||
"title": progdict['title'] or "Unavailable",
|
||||
"sub-title": progdict['sub-title'] or "Unavailable",
|
||||
"description": progdict['shortDesc'] or "Unavailable",
|
||||
"rating": eventdict['rating'] or "N/A",
|
||||
"episodetitle": progdict['episodeTitle'],
|
||||
"releaseyear": progdict['releaseYear'],
|
||||
"genres": [],
|
||||
"seasonnumber": progdict['season'],
|
||||
"episodenumber": progdict['episode'],
|
||||
"isnew": False,
|
||||
"id": str(progdict['id'] or self.xmltimestamp_zap(eventdict['startTime'])),
|
||||
}
|
||||
|
||||
for f in eventdict['filter']:
|
||||
clean_prog_dict["genres"].append(f.replace('filter-', ''))
|
||||
|
||||
if 'movie' in clean_prog_dict['genres'] and clean_prog_dict['releaseyear']:
|
||||
clean_prog_dict["sub-title"] = 'Movie: ' + clean_prog_dict['releaseyear']
|
||||
elif clean_prog_dict['episodetitle']:
|
||||
clean_prog_dict["sub-title"] = clean_prog_dict['episodetitle']
|
||||
|
||||
if 'New' in eventdict['flag'] and 'live' not in eventdict['flag']:
|
||||
clean_prog_dict["isnew"] = True
|
||||
|
||||
programguide[str(cdict["channelNo"])]["listing"].append(clean_prog_dict)
|
||||
|
||||
return programguide
|
||||
|
||||
def xmltimestamp_zap(self, inputtime):
|
||||
xmltime = inputtime.replace('Z', '+00:00')
|
||||
xmltime = datetime.datetime.fromisoformat(xmltime)
|
||||
xmltime = xmltime.strftime('%Y%m%d%H%M%S %z')
|
||||
return xmltime
|
||||
|
||||
def get_cached(self, cache_key, delay, url):
|
||||
cache_path = self.web_cache_dir.joinpath(cache_key)
|
||||
if cache_path.is_file():
|
||||
self.logger.info('FROM CACHE: ' + str(cache_path))
|
||||
with open(cache_path, 'rb') as f:
|
||||
return f.read()
|
||||
else:
|
||||
self.logger.info('Fetching: ' + url)
|
||||
resp = self.web.session.get(url)
|
||||
result = resp.content
|
||||
with open(cache_path, 'wb') as f:
|
||||
f.write(result)
|
||||
time.sleep(int(delay))
|
||||
return result
|
||||
|
||||
def remove_stale_cache(self, zap_time):
|
||||
for p in self.web_cache_dir.glob('*'):
|
||||
try:
|
||||
t = int(p.name)
|
||||
if t >= zap_time:
|
||||
continue
|
||||
except Exception as e:
|
||||
self.logger.error(e)
|
||||
pass
|
||||
self.logger.info('Removing stale cache file: ' + p.name)
|
||||
p.unlink()
|
||||
@ -6,15 +6,18 @@ import PIL.ImageFont
|
||||
|
||||
class imageHandler():
|
||||
|
||||
def __init__(self, fhdhr, epg):
|
||||
self.fhdhr = fhdhr
|
||||
def __init__(self, settings, epg, logger, web):
|
||||
self.config = settings
|
||||
self.logger = logger
|
||||
self.epg = epg
|
||||
self.web = web
|
||||
|
||||
def get_epg_image(self, image_type, content_id):
|
||||
imageUri = self.epg.get_thumbnail(image_type, str(content_id))
|
||||
if not imageUri:
|
||||
return self.generate_image(image_type, str(content_id))
|
||||
|
||||
req = self.fhdhr.web.session.get(imageUri)
|
||||
req = self.web.session.get(imageUri)
|
||||
return req.content
|
||||
|
||||
def getSize(self, txt, font):
|
||||
@ -35,7 +38,7 @@ class imageHandler():
|
||||
colorBackground = "#228822"
|
||||
colorText = "#717D7E"
|
||||
colorOutline = "#717D7E"
|
||||
fontname = str(self.fhdhr.config.internal["paths"]["font"])
|
||||
fontname = str(self.config.dict["filedir"]["font"])
|
||||
|
||||
font = PIL.ImageFont.truetype(fontname, fontsize)
|
||||
text_width, text_height = self.getSize(message, font)
|
||||
|
||||
192
fHDHR/device/ssdp.py
Normal file
@ -0,0 +1,192 @@
|
||||
# Adapted from https://github.com/MoshiBin/ssdpy and https://github.com/ZeWaren/python-upnp-ssdp-example
|
||||
import socket
|
||||
import struct
|
||||
|
||||
|
||||
class fHDHR_Detect():
    """Tracks other fHDHR instances discovered on the network via SSDP.

    Locations are persisted in the fhdhr database under the
    ("ssdp_detect", "list") key, which is cleared on startup.
    """

    def __init__(self, settings, logger, db):
        self.config = settings
        self.db = db
        # Start with a clean slate each run so entries from a prior process
        # do not linger.
        self.db.delete_fhdhr_value("ssdp_detect", "list")

    def set(self, location):
        """Record a newly seen device location, de-duplicated."""
        known = self.db.get_fhdhr_value("ssdp_detect", "list") or []
        if location in known:
            return
        known.append(location)
        self.db.set_fhdhr_value("ssdp_detect", "list", known)

    def get(self):
        """Return every device location seen so far (possibly an empty list)."""
        return self.db.get_fhdhr_value("ssdp_detect", "list") or []
||||
|
||||
|
||||
class SSDPServer():
    """SSDP (UPnP discovery) responder for fHDHR.

    Joins the SSDP multicast group, answers M-SEARCH queries with a NOTIFY
    describing this instance, and records other fHDHR instances seen via
    their NOTIFY messages. The socket is only created when a
    discovery_address is configured; otherwise only `detect_method` exists.
    Adapted from ssdpy and python-upnp-ssdp-example.
    """

    def __init__(self, settings, fhdhr_version, logger, db):
        self.config = settings
        self.logger = logger
        self.db = db

        # Registry of other fHDHR instances seen on the network.
        self.detect_method = fHDHR_Detect(settings, logger, db)

        # NOTE(review): without a discovery_address, no socket/payload
        # attributes are created, so run()/m_search() would fail if called.
        if settings.dict["fhdhr"]["discovery_address"]:

            self.sock = None
            self.proto = "ipv4"
            self.port = 1900  # standard SSDP port
            self.iface = None
            self.address = None
            self.server = 'fHDHR/%s UPnP/1.0' % fhdhr_version

            allowed_protos = ("ipv4", "ipv6")
            if self.proto not in allowed_protos:
                raise ValueError("Invalid proto - expected one of {}".format(allowed_protos))

            # UPnP notification type / unique service name for this device.
            self.nt = 'urn:schemas-upnp-org:device:MediaServer:1'
            self.usn = 'uuid:' + settings.dict["main"]["uuid"] + '::' + self.nt
            self.location = ('http://' + settings.dict["fhdhr"]["discovery_address"] + ':' +
                             str(settings.dict["fhdhr"]["port"]) + '/device.xml')
            self.al = self.location
            self.max_age = 1800
            self._iface = None

            if self.proto == "ipv4":
                self._af_type = socket.AF_INET
                self._broadcast_ip = "239.255.255.250"
                self._address = (self._broadcast_ip, self.port)
                self.bind_address = "0.0.0.0"
            elif self.proto == "ipv6":
                self._af_type = socket.AF_INET6
                self._broadcast_ip = "ff02::c"
                self._address = (self._broadcast_ip, self.port, 0, 0)
                self.bind_address = "::"

            self.broadcast_addy = "{}:{}".format(self._broadcast_ip, self.port)

            self.sock = socket.socket(self._af_type, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
            self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

            # Bind to specific interface
            if self.iface is not None:
                self.sock.setsockopt(socket.SOL_SOCKET, getattr(socket, "SO_BINDTODEVICE", 25), self.iface)

            # Subscribe to multicast address
            if self.proto == "ipv4":
                mreq = socket.inet_aton(self._broadcast_ip)
                if self.address is not None:
                    mreq += socket.inet_aton(self.address)
                else:
                    mreq += struct.pack(b"@I", socket.INADDR_ANY)
                self.sock.setsockopt(
                    socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq,
                )
                # Allow multicasts on loopback devices (necessary for testing)
                self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, 1)
            elif self.proto == "ipv6":
                # In IPv6 we use the interface index, not the address when subscribing to the group
                mreq = socket.inet_pton(socket.AF_INET6, self._broadcast_ip)
                if self.iface is not None:
                    iface_index = socket.if_nametoindex(self.iface)
                    # Send outgoing packets from the same interface
                    self.sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_IF, iface_index)
                    mreq += struct.pack(b"@I", iface_index)
                else:
                    mreq += socket.inet_pton(socket.AF_INET6, "::")
                self.sock.setsockopt(
                    socket.IPPROTO_IPV6, socket.IPV6_JOIN_GROUP, mreq,
                )
                self.sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_LOOP, 1)
            self.sock.bind((self.bind_address, self.port))

            # Pre-render both payloads once; they do not change at runtime.
            self.notify_payload = self.create_notify_payload()
            self.msearch_payload = self.create_msearch_payload()

            self.m_search()

    def on_recv(self, data, address):
        """Handle one received SSDP datagram (raw bytes + sender address)."""
        self.logger.debug("Received packet from {}: {}".format(address, data))

        (host, port) = address

        # Split the HTTP-like datagram into header block and (ignored) body.
        header, payload = data.decode().split('\r\n\r\n')[:2]

        lines = header.split('\r\n')
        cmd = lines[0].split(' ')
        # Normalize "Key: value" -> "Key:value", drop blanks, lowercase keys.
        lines = map(lambda x: x.replace(': ', ':', 1), lines[1:])
        lines = filter(lambda x: len(x) > 0, lines)

        headers = [x.split(':', 1) for x in lines]
        headers = dict(map(lambda x: (x[0].lower(), x[1]), headers))

        if cmd[0] == 'M-SEARCH' and cmd[1] == '*':
            # SSDP discovery
            self.logger.debug("Received qualifying M-SEARCH from {}".format(address))
            self.logger.debug("M-SEARCH data: {}".format(headers))
            notify = self.notify_payload
            self.logger.debug("Created NOTIFY: {}".format(notify))
            try:
                self.sock.sendto(notify, address)
            except OSError as e:
                # Most commonly: We received a multicast from an IP not in our subnet
                self.logger.debug("Unable to send NOTIFY to {}: {}".format(address, e))
                pass
        elif cmd[0] == 'NOTIFY' and cmd[1] == '*':
            # SSDP presence: record other fHDHR instances (not ourselves).
            self.logger.debug("NOTIFY data: {}".format(headers))
            if headers["server"].startswith("fHDHR"):
                if headers["location"] != self.location:
                    self.detect_method.set(headers["location"].split("/device.xml")[0])
        else:
            self.logger.debug('Unknown SSDP command %s %s' % (cmd[0], cmd[1]))

    def m_search(self):
        """Broadcast an M-SEARCH query to the multicast group."""
        data = self.msearch_payload
        self.sock.sendto(data, self._address)

    def create_notify_payload(self):
        """Build the NOTIFY ssdp:alive datagram advertising this device."""
        if self.max_age is not None and not isinstance(self.max_age, int):
            raise ValueError("max_age must by of type: int")
        data = (
            "NOTIFY * HTTP/1.1\r\n"
            "HOST:{}\r\n"
            "NT:{}\r\n"
            "NTS:ssdp:alive\r\n"
            "USN:{}\r\n"
            "SERVER:{}\r\n"
        ).format(
            self._broadcast_ip,
            self.nt,
            self.usn,
            self.server
        )
        if self.location is not None:
            data += "LOCATION:{}\r\n".format(self.location)
        if self.al is not None:
            data += "AL:{}\r\n".format(self.al)
        if self.max_age is not None:
            data += "Cache-Control:max-age={}\r\n".format(self.max_age)
        data += "\r\n"
        return data.encode("utf-8")

    def create_msearch_payload(self):
        """Build the M-SEARCH ssdp:discover datagram (ST ssdp:all, MX 1)."""
        data = (
            "M-SEARCH * HTTP/1.1\r\n"
            "HOST:{}\r\n"
            'MAN: "ssdp:discover"\r\n'
            "ST:{}\r\n"
            "MX:{}\r\n"
        ).format(
            self.broadcast_addy,
            "ssdp:all",
            1
        )
        data += "\r\n"
        return data.encode("utf-8")

    def run(self):
        """Receive-and-dispatch loop; closes the socket on KeyboardInterrupt."""
        try:
            while True:
                data, address = self.sock.recvfrom(1024)
                self.on_recv(data, address)
        except KeyboardInterrupt:
            self.sock.close()
||||
@ -1,214 +0,0 @@
|
||||
# Adapted from https://github.com/MoshiBin/ssdpy and https://github.com/ZeWaren/python-upnp-ssdp-example
|
||||
import socket
|
||||
import struct
|
||||
import time
|
||||
import threading
|
||||
|
||||
|
||||
class SSDPServer():
    """Plugin-driven SSDP server for fHDHR.

    Discovers SSDP-type plugins, lets each plugin contribute NOTIFY payloads
    and observe incoming datagrams, and periodically re-announces the server
    on the multicast group. The socket and worker thread are created only
    when a discovery_address is configured, SSDP is enabled, and at least
    one SSDP plugin is present.
    Adapted from ssdpy and python-upnp-ssdp-example.
    """

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

        # Plugin-name -> instantiated SSDP handler object.
        self.ssdp_handling = {}
        self.methods = [x for x in list(self.fhdhr.plugins.plugins.keys()) if self.fhdhr.plugins.plugins[x].type == "ssdp"]

        if (self.fhdhr.config.dict["fhdhr"]["discovery_address"] and
                self.fhdhr.config.dict["ssdp"]["enabled"] and
                len(self.methods)):

            self.fhdhr.threads["ssdp"] = threading.Thread(target=self.run)
            self.setup_ssdp()

            self.sock.bind((self.bind_address, 1900))

            self.msearch_payload = self.create_msearch_payload()

            self.max_age = int(fhdhr.config.dict["ssdp"]["max_age"])
            self.age_time = None  # timestamp of the last alive announcement

            self.ssdp_method_selfadd()

            self.do_alive()
            self.m_search()

    def ssdp_method_selfadd(self):
        """Instantiate every SSDP-type plugin into self.ssdp_handling."""
        for plugin_name in list(self.fhdhr.plugins.plugins.keys()):
            if self.fhdhr.plugins.plugins[plugin_name].type == "ssdp":
                method = self.fhdhr.plugins.plugins[plugin_name].name.lower()
                plugin_utils = self.fhdhr.plugins.plugins[plugin_name].plugin_utils
                self.ssdp_handling[method] = self.fhdhr.plugins.plugins[plugin_name].Plugin_OBJ(self.fhdhr, plugin_utils, self.broadcast_ip, self.max_age)

    def start(self):
        """Start the background receive thread created in __init__."""
        self.fhdhr.logger.info("SSDP Server Starting")
        self.fhdhr.threads["ssdp"].start()

    def stop(self):
        """Close the socket, terminating the receive loop."""
        self.fhdhr.logger.info("SSDP Server Stopping")
        self.sock.close()

    def run(self):
        """Receive loop: dispatch each datagram, then refresh the alive timer."""
        while True:
            data, address = self.sock.recvfrom(1024)
            self.on_recv(data, address)
            self.do_alive()
        # NOTE(review): unreachable — the while loop above never breaks.
        self.stop()

    def do_alive(self, forcealive=False):
        """Send an alive NOTIFY if forced, never sent, or max_age has elapsed."""

        send_alive = False
        if not self.age_time:
            send_alive = True
        elif forcealive:
            send_alive = True
        elif time.time() >= (self.age_time + self.max_age):
            send_alive = True

        if send_alive:
            self.fhdhr.logger.info("Sending Alive message to network.")
            self.do_notify(self.broadcast_address_tuple)
            self.age_time = time.time()

    def do_notify(self, address):
        """Collect NOTIFY payloads from enabled plugins and send each to address."""

        notify_list = []
        for ssdp_handler in list(self.ssdp_handling.keys()):
            if self.ssdp_handling[ssdp_handler].enabled and hasattr(self.ssdp_handling[ssdp_handler], 'notify'):
                notify_data = self.ssdp_handling[ssdp_handler].notify
                # A plugin may supply one payload or a list of payloads.
                if isinstance(notify_data, list):
                    notify_list.extend(notify_data)
                else:
                    notify_list.append(notify_data)

        for notifydata in notify_list:
            notifydata = notifydata.encode("utf-8")

            self.fhdhr.logger.debug("Created {}".format(notifydata))
            try:
                self.sock.sendto(notifydata, address)
            except OSError as e:
                # Most commonly: We received a multicast from an IP not in our subnet
                self.fhdhr.logger.debug("Unable to send NOTIFY: %s" % e)
                pass

    def on_recv(self, data, address):
        """Parse one SSDP datagram, fan it out to plugins, and answer M-SEARCH."""
        self.fhdhr.logger.debug("Received packet from {}: {}".format(address, data))

        try:
            header, payload = data.decode().split('\r\n\r\n')[:2]
        except ValueError:
            self.fhdhr.logger.error("Error with Received packet from {}: {}".format(address, data))
            return

        lines = header.split('\r\n')
        cmd = lines[0].split(' ')
        # Normalize "Key: value" -> "Key:value", drop blanks, lowercase keys.
        lines = map(lambda x: x.replace(': ', ':', 1), lines[1:])
        lines = filter(lambda x: len(x) > 0, lines)

        headers = [x.split(':', 1) for x in lines]
        headers = dict(map(lambda x: (x[0].lower(), x[1]), headers))

        # Give every enabled plugin a chance to inspect the datagram first.
        for ssdp_handler in list(self.ssdp_handling.keys()):
            if self.ssdp_handling[ssdp_handler].enabled and hasattr(self.ssdp_handling[ssdp_handler], 'on_recv'):
                self.ssdp_handling[ssdp_handler].on_recv(headers, cmd, list(self.ssdp_handling.keys()))

        if cmd[0] == 'M-SEARCH' and cmd[1] == '*':
            # SSDP discovery
            self.fhdhr.logger.debug("Received qualifying M-SEARCH from {}".format(address))
            self.fhdhr.logger.debug("M-SEARCH data: {}".format(headers))

            self.do_notify(address)

        # NOTE(review): this `else` attaches to the NOTIFY check only, so an
        # M-SEARCH also falls through here and is logged as "Unknown".
        if cmd[0] == 'NOTIFY' and cmd[1] == '*':
            self.fhdhr.logger.debug("NOTIFY data: {}".format(headers))
        else:
            self.fhdhr.logger.debug('Unknown SSDP command %s %s' % (cmd[0], cmd[1]))

    def m_search(self):
        """Broadcast an M-SEARCH query to the multicast group."""
        data = self.msearch_payload
        self.sock.sendto(data, self.broadcast_address_tuple)

    def create_msearch_payload(self):
        """Build the M-SEARCH ssdp:discover datagram (ST ssdp:all, MX 1)."""

        data = ''
        data_command = "M-SEARCH * HTTP/1.1"

        data_dict = {
            "HOST": "%s:%s" % (self.broadcast_ip, 1900),
            "MAN": "ssdp:discover",
            "ST": "ssdp:all",
            "MX": 1,
        }

        data += "%s\r\n" % data_command
        for data_key in list(data_dict.keys()):
            data += "%s:%s\r\n" % (data_key, data_dict[data_key])
        data += "\r\n"

        return data.encode("utf-8")

    def setup_ssdp(self):
        """Create and configure the UDP socket (proto, iface, multicast)."""
        self.sock = None

        self.proto = self.setup_proto()
        self.iface = self.fhdhr.config.dict["ssdp"]["iface"]
        self.address = self.fhdhr.config.dict["ssdp"]["multicast_address"]
        self.setup_addressing()

        self.sock = socket.socket(self.af_type, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
        self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

        self.setup_interface()

        self.setup_multicasting()

    def setup_proto(self):
        """Validate and return the configured protocol ("ipv4" or "ipv6")."""
        proto = self.fhdhr.config.dict["ssdp"]["proto"]
        allowed_protos = ("ipv4", "ipv6")
        if proto not in allowed_protos:
            raise ValueError("Invalid proto - expected one of {}".format(allowed_protos))
        return proto

    def setup_addressing(self):
        """Set address family, broadcast group, and bind address per protocol."""
        if self.proto == "ipv4":
            self.af_type = socket.AF_INET
            self.broadcast_ip = "239.255.255.250"
            self.broadcast_address_tuple = (self.broadcast_ip, 1900)
            self.bind_address = "0.0.0.0"
        elif self.proto == "ipv6":
            self.af_type = socket.AF_INET6
            self.broadcast_ip = "ff02::c"
            self.broadcast_address_tuple = (self.broadcast_ip, 1900, 0, 0)
            self.bind_address = "::"

    def setup_interface(self):
        # Bind to specific interface
        if self.iface is not None:
            self.sock.setsockopt(socket.SOL_SOCKET, getattr(socket, "SO_BINDTODEVICE", 25), self.iface)

    def setup_multicasting(self):
        # Subscribe to multicast address
        if self.proto == "ipv4":
            mreq = socket.inet_aton(self.broadcast_ip)
            if self.address is not None:
                mreq += socket.inet_aton(self.address)
            else:
                mreq += struct.pack(b"@I", socket.INADDR_ANY)
            self.sock.setsockopt(
                socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
            # Allow multicasts on loopback devices (necessary for testing)
            self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, 1)
        elif self.proto == "ipv6":
            # In IPv6 we use the interface index, not the address when subscribing to the group
            mreq = socket.inet_pton(socket.AF_INET6, self.broadcast_ip)
            if self.iface is not None:
                iface_index = socket.if_nametoindex(self.iface)
                # Send outgoing packets from the same interface
                self.sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_IF, iface_index)
                mreq += struct.pack(b"@I", iface_index)
            else:
                mreq += socket.inet_pton(socket.AF_INET6, "::")
            self.sock.setsockopt(
                socket.IPPROTO_IPV6, socket.IPV6_JOIN_GROUP, mreq,
            )
            self.sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_LOOP, 1)
||||
34
fHDHR/device/station_scan.py
Normal file
@ -0,0 +1,34 @@
|
||||
from multiprocessing import Process
|
||||
|
||||
|
||||
class Station_Scan():
    """Runs channel scans in a background process, tracking progress in the db
    under the ("station_scan", "scanning") key."""

    def __init__(self, settings, channels, logger, db):
        self.config = settings
        self.logger = logger
        self.channels = channels
        self.db = db
        # Clear any scan flag left over from a previous run.
        self.db.delete_fhdhr_value("station_scan", "scanning")

    def scan(self):
        """Kick off a background channel scan unless one is already running."""
        self.logger.info("Channel Scan Requested by Client.")

        if self.db.get_fhdhr_value("station_scan", "scanning"):
            self.logger.info("Channel Scan Already In Progress!")
            return
        self.db.set_fhdhr_value("station_scan", "scanning", 1)
        worker = Process(target=self.runscan)
        worker.start()

    def runscan(self):
        """Worker body: refresh the channel list, then clear the scan flag."""
        self.channels.get_channels(forceupdate=True)
        self.logger.info("Requested Channel Scan Complete.")
        self.db.delete_fhdhr_value("station_scan", "scanning")

    def scanning(self):
        """Return True while a scan is in progress."""
        return bool(self.db.get_fhdhr_value("station_scan", "scanning"))
||||
108
fHDHR/device/tuners.py
Normal file
@ -0,0 +1,108 @@
|
||||
import threading
|
||||
import datetime
|
||||
|
||||
from fHDHR.exceptions import TunerError
|
||||
from fHDHR.tools import humanized_time
|
||||
|
||||
|
||||
class Tuner():
    """A single virtual tuner: a lock plus a status dict describing the
    stream currently being served (if any)."""

    def __init__(self, inum, epg, logger):
        self.logger = logger
        self.number = inum
        self.epg = epg
        self.tuner_lock = threading.Lock()
        self.set_off_status()

    def grab(self, stream_args):
        """Atomically claim this tuner for a stream.

        Args:
            stream_args: dict with at least "method", "accessed", "channelUri".
        Raises:
            TunerError: if the tuner is already in use.
        """
        # Non-blocking acquire closes the check-then-act race of the old
        # locked()/acquire() pair, where two concurrent callers could both
        # see the lock free and the loser would block forever in acquire().
        if not self.tuner_lock.acquire(blocking=False):
            raise TunerError("Tuner #" + str(self.number) + " is not available.")

        self.logger.info("Tuner #" + str(self.number) + " to be used for stream.")
        self.status = {
            "status": "Active",
            "method": stream_args["method"],
            "accessed": stream_args["accessed"],
            "proxied_url": stream_args["channelUri"],
            "time_start": datetime.datetime.utcnow(),
        }

    def close(self):
        """Release the tuner and reset its status. Caller must hold the lock."""
        self.logger.info("Tuner #" + str(self.number) + " Shutting Down.")
        self.set_off_status()
        self.tuner_lock.release()

    def get_status(self):
        """Return a copy of the status dict; Active tuners gain human-readable
        play time, a stringified start time, and the current EPG entry."""
        current_status = self.status.copy()
        if current_status["status"] == "Active":
            current_status["Play Time"] = str(
                humanized_time(
                    int((datetime.datetime.utcnow() - current_status["time_start"]).total_seconds())))
            current_status["time_start"] = str(current_status["time_start"])
            # "accessed" ends in "v<channel>"; the suffix selects the channel.
            current_status["epg"] = self.epg.whats_on_now(current_status["accessed"].split("v")[-1])
        return current_status

    def set_off_status(self):
        """Mark the tuner idle."""
        self.status = {"status": "Inactive"}
||||
|
||||
|
||||
class Tuners():
    """Pool of Tuner objects sized by the fhdhr/tuner_count setting.

    Tuners remain reachable as ``self.tuner_<n>`` (n starting at 1) for
    compatibility, but are created and looked up with setattr/getattr
    rather than the previous exec()/eval() string construction.
    """

    def __init__(self, settings, epg, logger):
        self.config = settings
        self.logger = logger
        self.epg = epg
        self.max_tuners = int(self.config.dict["fhdhr"]["tuner_count"])

        for i in range(1, self.max_tuners + 1):
            setattr(self, "tuner_%d" % i, Tuner(i, epg, logger))

    def _tuner(self, tunernum):
        # Single lookup point for numbered tuners; replaces eval() indirection.
        return getattr(self, "tuner_%d" % int(tunernum))

    def tuner_grab(self, stream_args, tunernum=None):
        """Claim a tuner for a stream.

        With `tunernum`, claim that specific tuner; otherwise claim the first
        free one. Raises TunerError when none is available or the requested
        tuner does not exist.
        """
        tunerselected = None

        if tunernum:
            if tunernum not in range(1, self.max_tuners + 1):
                raise TunerError("Tuner " + str(tunernum) + " does not exist.")
            # TunerError propagates if the requested tuner is busy.
            self._tuner(tunernum).grab(stream_args)
            tunerselected = tunernum

        else:
            for tunernum in range(1, self.max_tuners + 1):
                try:
                    self._tuner(tunernum).grab(stream_args)
                except TunerError:
                    continue
                else:
                    tunerselected = tunernum
                    break

        if not tunerselected:
            raise TunerError("No Available Tuners.")
        else:
            return tunerselected

    def tuner_close(self, tunernum):
        """Release the given tuner."""
        self._tuner(tunernum).close()

    def status(self):
        """Return {tuner number: status dict} for every tuner."""
        all_status = {}
        for tunernum in range(1, self.max_tuners + 1):
            all_status[tunernum] = self._tuner(tunernum).get_status()
        return all_status

    def available_tuner_count(self):
        """Count tuners currently Inactive."""
        available_tuners = 0
        for tunernum in range(1, self.max_tuners + 1):
            tuner_status = self._tuner(tunernum).get_status()
            if tuner_status["status"] == "Inactive":
                available_tuners += 1
        return available_tuners

    def inuse_tuner_count(self):
        """Count tuners currently Active."""
        inuse_tuners = 0
        for tunernum in range(1, self.max_tuners + 1):
            tuner_status = self._tuner(tunernum).get_status()
            if tuner_status["status"] == "Active":
                inuse_tuners += 1
        return inuse_tuners
|
||||
@ -1,229 +0,0 @@
|
||||
import m3u8
|
||||
|
||||
from fHDHR.exceptions import TunerError
|
||||
|
||||
from .tuner import Tuner
|
||||
|
||||
|
||||
class Tuners():
|
||||
|
||||
def __init__(self, fhdhr, epg, channels):
|
||||
self.fhdhr = fhdhr
|
||||
self.channels = channels
|
||||
|
||||
self.epg = epg
|
||||
|
||||
self.tuners = {}
|
||||
for origin in list(self.fhdhr.origins.origins_dict.keys()):
|
||||
self.tuners[origin] = {}
|
||||
|
||||
max_tuners = int(self.fhdhr.origins.origins_dict[origin].tuners)
|
||||
|
||||
self.fhdhr.logger.info("Creating %s tuners for %s." % (max_tuners, origin))
|
||||
|
||||
for i in range(0, max_tuners):
|
||||
self.tuners[origin][str(i)] = Tuner(fhdhr, i, epg, origin)
|
||||
|
||||
self.alt_stream_handlers = {}
|
||||
|
||||
def alt_stream_methods_selfadd(self):
|
||||
for plugin_name in list(self.fhdhr.plugins.plugins.keys()):
|
||||
if self.fhdhr.plugins.plugins[plugin_name].type == "alt_stream":
|
||||
method = self.fhdhr.plugins.plugins[plugin_name].name
|
||||
self.alt_stream_handlers[method] = self.fhdhr.plugins.plugins[plugin_name]
|
||||
|
||||
def get_available_tuner(self, origin):
|
||||
return next(tunernum for tunernum in list(self.tuners[origin].keys()) if not self.tuners[origin][tunernum].tuner_lock.locked()) or None
|
||||
|
||||
def get_scanning_tuner(self, origin):
|
||||
return next(tunernum for tunernum in list(self.tuners[origin].keys()) if self.tuners[origin][tunernum].status["status"] == "Scanning") or None
|
||||
|
||||
def stop_tuner_scan(self, origin):
|
||||
tunernum = self.get_scanning_tuner(origin)
|
||||
if tunernum:
|
||||
self.tuners[origin][str(tunernum)].close()
|
||||
|
||||
def tuner_scan(self, origin="all"):
|
||||
"""Temporarily use a tuner for a scan"""
|
||||
|
||||
if origin == "all":
|
||||
origins = list(self.tuners.keys())
|
||||
else:
|
||||
origins = [origin]
|
||||
|
||||
for origin in origins:
|
||||
|
||||
if not self.available_tuner_count(origin):
|
||||
raise TunerError("805 - All Tuners In Use")
|
||||
|
||||
tunernumber = self.get_available_tuner(origin)
|
||||
self.tuners[origin][str(tunernumber)].channel_scan(origin)
|
||||
|
||||
if not tunernumber:
|
||||
raise TunerError("805 - All Tuners In Use")
|
||||
|
||||
def tuner_grab(self, tuner_number, origin, channel_number):
|
||||
|
||||
if str(tuner_number) not in list(self.tuners[origin].keys()):
|
||||
self.fhdhr.logger.error("Tuner %s does not exist for %s." % (tuner_number, origin))
|
||||
raise TunerError("806 - Tune Failed")
|
||||
|
||||
# TunerError will raise if unavailable
|
||||
self.tuners[origin][str(tuner_number)].grab(origin, channel_number)
|
||||
|
||||
return tuner_number
|
||||
|
||||
def first_available(self, origin, channel_number, dograb=True):
|
||||
|
||||
if not self.available_tuner_count(origin):
|
||||
raise TunerError("805 - All Tuners In Use")
|
||||
|
||||
tunernumber = self.get_available_tuner(origin)
|
||||
|
||||
if not tunernumber:
|
||||
raise TunerError("805 - All Tuners In Use")
|
||||
else:
|
||||
self.tuners[origin][str(tunernumber)].grab(origin, channel_number)
|
||||
return tunernumber
|
||||
|
||||
def tuner_close(self, tunernum, origin):
|
||||
self.tuners[origin][str(tunernum)].close()
|
||||
|
||||
def status(self, origin=None):
|
||||
all_status = {}
|
||||
if origin:
|
||||
for tunernum in list(self.tuners[origin].keys()):
|
||||
all_status[tunernum] = self.tuners[origin][str(tunernum)].get_status()
|
||||
else:
|
||||
for origin in list(self.tuners.keys()):
|
||||
all_status[origin] = {}
|
||||
for tunernum in list(self.tuners[origin].keys()):
|
||||
all_status[origin][tunernum] = self.tuners[origin][str(tunernum)].get_status()
|
||||
return all_status
|
||||
|
||||
def available_tuner_count(self, origin):
|
||||
available_tuners = 0
|
||||
for tunernum in list(self.tuners[origin].keys()):
|
||||
if not self.tuners[origin][str(tunernum)].tuner_lock.locked():
|
||||
available_tuners += 1
|
||||
return available_tuners
|
||||
|
||||
def inuse_tuner_count(self, origin):
|
||||
inuse_tuners = 0
|
||||
for tunernum in list(self.tuners[origin].keys()):
|
||||
if self.tuners[origin][str(tunernum)].tuner_lock.locked():
|
||||
inuse_tuners += 1
|
||||
return inuse_tuners
|
||||
|
||||
def get_stream_info(self, stream_args):
|
||||
|
||||
stream_info = self.channels.get_channel_stream(stream_args, stream_args["origin"])
|
||||
if not stream_info:
|
||||
raise TunerError("806 - Tune Failed")
|
||||
|
||||
if isinstance(stream_info, str):
|
||||
stream_info = {"url": stream_info, "headers": None}
|
||||
stream_args["stream_info"] = stream_info
|
||||
|
||||
if not stream_args["stream_info"]["url"]:
|
||||
raise TunerError("806 - Tune Failed")
|
||||
|
||||
if "headers" not in list(stream_args["stream_info"].keys()):
|
||||
stream_args["stream_info"]["headers"] = None
|
||||
|
||||
if stream_args["stream_info"]["url"].startswith("udp://"):
|
||||
stream_args["true_content_type"] = "video/mpeg"
|
||||
stream_args["content_type"] = "video/mpeg"
|
||||
else:
|
||||
|
||||
channel_stream_url_headers = self.fhdhr.web.session.head(stream_args["stream_info"]["url"]).headers
|
||||
stream_args["true_content_type"] = channel_stream_url_headers['Content-Type']
|
||||
|
||||
if stream_args["true_content_type"].startswith(tuple(["application/", "text/"])):
|
||||
stream_args["content_type"] = "video/mpeg"
|
||||
if stream_args["origin_quality"] != -1:
|
||||
stream_args["stream_info"]["url"] = self.m3u8_quality(stream_args)
|
||||
else:
|
||||
stream_args["content_type"] = stream_args["true_content_type"]
|
||||
|
||||
return stream_args
|
||||
|
||||
def m3u8_quality(self, stream_args):
    """Pick a variant url from an m3u8 master playlist per the configured quality.

    Follows nested master playlists (reloading the selected url) until a
    playlist with no variants is reached. Variants are sorted ascending by
    (bandwidth, width, height), so index -1 is highest quality, 0 lowest.

    Returns:
        str: the selected variant url, or the original url when selection is
        automatic or the playlist cannot be loaded.
    """

    m3u8_url = stream_args["stream_info"]["url"]
    quality_profile = stream_args["origin_quality"]

    if not quality_profile:
        if stream_args["method"] == "direct":
            # direct cannot adapt mid-stream, so force the best variant.
            quality_profile = "high"
            self.fhdhr.logger.info("Origin Quality not set in config. Direct Method set and will default to Highest Quality")
        else:
            self.fhdhr.logger.info("Origin Quality not set in config. %s Method will select the Quality Automatically" % stream_args["method"])
            return m3u8_url
    else:
        quality_profile = quality_profile.lower()
        self.fhdhr.logger.info("Origin Quality set in config to %s" % (quality_profile))
        # Fix: an unrecognized profile previously fell through the
        # high/medium/low chain below with selected_index never assigned,
        # raising NameError. Normalize unknown values to "high" up front.
        if quality_profile not in ["high", "medium", "low"]:
            self.fhdhr.logger.info("Unknown Origin Quality %s, defaulting to high" % quality_profile)
            quality_profile = "high"

    while True:
        self.fhdhr.logger.info("Opening m3u8 for reading %s" % m3u8_url)

        try:
            if stream_args["stream_info"]["headers"]:
                videoUrlM3u = m3u8.load(m3u8_url, headers=stream_args["stream_info"]["headers"])
            else:
                videoUrlM3u = m3u8.load(m3u8_url)
        except Exception as e:
            self.fhdhr.logger.info("m3u8 load error: %s" % e)
            return m3u8_url

        if len(videoUrlM3u.playlists):
            self.fhdhr.logger.info("%s m3u8 varients found" % len(videoUrlM3u.playlists))

            # Collect url/bandwidth/resolution for each variant.
            playlists, playlist_index = {}, 0
            for playlist_item in videoUrlM3u.playlists:
                playlist_index += 1
                playlist_dict = {
                    "url": playlist_item.absolute_uri,
                    "bandwidth": playlist_item.stream_info.bandwidth,
                    }

                if not playlist_item.stream_info.resolution:
                    playlist_dict["width"] = None
                    playlist_dict["height"] = None
                else:
                    try:
                        playlist_dict["width"] = playlist_item.stream_info.resolution[0]
                        playlist_dict["height"] = playlist_item.stream_info.resolution[1]
                    except TypeError:
                        playlist_dict["width"] = None
                        playlist_dict["height"] = None

                playlists[playlist_index] = playlist_dict

            # Sort ascending by bandwidth, then resolution (None -> 0).
            sorted_playlists = sorted(playlists, key=lambda i: (
                int(playlists[i]['bandwidth']),
                int(playlists[i]['width'] or 0),
                int(playlists[i]['height'] or 0)
                ))
            sorted_playlists = [playlists[x] for x in sorted_playlists]

            if not quality_profile or quality_profile == "high":
                selected_index = -1
            elif quality_profile == "medium":
                selected_index = int((len(sorted_playlists) - 1)/2)
            elif quality_profile == "low":
                selected_index = 0
            else:
                # Defensive: profile is normalized above, but never leave
                # selected_index unassigned.
                selected_index = -1

            m3u8_stats = ",".join(
                ["%s %s" % (x, sorted_playlists[selected_index][x])
                 for x in list(sorted_playlists[selected_index].keys())
                 if x != "url" and sorted_playlists[selected_index][x]])
            self.fhdhr.logger.info("Selected m3u8 details: %s" % m3u8_stats)
            m3u8_url = sorted_playlists[selected_index]["url"]

        else:
            self.fhdhr.logger.info("No m3u8 varients found")
            break

    return m3u8_url
|
||||
@ -1,23 +0,0 @@
|
||||
|
||||
|
||||
from .direct_stream import Direct_Stream
|
||||
from .direct_m3u8_stream import Direct_M3U8_Stream
|
||||
|
||||
|
||||
class Stream():
    """Selects the concrete streaming implementation for a tuner request."""

    def __init__(self, fhdhr, stream_args, tuner):
        self.fhdhr = fhdhr
        self.stream_args = stream_args

        requested_method = stream_args["method"]
        if requested_method == "direct":
            # m3u8 playlists are served with application/* or text/* types.
            is_playlist = self.stream_args["true_content_type"].startswith(tuple(["application/", "text/"]))
            if is_playlist:
                self.method = Direct_M3U8_Stream(fhdhr, stream_args, tuner)
            else:
                self.method = Direct_Stream(fhdhr, stream_args, tuner)
        else:
            # Non-direct methods are supplied by streaming plugins.
            plugin_name = self.fhdhr.config.dict["streaming"]["valid_methods"][requested_method]["plugin"]
            plugin = self.fhdhr.plugins.plugins[plugin_name]
            self.method = plugin.Plugin_OBJ(fhdhr, plugin.plugin_utils, stream_args, tuner)

    def get(self):
        """Return the underlying stream generator."""
        return self.method.get()
|
||||
@ -1,109 +0,0 @@
|
||||
import sys
|
||||
import time
|
||||
import m3u8
|
||||
|
||||
from Crypto.Cipher import AES
|
||||
|
||||
# from fHDHR.exceptions import TunerError
|
||||
|
||||
|
||||
class Direct_M3U8_Stream():
    """Streams an m3u8 (HLS) channel by repeatedly polling the playlist and
    passing segment bytes straight through to the client (no transcoding).
    """

    def __init__(self, fhdhr, stream_args, tuner):
        self.fhdhr = fhdhr
        self.stream_args = stream_args
        self.tuner = tuner

        # Configured pass-through chunk size (bytes).
        self.bytes_per_read = int(self.fhdhr.config.dict["streaming"]["bytes_per_read"])

    def get(self):
        """Return a generator yielding raw segment bytes while the tuner lock
        is held.

        stream_args["duration"] of 0 means unlimited; otherwise an absolute
        end time is computed up front and checked per segment.
        """

        if not self.stream_args["duration"] == 0:
            self.stream_args["time_end"] = self.stream_args["duration"] + time.time()

        self.fhdhr.logger.info("Detected stream of m3u8 URL: %s" % self.stream_args["stream_info"]["url"])

        if self.stream_args["transcode_quality"]:
            self.fhdhr.logger.info("Client requested a %s transcode for stream. Direct Method cannot transcode." % self.stream_args["transcode_quality"])

        def generate():

            try:

                # Segment urls already delivered, so re-polling the playlist
                # does not replay segments.
                played_chunk_urls = []

                while self.tuner.tuner_lock.locked():

                    try:
                        if self.stream_args["stream_info"]["headers"]:
                            playlist = m3u8.load(self.stream_args["stream_info"]["url"], headers=self.stream_args["stream_info"]["headers"])
                        else:
                            playlist = m3u8.load(self.stream_args["stream_info"]["url"])
                    except Exception as e:
                        self.fhdhr.logger.info("Connection Closed: %s" % e)
                        self.tuner.close()
                        return None

                    segments = playlist.segments

                    if len(played_chunk_urls):
                        newsegments = 0
                        for segment in segments:
                            if segment.absolute_uri not in played_chunk_urls:
                                newsegments += 1
                        self.fhdhr.logger.info("Refreshing m3u8, Loaded %s new segments." % str(newsegments))
                    else:
                        self.fhdhr.logger.info("Loaded %s segments." % str(len(segments)))

                    # Pair each segment with its encryption key entry (if any).
                    if playlist.keys != [None]:
                        keys = [{"url": key.absolute_uri, "method": key.method, "iv": key.iv} for key in playlist.keys if key]
                    else:
                        keys = [None for i in range(0, len(segments))]

                    for segment, key in zip(segments, keys):
                        chunkurl = segment.absolute_uri

                        if chunkurl and chunkurl not in played_chunk_urls:
                            played_chunk_urls.append(chunkurl)

                            # Duration check: close the tuner once expired.
                            if (not self.stream_args["duration"] == 0 and
                               not time.time() < self.stream_args["time_end"]):
                                self.fhdhr.logger.info("Requested Duration Expired.")
                                self.tuner.close()

                            if self.stream_args["stream_info"]["headers"]:
                                chunk = self.fhdhr.web.session.get(chunkurl, headers=self.stream_args["stream_info"]["headers"]).content
                            else:
                                chunk = self.fhdhr.web.session.get(chunkurl).content
                            if not chunk:
                                break
                                # raise TunerError("807 - No Video Data")
                            if key:
                                if key["url"]:
                                    if self.stream_args["stream_info"]["headers"]:
                                        keyfile = self.fhdhr.web.session.get(key["url"], headers=self.stream_args["stream_info"]["headers"]).content
                                    else:
                                        keyfile = self.fhdhr.web.session.get(key["url"]).content
                                    # NOTE(review): the key bytes are reused as
                                    # the AES-CBC IV; key["iv"] from the playlist
                                    # is ignored -- confirm this is intended.
                                    cryptor = AES.new(keyfile, AES.MODE_CBC, keyfile)
                                    self.fhdhr.logger.info("Decrypting Chunk #%s with key: %s" % (len(played_chunk_urls), key["url"]))
                                    chunk = cryptor.decrypt(chunk)

                            chunk_size = int(sys.getsizeof(chunk))
                            self.fhdhr.logger.info("Passing Through Chunk #%s with size %s: %s" % (len(played_chunk_urls), chunk_size, chunkurl))
                            yield chunk
                            # Track bytes served on the tuner's status.
                            self.tuner.add_downloaded_size(chunk_size)

                self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")

            except GeneratorExit:
                self.fhdhr.logger.info("Connection Closed.")
            except Exception as e:
                self.fhdhr.logger.info("Connection Closed: %s" % e)
            finally:
                self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")
                # Give the origin a chance to tear down its side of the stream.
                if hasattr(self.fhdhr.origins.origins_dict[self.tuner.origin], "close_stream"):
                    self.fhdhr.origins.origins_dict[self.tuner.origin].close_stream(self.tuner.number, self.stream_args)
                self.tuner.close()
                # raise TunerError("806 - Tune Failed")

        return generate()
|
||||
@ -1,72 +0,0 @@
|
||||
import sys
|
||||
import time
|
||||
|
||||
# from fHDHR.exceptions import TunerError
|
||||
|
||||
|
||||
class Direct_Stream():
    """Streams a channel url straight through to the client in fixed-size
    chunks (no transcoding)."""

    def __init__(self, fhdhr, stream_args, tuner):
        self.fhdhr = fhdhr
        self.stream_args = stream_args
        self.tuner = tuner

        # Configured chunk size for each upstream read (bytes).
        self.bytes_per_read = int(self.fhdhr.config.dict["streaming"]["bytes_per_read"])

    def get(self):
        """Return a generator yielding raw stream bytes while the tuner lock
        is held.

        stream_args["duration"] of 0 means unlimited; otherwise an absolute
        end time is computed up front and checked per chunk.
        """

        if not self.stream_args["duration"] == 0:
            self.stream_args["time_end"] = self.stream_args["duration"] + time.time()

        self.fhdhr.logger.info("Direct Stream of %s URL: %s" % (self.stream_args["true_content_type"], self.stream_args["stream_info"]["url"]))

        if self.stream_args["transcode_quality"]:
            self.fhdhr.logger.info("Client requested a %s transcode for stream. Direct Method cannot transcode." % self.stream_args["transcode_quality"])

        # Open the upstream connection once, in streaming mode.
        if self.stream_args["stream_info"]["headers"]:
            req = self.fhdhr.web.session.get(self.stream_args["stream_info"]["url"], stream=True, headers=self.stream_args["stream_info"]["headers"])
        else:
            req = self.fhdhr.web.session.get(self.stream_args["stream_info"]["url"], stream=True)

        def generate():

            try:

                chunk_counter = 1

                while self.tuner.tuner_lock.locked():

                    for chunk in req.iter_content(chunk_size=self.bytes_per_read):

                        # Duration check: close upstream and tuner once expired.
                        if (not self.stream_args["duration"] == 0 and
                           not time.time() < self.stream_args["time_end"]):
                            req.close()
                            self.fhdhr.logger.info("Requested Duration Expired.")
                            self.tuner.close()

                        if not chunk:
                            break
                            # raise TunerError("807 - No Video Data")

                        chunk_size = int(sys.getsizeof(chunk))
                        self.fhdhr.logger.info("Passing Through Chunk #%s with size %s" % (chunk_counter, chunk_size))
                        yield chunk
                        # Track bytes served on the tuner's status.
                        self.tuner.add_downloaded_size(chunk_size)

                        chunk_counter += 1

                self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")

            except GeneratorExit:
                self.fhdhr.logger.info("Connection Closed.")
            except Exception as e:
                self.fhdhr.logger.info("Connection Closed: %s" % e)
            finally:
                req.close()
                self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")
                # Give the origin a chance to tear down its side of the stream.
                if hasattr(self.fhdhr.origins.origins_dict[self.tuner.origin], "close_stream"):
                    self.fhdhr.origins.origins_dict[self.tuner.origin].close_stream(self.tuner.number, self.stream_args)
                self.tuner.close()
                # raise TunerError("806 - Tune Failed")

        return generate()
|
||||
@ -1,107 +0,0 @@
|
||||
import threading
|
||||
import datetime
|
||||
|
||||
from fHDHR.exceptions import TunerError
|
||||
from fHDHR.tools import humanized_time
|
||||
|
||||
from .stream import Stream
|
||||
|
||||
|
||||
class Tuner():
    """A single virtual tuner for an origin: tracks lock/status state and
    produces Stream objects for playback and channel scans.
    """

    def __init__(self, fhdhr, inum, epg, origin):
        self.fhdhr = fhdhr

        self.number = inum      # tuner index within its origin
        self.origin = origin    # origin name this tuner belongs to
        self.epg = epg

        # Held while the tuner is Acquired/Active/Scanning.
        self.tuner_lock = threading.Lock()
        self.set_off_status()

        # Internal API endpoints used by the background scan thread.
        self.chanscan_url = "/api/channels?method=scan"
        self.close_url = "/api/tuners?method=close&tuner=%s&origin=%s" % (self.number, self.origin)

    def channel_scan(self, origin, grabbed=False):
        """Kick off an asynchronous channel scan on this tuner.

        Args:
            origin: origin name to scan.
            grabbed: True when the caller already holds the tuner lock.

        Raises:
            TunerError: "804 - Tuner In Use" when locked and not grabbed.
        """
        if self.tuner_lock.locked() and not grabbed:
            self.fhdhr.logger.error("%s Tuner #%s is not available." % (self.origin, self.number))
            raise TunerError("804 - Tuner In Use")

        if self.status["status"] == "Scanning":
            self.fhdhr.logger.info("Channel Scan Already In Progress!")
        else:

            if not grabbed:
                self.tuner_lock.acquire()
            self.status["status"] = "Scanning"
            self.status["origin"] = origin
            self.status["time_start"] = datetime.datetime.utcnow()
            self.fhdhr.logger.info("Tuner #%s Performing Channel Scan for %s origin." % (self.number, origin))

            # Run the scan in the background so the request returns promptly.
            chanscan = threading.Thread(target=self.runscan, args=(origin,))
            chanscan.start()

    def runscan(self, origin):
        """Thread body: trigger the scan via the internal API, then release."""
        self.fhdhr.api.get("%s&origin=%s" % (self.chanscan_url, origin))
        self.fhdhr.logger.info("Requested Channel Scan for %s origin Complete." % origin)
        self.close()
        self.fhdhr.api.get(self.close_url)

    def add_downloaded_size(self, bytes_count):
        """Accumulate bytes served; no-op unless status tracks 'downloaded'."""
        if "downloaded" in list(self.status.keys()):
            self.status["downloaded"] += bytes_count

    def grab(self, origin, channel_number):
        """Acquire the tuner for a channel.

        Raises:
            TunerError: "804 - Tuner In Use" when already locked.
        """
        if self.tuner_lock.locked():
            self.fhdhr.logger.error("Tuner #%s is not available." % self.number)
            raise TunerError("804 - Tuner In Use")
        self.tuner_lock.acquire()
        self.status["status"] = "Acquired"
        self.status["origin"] = origin
        self.status["channel"] = channel_number
        self.status["time_start"] = datetime.datetime.utcnow()
        self.fhdhr.logger.info("Tuner #%s Acquired." % str(self.number))

    def close(self):
        """Reset status and release the lock (safe to call when unlocked)."""
        self.set_off_status()
        if self.tuner_lock.locked():
            self.tuner_lock.release()
            self.fhdhr.logger.info("Tuner #%s Released." % self.number)

    def get_status(self):
        """Return a copy of current status, adding running time and, for an
        Active tuner with EPG coverage, the what's-on-now program info."""
        current_status = self.status.copy()
        current_status["epg"] = {}
        if current_status["status"] in ["Acquired", "Active", "Scanning"]:
            current_status["running_time"] = str(
                humanized_time(
                    int((datetime.datetime.utcnow() - current_status["time_start"]).total_seconds())))
            # Stringify so the status dict is JSON-serializable.
            current_status["time_start"] = str(current_status["time_start"])
        if current_status["status"] in ["Active"]:
            if current_status["origin"] in self.epg.epg_methods:
                current_status["epg"] = self.epg.whats_on_now(current_status["channel"], method=current_status["origin"])
        return current_status

    def set_off_status(self):
        """Mark the tuner idle."""
        self.status = {"status": "Inactive"}

    def get_stream(self, stream_args, tuner):
        """Build a Stream object for this tuner request."""
        stream = Stream(self.fhdhr, stream_args, tuner)
        return stream

    def set_status(self, stream_args):
        """Transition to Active on the first client and record each connected
        client/client_id exactly once."""
        if self.status["status"] != "Active":
            self.status = {
                "status": "Active",
                "clients": [],
                "clients_id": [],
                "method": stream_args["method"],
                "accessed": [stream_args["accessed"]],
                "origin": stream_args["origin"],
                "channel": stream_args["channel"],
                "proxied_url": stream_args["stream_info"]["url"],
                "time_start": datetime.datetime.utcnow(),
                "downloaded": 0
                }
        if stream_args["client"] not in self.status["clients"]:
            self.status["clients"].append(stream_args["client"])
        if stream_args["client_id"] not in self.status["clients_id"]:
            self.status["clients_id"].append(stream_args["client_id"])
|
||||
117
fHDHR/device/watch.py
Normal file
@ -0,0 +1,117 @@
|
||||
import subprocess
|
||||
import time
|
||||
|
||||
from fHDHR.exceptions import TunerError
|
||||
|
||||
|
||||
class WatchStream():
    """Legacy stream handler: resolves a channel url and serves it either
    directly or through an ffmpeg stream-copy pipe.
    """

    def __init__(self, settings, origserv, tuners, logger, web):
        self.config = settings
        self.logger = logger
        self.origserv = origserv
        self.tuners = tuners
        self.web = web

    def direct_stream(self, stream_args, tunernum):
        """Proxy the channel url to the client unchanged, in fixed chunks."""

        # NOTE(review): reads config via self.tuners.config rather than
        # self.config -- confirm the tuners object actually exposes `config`.
        chunksize = int(self.tuners.config.dict["direct_stream"]['chunksize'])

        # duration of 0 means unlimited; otherwise convert to an absolute
        # deadline (seconds since epoch).
        if not stream_args["duration"] == 0:
            stream_args["duration"] += time.time()

        req = self.web.session.get(stream_args["channelUri"], stream=True)

        def generate():
            try:
                for chunk in req.iter_content(chunk_size=chunksize):

                    if not stream_args["duration"] == 0 and not time.time() < stream_args["duration"]:
                        req.close()
                        self.logger.info("Requested Duration Expired.")
                        break

                    yield chunk

            except GeneratorExit:
                req.close()
                self.logger.info("Connection Closed.")
                self.tuners.tuner_close(tunernum)

        return generate()

    def ffmpeg_stream(self, stream_args, tunernum):
        """Pipe the channel through ffmpeg (stream copy to MPEG-TS) and yield
        its stdout in fixed-size reads."""

        bytes_per_read = int(self.config.dict["ffmpeg"]["bytes_per_read"])

        # Remux only (-c copy); no transcoding is performed.
        ffmpeg_command = [self.config.dict["ffmpeg"]["ffmpeg_path"],
                          "-i", stream_args["channelUri"],
                          "-c", "copy",
                          "-f", "mpegts",
                          "-nostats", "-hide_banner",
                          "-loglevel", "fatal",
                          "pipe:stdout"
                          ]

        if not stream_args["duration"] == 0:
            stream_args["duration"] += time.time()

        ffmpeg_proc = subprocess.Popen(ffmpeg_command, stdout=subprocess.PIPE)

        def generate():
            try:
                while True:

                    if not stream_args["duration"] == 0 and not time.time() < stream_args["duration"]:
                        ffmpeg_proc.terminate()
                        ffmpeg_proc.communicate()
                        self.logger.info("Requested Duration Expired.")
                        break

                    videoData = ffmpeg_proc.stdout.read(bytes_per_read)
                    if not videoData:
                        break

                    try:
                        yield videoData

                    except Exception as e:
                        ffmpeg_proc.terminate()
                        ffmpeg_proc.communicate()
                        self.logger.info("Connection Closed: " + str(e))

            except GeneratorExit:
                ffmpeg_proc.terminate()
                ffmpeg_proc.communicate()
                self.logger.info("Connection Closed.")
                self.tuners.tuner_close(tunernum)

        return generate()

    def get_stream(self, stream_args):
        """Grab a tuner and dispatch to the configured stream method; returns
        None when no tuner is available."""

        try:
            tunernum = self.tuners.tuner_grab(stream_args)
        except TunerError as e:
            # NOTE(review): "do to" is a typo for "due to" in this log line.
            self.logger.info("A " + stream_args["method"] + " stream request for channel " +
                             str(stream_args["channel"]) + " was rejected do to " + str(e))
            return

        self.logger.info("Attempting a " + stream_args["method"] + " stream request for channel " + str(stream_args["channel"]))

        if stream_args["method"] == "ffmpeg":
            return self.ffmpeg_stream(stream_args, tunernum)
        elif stream_args["method"] == "direct":
            return self.direct_stream(stream_args, tunernum)

    def get_stream_info(self, stream_args):
        """Resolve the channel url and its Content-Type into stream_args."""

        stream_args["channelUri"] = self.origserv.get_channel_stream(str(stream_args["channel"]))
        if not stream_args["channelUri"]:
            self.logger.error("Could not Obtain Channel Stream.")
            stream_args["content_type"] = "video/mpeg"
        else:
            channelUri_headers = self.web.session.head(stream_args["channelUri"]).headers
            stream_args["content_type"] = channelUri_headers['Content-Type']

        return stream_args
|
||||
64
fHDHR/http/__init__.py
Normal file
@ -0,0 +1,64 @@
|
||||
from gevent.pywsgi import WSGIServer
|
||||
from flask import Flask
|
||||
|
||||
from .pages import fHDHR_Pages
|
||||
from .files import fHDHR_Files
|
||||
from .api import fHDHR_API
|
||||
|
||||
|
||||
class fHDHR_HTTP_Server():
    """Flask application wrapper: registers page/file/api endpoint handler
    objects as url rules and serves them via gevent's WSGIServer."""
    app = None

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

        self.app = Flask("fHDHR")

        # Each handler index exposes callables carrying `endpoints`,
        # `endpoint_name` and optional `endpoint_methods` attributes.
        self.pages = fHDHR_Pages(fhdhr)
        self.add_endpoints(self.pages, "pages")

        self.files = fHDHR_Files(fhdhr)
        self.add_endpoints(self.files, "files")

        self.api = fHDHR_API(fhdhr)
        self.add_endpoints(self.api, "api")

    def add_endpoints(self, index_list, index_name):
        """Register every endpoint handler found on `index_list`.

        `index_name` is retained for interface compatibility; the handler
        object itself is inspected directly.

        Fix: attribute access now uses getattr() instead of building and
        eval()-ing "self.<index>.<item>" strings, which was fragile and
        unsafe.
        """
        item_list = [x for x in dir(index_list) if self.isapath(x)]
        for item in item_list:
            handler = getattr(index_list, item)
            endpoints = handler.endpoints
            if isinstance(endpoints, str):
                endpoints = [endpoints]
            endpoint_name = handler.endpoint_name
            # Handlers without endpoint_methods default to GET only.
            endpoint_methods = getattr(handler, "endpoint_methods", ['GET'])
            for endpoint in endpoints:
                self.add_endpoint(endpoint=endpoint,
                                  endpoint_name=endpoint_name,
                                  handler=handler,
                                  methods=endpoint_methods)

    def isapath(self, item):
        """Return True for attribute names that represent endpoint handlers
        (filters dunders and known non-endpoint names)."""
        not_a_page_list = ["fhdhr", "htmlerror", "page_elements"]
        if item in not_a_page_list:
            return False
        elif item.startswith("__") and item.endswith("__"):
            return False
        else:
            return True

    def add_endpoint(self, endpoint=None, endpoint_name=None, handler=None, methods=['GET']):
        """Attach a single url rule to the Flask app."""
        self.app.add_url_rule(endpoint, endpoint_name, handler, methods=methods)

    def run(self):
        """Serve forever; KeyboardInterrupt stops the server cleanly."""
        self.http = WSGIServer((
            self.fhdhr.config.dict["fhdhr"]["address"],
            int(self.fhdhr.config.dict["fhdhr"]["port"])
            ), self.app.wsgi_app)
        try:
            self.http.serve_forever()
        except KeyboardInterrupt:
            self.http.stop()
|
||||
24
fHDHR/http/api/__init__.py
Normal file
@ -0,0 +1,24 @@
|
||||
|
||||
from .cluster import Cluster
|
||||
from .channels import Channels
|
||||
from .lineup_post import Lineup_Post
|
||||
from .xmltv import xmlTV
|
||||
from .m3u import M3U
|
||||
from .debug import Debug_JSON
|
||||
|
||||
from .images import Images
|
||||
|
||||
|
||||
class fHDHR_API():
    """Aggregates all /api endpoint handler objects for registration with
    the HTTP server."""

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

        # Each attribute is an endpoint handler exposing `endpoints` and
        # `endpoint_name`, consumed by fHDHR_HTTP_Server.add_endpoints.
        self.cluster = Cluster(fhdhr)
        self.channels = Channels(fhdhr)
        self.xmltv = xmlTV(fhdhr)
        self.m3u = M3U(fhdhr)
        self.debug = Debug_JSON(fhdhr)
        self.lineup_post = Lineup_Post(fhdhr)

        self.images = Images(fhdhr)
|
||||
32
fHDHR/http/api/channels.py
Normal file
@ -0,0 +1,32 @@
|
||||
from flask import request, redirect
|
||||
import urllib.parse
|
||||
|
||||
|
||||
class Channels():
    """/api/channels endpoint: currently only supports triggering a scan."""
    endpoints = ["/api/channels"]
    endpoint_name = "api_channels"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Handle GET: ?method=scan starts a station scan; anything else is
        rejected. Optional ?redirect= sends the caller back with a message."""

        method = request.args.get('method', default=None, type=str)
        redirect_url = request.args.get('redirect', default=None, type=str)

        # Guard clause: only "scan" is a valid method here.
        if method != "scan":
            return "Invalid Method"

        self.fhdhr.device.station_scan.scan()

        if redirect_url:
            return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Success" % method))
        return redirect('/lineup_status.json')
|
||||
52
fHDHR/http/api/cluster.py
Normal file
@ -0,0 +1,52 @@
|
||||
from flask import request, redirect, Response
|
||||
import urllib.parse
|
||||
import json
|
||||
|
||||
|
||||
class Cluster():
    """/api/cluster endpoint: query and manage fHDHR clustering."""
    endpoints = ["/api/cluster"]
    endpoint_name = "api_cluster"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Handle GET: ?method= one of get/scan/add/del/sync/leave/disconnect.
        add/del/sync also take ?location=. Optional ?redirect= sends the
        caller back with a success message."""

        method = request.args.get('method', default="get", type=str)
        location = request.args.get("location", default=None, type=str)
        redirect_url = request.args.get('redirect', default=None, type=str)

        cluster_device = self.fhdhr.device.cluster

        if method == "get":
            # Return the current cluster map as pretty-printed JSON.
            cluster_json = json.dumps(cluster_device.cluster(), indent=4)
            return Response(status=200,
                            response=cluster_json,
                            mimetype='application/json')

        if method == "scan":
            self.fhdhr.device.ssdp.m_search()
        elif method == 'add':
            cluster_device.add(location)
        elif method == 'del':
            cluster_device.remove(location)
        elif method == 'sync':
            cluster_device.sync(location)
        elif method == 'leave':
            cluster_device.leave()
        elif method == 'disconnect':
            cluster_device.disconnect()
        else:
            return "Invalid Method"

        if redirect_url:
            return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Success" % method))
        return "%s Success" % method
|
||||
@ -5,7 +5,6 @@ import json
|
||||
class Debug_JSON():
|
||||
endpoints = ["/api/debug"]
|
||||
endpoint_name = "api_debug"
|
||||
endpoint_methods = ["GET", "POST"]
|
||||
|
||||
def __init__(self, fhdhr):
|
||||
self.fhdhr = fhdhr
|
||||
@ -19,16 +18,11 @@ class Debug_JSON():
|
||||
|
||||
debugjson = {
|
||||
"base_url": base_url,
|
||||
"total channels": self.fhdhr.device.channels.get_station_total(),
|
||||
"tuner status": self.fhdhr.device.tuners.status(),
|
||||
}
|
||||
|
||||
for origin in list(self.fhdhr.origins.origins_dict.keys()):
|
||||
debugjson[origin] = {
|
||||
"tuner status": self.fhdhr.device.tuners.status(origin),
|
||||
"total channels": len(list(self.fhdhr.device.channels.list[origin].keys()))
|
||||
}
|
||||
|
||||
debug_json = json.dumps(debugjson, indent=4)
|
||||
cluster_json = json.dumps(debugjson, indent=4)
|
||||
|
||||
return Response(status=200,
|
||||
response=debug_json,
|
||||
response=cluster_json,
|
||||
mimetype='application/json')
|
||||
@ -4,12 +4,6 @@ from flask import request, Response, abort
|
||||
class Images():
|
||||
endpoints = ["/api/images"]
|
||||
endpoint_name = "api_images"
|
||||
endpoint_methods = ["GET", "POST"]
|
||||
endpoint_default_parameters = {
|
||||
"method": "generate",
|
||||
"type": "content",
|
||||
"message": "Internal Image Handling"
|
||||
}
|
||||
|
||||
def __init__(self, fhdhr):
|
||||
self.fhdhr = fhdhr
|
||||
@ -31,7 +25,7 @@ class Images():
|
||||
|
||||
elif method == "get":
|
||||
source = request.args.get('source', default=self.fhdhr.config.dict["epg"]["method"], type=str)
|
||||
if source in list(self.fhdhr.config.dict["epg"]["valid_methods"].keys()):
|
||||
if source in self.fhdhr.config.dict["main"]["valid_epg_methods"]:
|
||||
image_type = request.args.get('type', default="content", type=str)
|
||||
if image_type in ["content", "channel"]:
|
||||
image_id = request.args.get('id', default=None, type=str)
|
||||
31
fHDHR/http/api/lineup_post.py
Normal file
@ -0,0 +1,31 @@
|
||||
from flask import request, abort, Response
|
||||
|
||||
|
||||
class Lineup_Post():
    """/lineup.post endpoint: HDHomeRun-style scan control (POST only)."""
    endpoints = ["/lineup.post"]
    endpoint_name = "lineup_post"
    endpoint_methods = ["POST"]

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Handle ?scan=start|abort; anything else is rejected."""

        # Guard clause: a scan argument is mandatory.
        if 'scan' not in list(request.args.keys()):
            return abort(501, "Not a valid command")

        scan_command = request.args['scan']

        if scan_command == 'start':
            self.fhdhr.device.station_scan.scan()
            return Response(status=200, mimetype='text/html')

        if scan_command == 'abort':
            # Nothing to abort server-side; acknowledge the request.
            return Response(status=200, mimetype='text/html')

        self.fhdhr.logger.warning("Unknown scan command " + request.args['scan'])
        return abort(200, "Not a valid scan command")
|
||||
83
fHDHR/http/api/m3u.py
Normal file
@ -0,0 +1,83 @@
|
||||
from flask import Response, request, redirect
|
||||
import urllib.parse
|
||||
from io import StringIO
|
||||
|
||||
|
||||
class M3U():
    """/api/m3u endpoint: renders the channel lineup as an extended M3U
    playlist with xmltv guide, logo, and stream urls for each channel."""
    endpoints = ["/api/m3u", "/api/channels.m3u"]
    endpoint_name = "api_m3u"
    xmltv_xml = None

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Handle GET: ?method=get returns the playlist for all channels or a
        single ?channel=<number>. Optional ?redirect= for other methods."""

        base_url = request.url_root[:-1]

        method = request.args.get('method', default="get", type=str)
        channel = request.args.get('channel', default="all", type=str)
        redirect_url = request.args.get('redirect', default=None, type=str)

        if method == "get":

            FORMAT_DESCRIPTOR = "#EXTM3U"
            RECORD_MARKER = "#EXTINF"

            # Build the playlist text in memory.
            fakefile = StringIO()

            xmltvurl = ('%s/api/xmltv' % base_url)

            # Header line advertising the xmltv guide url.
            fakefile.write(
                "%s\n" % (
                    FORMAT_DESCRIPTOR + " " +
                    "url-tvg=\"" + xmltvurl + "\"" + " " +
                    "x-tvg-url=\"" + xmltvurl + "\"")
                )

            channel_list = self.fhdhr.device.channels.get_channels()
            channel_number_list = [x["number"] for x in channel_list]

            if channel == "all":
                channel_items = channel_list
            elif channel in channel_number_list:
                channel_items = [self.fhdhr.device.channels.get_channel_dict("number", channel)]
            else:
                return "Invalid Channel"

            for channel_item in channel_items:

                logourl = ('%s/api/images?method=get&type=channel&id=%s' %
                           (base_url, str(channel_item['id'])))

                # #EXTINF record carrying the channel metadata attributes.
                fakefile.write(
                    "%s\n" % (
                        RECORD_MARKER + ":0" + " " +
                        "channelID=\"" + str(channel_item['id']) + "\" " +
                        "tvg-chno=\"" + str(channel_item['number']) + "\" " +
                        "tvg-name=\"" + str(channel_item['name']) + "\" " +
                        "tvg-id=\"" + str(channel_item['number']) + "\" " +
                        "tvg-logo=\"" + logourl + "\" " +
                        "group-title=\"" + self.fhdhr.config.dict["fhdhr"]["friendlyname"] + "," + str(channel_item['name']))
                    )

                # Stream url line for this channel.
                fakefile.write(
                    "%s\n" % (
                        ('%s/auto/v%s' %
                         (base_url, str(channel_item['number'])))
                        )
                    )

            channels_m3u = fakefile.getvalue()

            return Response(status=200,
                            response=channels_m3u,
                            mimetype='text/plain')

        if redirect_url:
            return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Success" % method))
        else:
            return "%s Success" % method
|
||||
@ -1,8 +1,7 @@
|
||||
from flask import Response, request, redirect, session
|
||||
from flask import Response, request, redirect
|
||||
import xml.etree.ElementTree
|
||||
from io import BytesIO
|
||||
import urllib.parse
|
||||
import datetime
|
||||
|
||||
from fHDHR.tools import sub_el
|
||||
|
||||
@ -11,26 +10,19 @@ class xmlTV():
|
||||
"""Methods to create xmltv.xml"""
|
||||
endpoints = ["/api/xmltv", "/xmltv.xml"]
|
||||
endpoint_name = "api_xmltv"
|
||||
endpoint_methods = ["GET", "POST"]
|
||||
xmltv_xml = None
|
||||
|
||||
def __init__(self, fhdhr):
|
||||
self.fhdhr = fhdhr
|
||||
|
||||
self.xmltv_offset = {}
|
||||
for epg_method in list(self.fhdhr.device.epg.epg_handling.keys()):
|
||||
if epg_method in list(self.fhdhr.config.dict.keys()):
|
||||
if "xmltv_offset" in list(self.fhdhr.config.dict[epg_method].keys()):
|
||||
self.xmltv_offset[epg_method] = self.fhdhr.config.dict[epg_method]["xmltv_offset"]
|
||||
if epg_method not in list(self.xmltv_offset.keys()):
|
||||
self.xmltv_offset[epg_method] = self.fhdhr.config.dict["epg"]["xmltv_offset"]
|
||||
|
||||
def __call__(self, *args):
|
||||
return self.get(*args)
|
||||
|
||||
def get(self, *args):
|
||||
|
||||
if self.fhdhr.config.dict["fhdhr"]["require_auth"]:
|
||||
if session["deviceauth"] != self.fhdhr.config.dict["fhdhr"]["device_auth"]:
|
||||
DeviceAuth = request.args.get('DeviceAuth', default=None, type=str)
|
||||
if DeviceAuth != self.fhdhr.config.dict["fhdhr"]["device_auth"]:
|
||||
return "not subscribed"
|
||||
|
||||
base_url = request.url_root[:-1]
|
||||
@ -38,7 +30,7 @@ class xmlTV():
|
||||
method = request.args.get('method', default="get", type=str)
|
||||
|
||||
source = request.args.get('source', default=self.fhdhr.config.dict["epg"]["def_method"], type=str)
|
||||
if source not in list(self.fhdhr.config.dict["epg"]["valid_methods"].keys()):
|
||||
if source not in self.fhdhr.config.dict["main"]["valid_epg_methods"]:
|
||||
return "%s Invalid xmltv method" % source
|
||||
|
||||
redirect_url = request.args.get('redirect', default=None, type=str)
|
||||
@ -46,19 +38,7 @@ class xmlTV():
|
||||
if method == "get":
|
||||
|
||||
epgdict = self.fhdhr.device.epg.get_epg(source)
|
||||
|
||||
if source in self.fhdhr.origins.valid_origins:
|
||||
epgdict = epgdict.copy()
|
||||
for c in list(epgdict.keys()):
|
||||
chan_obj = self.fhdhr.device.channels.get_channel_obj("origin_id", epgdict[c]["id"], source)
|
||||
epgdict[chan_obj.number] = epgdict.pop(c)
|
||||
epgdict[chan_obj.number]["name"] = chan_obj.dict["name"]
|
||||
epgdict[chan_obj.number]["callsign"] = chan_obj.dict["callsign"]
|
||||
epgdict[chan_obj.number]["number"] = chan_obj.number
|
||||
epgdict[chan_obj.number]["id"] = chan_obj.dict["origin_id"]
|
||||
epgdict[chan_obj.number]["thumbnail"] = chan_obj.thumbnail
|
||||
|
||||
xmltv_xml = self.create_xmltv(base_url, epgdict, source)
|
||||
xmltv_xml = self.create_xmltv(base_url, epgdict)
|
||||
|
||||
return Response(status=200,
|
||||
response=xmltv_xml,
|
||||
@ -67,14 +47,11 @@ class xmlTV():
|
||||
elif method == "update":
|
||||
self.fhdhr.device.epg.update(source)
|
||||
|
||||
elif method == "clearcache":
|
||||
self.fhdhr.device.epg.clear_epg_cache(source)
|
||||
|
||||
else:
|
||||
return "%s Invalid Method" % method
|
||||
|
||||
if redirect_url:
|
||||
return redirect("%s?retmessage=%s" % (redirect_url, urllib.parse.quote("%s Success" % method)))
|
||||
return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Success" % method))
|
||||
else:
|
||||
return "%s Success" % method
|
||||
|
||||
@ -84,7 +61,7 @@ class xmlTV():
|
||||
xmltvgen.set('source-info-url', self.fhdhr.config.dict["fhdhr"]["friendlyname"])
|
||||
xmltvgen.set('source-info-name', self.fhdhr.config.dict["main"]["servicename"])
|
||||
xmltvgen.set('generator-info-name', 'fHDHR')
|
||||
xmltvgen.set('generator-info-url', 'fHDHR/%s' % self.fhdhr.config.dict["main"]["reponame"])
|
||||
xmltvgen.set('generator-info-url', 'fHDHR/' + self.fhdhr.config.dict["main"]["reponame"])
|
||||
return xmltvgen
|
||||
|
||||
def xmltv_file(self, xmltvgen):
|
||||
@ -98,31 +75,12 @@ class xmlTV():
|
||||
"""This method is called when creation of a full xmltv is not possible"""
|
||||
return self.xmltv_file(self.xmltv_headers())
|
||||
|
||||
def timestamp_to_datetime(self, time_start, time_end, source):
|
||||
xmltvtimetamps = {}
|
||||
source_offset = self.xmltv_offset[source]
|
||||
for time_item, time_value in zip(["time_start", "time_end"], [time_start, time_end]):
|
||||
timestampval = datetime.datetime.fromtimestamp(time_value).strftime('%Y%m%d%H%M%S')
|
||||
xmltvtimetamps[time_item] = "%s %s" % (timestampval, source_offset)
|
||||
return xmltvtimetamps
|
||||
|
||||
def create_xmltv(self, base_url, epgdict, source):
|
||||
def create_xmltv(self, base_url, epgdict):
|
||||
if not epgdict:
|
||||
return self.xmltv_empty()
|
||||
epgdict = epgdict.copy()
|
||||
|
||||
out = self.xmltv_headers()
|
||||
|
||||
if source in self.fhdhr.origins.valid_origins:
|
||||
for c in list(epgdict.keys()):
|
||||
chan_obj = self.fhdhr.device.channels.get_channel_obj("origin_id", epgdict[c]["id"], source)
|
||||
epgdict[chan_obj.number] = epgdict.pop(c)
|
||||
epgdict[chan_obj.number]["name"] = chan_obj.dict["name"]
|
||||
epgdict[chan_obj.number]["callsign"] = chan_obj.dict["callsign"]
|
||||
epgdict[chan_obj.number]["number"] = chan_obj.number
|
||||
epgdict[chan_obj.number]["id"] = chan_obj.dict["origin_id"]
|
||||
epgdict[chan_obj.number]["thumbnail"] = chan_obj.thumbnail
|
||||
|
||||
for c in list(epgdict.keys()):
|
||||
|
||||
c_out = sub_el(out, 'channel', id=str(epgdict[c]['number']))
|
||||
@ -134,10 +92,13 @@ class xmlTV():
|
||||
sub_el(c_out, 'display-name', text=epgdict[c]['callsign'])
|
||||
sub_el(c_out, 'display-name', text=epgdict[c]['name'])
|
||||
|
||||
if self.fhdhr.config.dict["epg"]["images"] == "proxy":
|
||||
sub_el(c_out, 'icon', src=("%s/api/images?method=get&type=channel&id=%s" % (base_url, epgdict[c]['id'])))
|
||||
if epgdict[c]["thumbnail"] is not None:
|
||||
if self.fhdhr.config.dict["epg"]["images"] == "proxy":
|
||||
sub_el(c_out, 'icon', src=(str(base_url) + "/api/images?method=get&type=channel&id=" + str(epgdict[c]['id'])))
|
||||
else:
|
||||
sub_el(c_out, 'icon', src=(epgdict[c]["thumbnail"]))
|
||||
else:
|
||||
sub_el(c_out, 'icon', src=(epgdict[c]["thumbnail"]))
|
||||
sub_el(c_out, 'icon', src=(str(base_url) + "/api/images?method=generate&type=channel&message=" + urllib.parse.quote(epgdict[c]['name'])))
|
||||
|
||||
for channelnum in list(epgdict.keys()):
|
||||
|
||||
@ -145,18 +106,16 @@ class xmlTV():
|
||||
|
||||
for program in channel_listing:
|
||||
|
||||
xmltvtimetamps = self.timestamp_to_datetime(program['time_start'], program['time_end'], source)
|
||||
|
||||
prog_out = sub_el(out, 'programme',
|
||||
start=xmltvtimetamps['time_start'],
|
||||
stop=xmltvtimetamps['time_end'],
|
||||
start=program['time_start'],
|
||||
stop=program['time_end'],
|
||||
channel=str(channelnum))
|
||||
|
||||
sub_el(prog_out, 'title', lang='en', text=program['title'])
|
||||
|
||||
sub_el(prog_out, 'desc', lang='en', text=program['description'])
|
||||
|
||||
sub_el(prog_out, 'sub-title', lang='en', text='Movie: %s' % program['sub-title'])
|
||||
sub_el(prog_out, 'sub-title', lang='en', text='Movie: ' + program['sub-title'])
|
||||
|
||||
sub_el(prog_out, 'length', units='minutes', text=str(int(program['duration_minutes'])))
|
||||
|
||||
@ -178,11 +137,11 @@ class xmlTV():
|
||||
|
||||
if program["thumbnail"]:
|
||||
if self.fhdhr.config.dict["epg"]["images"] == "proxy":
|
||||
sub_el(prog_out, 'icon', src=("%s/api/images?method=get&type=content&id=%s" % (base_url, program['id'])))
|
||||
sub_el(prog_out, 'icon', src=(str(base_url) + "/api/images?method=get&type=content&id=" + str(program['id'])))
|
||||
else:
|
||||
sub_el(prog_out, 'icon', src=(program["thumbnail"]))
|
||||
else:
|
||||
sub_el(prog_out, 'icon', src=("%s/api/images?method=generate&type=content&message=%s" % (base_url, urllib.parse.quote(program['title']))))
|
||||
sub_el(prog_out, 'icon', src=(str(base_url) + "/api/images?method=generate&type=content&message=" + urllib.parse.quote(program['title'])))
|
||||
|
||||
if program['rating']:
|
||||
rating_out = sub_el(prog_out, 'rating', system="MPAA")
|
||||
@ -1,32 +1,31 @@
|
||||
|
||||
|
||||
from .favicon_ico import Favicon_ICO
|
||||
from .style_css import Style_CSS
|
||||
|
||||
from .device_xml import Device_XML
|
||||
from .lineup_xml import Lineup_XML
|
||||
|
||||
from .discover_json import Discover_JSON
|
||||
from .lineup_json import Lineup_JSON
|
||||
from .lineup_status_json import Lineup_Status_JSON
|
||||
|
||||
from .lineup_post import Lineup_Post
|
||||
from .device_xml import HDHR_Device_XML
|
||||
|
||||
from .auto import Auto
|
||||
from .tuner import Tuner
|
||||
from .watch import Watch
|
||||
|
||||
|
||||
class Plugin_OBJ():
|
||||
class fHDHR_Files():
|
||||
|
||||
def __init__(self, fhdhr, plugin_utils):
|
||||
def __init__(self, fhdhr):
|
||||
self.fhdhr = fhdhr
|
||||
self.plugin_utils = plugin_utils
|
||||
|
||||
self.lineup_post = Lineup_Post(fhdhr)
|
||||
|
||||
self.device_xml = HDHR_Device_XML(fhdhr)
|
||||
|
||||
self.auto = Auto(fhdhr)
|
||||
self.tuner = Tuner(fhdhr)
|
||||
self.favicon = Favicon_ICO(fhdhr)
|
||||
self.style = Style_CSS(fhdhr)
|
||||
|
||||
self.device_xml = Device_XML(fhdhr)
|
||||
self.lineup_xml = Lineup_XML(fhdhr)
|
||||
|
||||
self.discover_json = Discover_JSON(fhdhr)
|
||||
self.lineup_json = Lineup_JSON(fhdhr)
|
||||
self.lineup_status_json = Lineup_Status_JSON(fhdhr)
|
||||
|
||||
self.watch = Watch(fhdhr)
|
||||
@ -5,36 +5,37 @@ import xml.etree.ElementTree
|
||||
from fHDHR.tools import sub_el
|
||||
|
||||
|
||||
class Cluster_Device_XML():
|
||||
endpoints = ["/cluster/device.xml"]
|
||||
endpoint_name = "cluster_device_xml"
|
||||
class Device_XML():
|
||||
endpoints = ["/device.xml"]
|
||||
endpoint_name = "device_xml"
|
||||
|
||||
def __init__(self, fhdhr, plugin_utils):
|
||||
def __init__(self, fhdhr):
|
||||
self.fhdhr = fhdhr
|
||||
self.plugin_utils = plugin_utils
|
||||
|
||||
def __call__(self, *args):
|
||||
return self.get(*args)
|
||||
|
||||
def get(self, *args):
|
||||
"""Device.xml referenced from SSDP"""
|
||||
|
||||
base_url = request.url_root[:-1]
|
||||
|
||||
out = xml.etree.ElementTree.Element('root')
|
||||
out.set('xmlns', "upnp:rootdevice")
|
||||
out.set('xmlns', "urn:schemas-upnp-org:device-1-0")
|
||||
|
||||
sub_el(out, 'URLBase', "%s" % base_url)
|
||||
sub_el(out, 'URLBase', base_url)
|
||||
|
||||
specVersion_out = sub_el(out, 'specVersion')
|
||||
sub_el(specVersion_out, 'major', "1")
|
||||
sub_el(specVersion_out, 'minor', "0")
|
||||
|
||||
device_out = sub_el(out, 'device')
|
||||
|
||||
sub_el(device_out, 'deviceType', "upnp:rootdevice")
|
||||
sub_el(device_out, 'deviceType', "urn:schemas-upnp-org:device:MediaServer:1")
|
||||
sub_el(device_out, 'friendlyName', self.fhdhr.config.dict["fhdhr"]["friendlyname"])
|
||||
sub_el(device_out, 'UDN', "uuid:%s" % self.fhdhr.config.dict["main"]["uuid"])
|
||||
sub_el(device_out, 'manufacturer', self.fhdhr.config.dict["fhdhr"]["reporting_manufacturer"])
|
||||
sub_el(device_out, 'modelName', self.fhdhr.config.dict["fhdhr"]["reporting_model"])
|
||||
sub_el(device_out, 'modelNumber', self.fhdhr.config.dict["fhdhr"]["reporting_model"])
|
||||
sub_el(device_out, 'serialNumber')
|
||||
sub_el(device_out, 'UDN', "uuid:" + self.fhdhr.config.dict["main"]["uuid"])
|
||||
|
||||
fakefile = BytesIO()
|
||||
fakefile.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
|
||||
@ -3,16 +3,12 @@ import json
|
||||
|
||||
|
||||
class Discover_JSON():
|
||||
endpoints = ["/discover.json", "/hdhr/discover.json"]
|
||||
endpoint_name = "hdhr_discover_json"
|
||||
endpoints = ["/discover.json"]
|
||||
endpoint_name = "discover_json"
|
||||
|
||||
def __init__(self, fhdhr):
|
||||
self.fhdhr = fhdhr
|
||||
|
||||
@property
|
||||
def source(self):
|
||||
return self.fhdhr.config.dict["hdhr"]["source"] or self.fhdhr.origins.valid_origins[0]
|
||||
|
||||
def __call__(self, *args):
|
||||
return self.get(*args)
|
||||
|
||||
@ -20,19 +16,17 @@ class Discover_JSON():
|
||||
|
||||
base_url = request.url_root[:-1]
|
||||
|
||||
origin = self.source
|
||||
|
||||
jsondiscover = {
|
||||
"FriendlyName": "%s %s" % (self.fhdhr.config.dict["fhdhr"]["friendlyname"], origin),
|
||||
"Manufacturer": self.fhdhr.config.dict["hdhr"]["reporting_manufacturer"],
|
||||
"ModelNumber": self.fhdhr.config.dict["hdhr"]["reporting_model"],
|
||||
"FirmwareName": self.fhdhr.config.dict["hdhr"]["reporting_firmware_name"],
|
||||
"TunerCount": self.fhdhr.origins.origins_dict[origin].tuners,
|
||||
"FirmwareVersion": self.fhdhr.config.dict["hdhr"]["reporting_firmware_ver"],
|
||||
"DeviceID": "%s%s" % (self.fhdhr.config.dict["main"]["uuid"], origin),
|
||||
"FriendlyName": self.fhdhr.config.dict["fhdhr"]["friendlyname"],
|
||||
"Manufacturer": self.fhdhr.config.dict["fhdhr"]["reporting_manufacturer"],
|
||||
"ModelNumber": self.fhdhr.config.dict["fhdhr"]["reporting_model"],
|
||||
"FirmwareName": self.fhdhr.config.dict["fhdhr"]["reporting_firmware_name"],
|
||||
"TunerCount": self.fhdhr.config.dict["fhdhr"]["tuner_count"],
|
||||
"FirmwareVersion": self.fhdhr.config.dict["fhdhr"]["reporting_firmware_ver"],
|
||||
"DeviceID": self.fhdhr.config.dict["main"]["uuid"],
|
||||
"DeviceAuth": self.fhdhr.config.dict["fhdhr"]["device_auth"],
|
||||
"BaseURL": "%s/hdhr" % base_url,
|
||||
"LineupURL": "%s/hdhr/lineup.json" % base_url
|
||||
"BaseURL": base_url,
|
||||
"LineupURL": base_url + "/lineup.json"
|
||||
}
|
||||
discover_json = json.dumps(jsondiscover, indent=4)
|
||||
|
||||
@ -3,7 +3,7 @@ from flask import send_from_directory
|
||||
|
||||
class Favicon_ICO():
|
||||
endpoints = ["/favicon.ico"]
|
||||
endpoint_name = "file_favicon_ico"
|
||||
endpoint_name = "favicon"
|
||||
|
||||
def __init__(self, fhdhr):
|
||||
self.fhdhr = fhdhr
|
||||
@ -13,6 +13,6 @@ class Favicon_ICO():
|
||||
|
||||
def get(self, *args):
|
||||
|
||||
return send_from_directory(self.fhdhr.config.internal["paths"]["www_dir"],
|
||||
return send_from_directory(self.fhdhr.config.dict["filedir"]["www_dir"],
|
||||
'favicon.ico',
|
||||
mimetype='image/vnd.microsoft.icon')
|
||||
24
fHDHR/http/files/lineup_json.py
Normal file
@ -0,0 +1,24 @@
|
||||
from flask import Response, request
|
||||
import json
|
||||
|
||||
|
||||
class Lineup_JSON():
|
||||
endpoints = ["/lineup.json"]
|
||||
endpoint_name = "lineup_json"
|
||||
|
||||
def __init__(self, fhdhr):
|
||||
self.fhdhr = fhdhr
|
||||
|
||||
def __call__(self, *args):
|
||||
return self.get(*args)
|
||||
|
||||
def get(self, *args):
|
||||
|
||||
base_url = request.url_root[:-1]
|
||||
|
||||
jsonlineup = self.fhdhr.device.channels.get_station_list(base_url)
|
||||
lineup_json = json.dumps(jsonlineup, indent=4)
|
||||
|
||||
return Response(status=200,
|
||||
response=lineup_json,
|
||||
mimetype='application/json')
|
||||
46
fHDHR/http/files/lineup_status_json.py
Normal file
@ -0,0 +1,46 @@
|
||||
from flask import Response
|
||||
import json
|
||||
|
||||
|
||||
class Lineup_Status_JSON():
|
||||
endpoints = ["/lineup_status.json"]
|
||||
endpoint_name = "lineup_status_json"
|
||||
|
||||
def __init__(self, fhdhr):
|
||||
self.fhdhr = fhdhr
|
||||
|
||||
def __call__(self, *args):
|
||||
return self.get(*args)
|
||||
|
||||
def get(self, *args):
|
||||
|
||||
station_scanning = self.fhdhr.device.station_scan.scanning()
|
||||
if station_scanning:
|
||||
jsonlineup = self.scan_in_progress()
|
||||
elif not self.fhdhr.device.channels.get_station_total():
|
||||
jsonlineup = self.scan_in_progress()
|
||||
else:
|
||||
jsonlineup = self.not_scanning()
|
||||
lineup_json = json.dumps(jsonlineup, indent=4)
|
||||
|
||||
return Response(status=200,
|
||||
response=lineup_json,
|
||||
mimetype='application/json')
|
||||
|
||||
def scan_in_progress(self):
|
||||
channel_count = self.fhdhr.device.channels.get_station_total()
|
||||
jsonlineup = {
|
||||
"ScanInProgress": "true",
|
||||
"Progress": 99,
|
||||
"Found": channel_count
|
||||
}
|
||||
return jsonlineup
|
||||
|
||||
def not_scanning(self):
|
||||
jsonlineup = {
|
||||
"ScanInProgress": "false",
|
||||
"ScanPossible": "true",
|
||||
"Source": self.fhdhr.config.dict["fhdhr"]["reporting_tuner_type"],
|
||||
"SourceList": [self.fhdhr.config.dict["fhdhr"]["reporting_tuner_type"]],
|
||||
}
|
||||
return jsonlineup
|
||||
37
fHDHR/http/files/lineup_xml.py
Normal file
@ -0,0 +1,37 @@
|
||||
from flask import Response, request
|
||||
from io import BytesIO
|
||||
import xml.etree.ElementTree
|
||||
|
||||
from fHDHR.tools import sub_el
|
||||
|
||||
|
||||
class Lineup_XML():
|
||||
endpoints = ["/lineup.xml"]
|
||||
endpoint_name = "lineup_xml"
|
||||
|
||||
def __init__(self, fhdhr):
|
||||
self.fhdhr = fhdhr
|
||||
|
||||
def __call__(self, *args):
|
||||
return self.get(*args)
|
||||
|
||||
def get(self, *args):
|
||||
|
||||
base_url = request.url_root[:-1]
|
||||
|
||||
out = xml.etree.ElementTree.Element('Lineup')
|
||||
station_list = self.fhdhr.device.channels.get_station_list(base_url)
|
||||
for station_item in station_list:
|
||||
program_out = sub_el(out, 'Program')
|
||||
sub_el(program_out, 'GuideNumber', station_item['GuideNumber'])
|
||||
sub_el(program_out, 'GuideName', station_item['GuideName'])
|
||||
sub_el(program_out, 'URL', station_item['URL'])
|
||||
|
||||
fakefile = BytesIO()
|
||||
fakefile.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
|
||||
fakefile.write(xml.etree.ElementTree.tostring(out, encoding='UTF-8'))
|
||||
lineup_xml = fakefile.getvalue()
|
||||
|
||||
return Response(status=200,
|
||||
response=lineup_xml,
|
||||
mimetype='application/xml')
|
||||
17
fHDHR/http/files/style_css.py
Normal file
@ -0,0 +1,17 @@
|
||||
from flask import send_from_directory
|
||||
|
||||
|
||||
class Style_CSS():
|
||||
endpoints = ["/style.css"]
|
||||
endpoint_name = "style"
|
||||
|
||||
def __init__(self, fhdhr):
|
||||
self.fhdhr = fhdhr
|
||||
|
||||
def __call__(self, *args):
|
||||
return self.get(*args)
|
||||
|
||||
def get(self, *args):
|
||||
|
||||
return send_from_directory(self.fhdhr.config.dict["filedir"]["www_dir"],
|
||||
'style.css')
|
||||
29
fHDHR/http/files/watch.py
Normal file
@ -0,0 +1,29 @@
|
||||
from flask import Response, request, stream_with_context, abort
|
||||
|
||||
|
||||
class Watch():
|
||||
endpoints = ['/auto/<channel>']
|
||||
endpoint_name = "auto"
|
||||
|
||||
def __init__(self, fhdhr):
|
||||
self.fhdhr = fhdhr
|
||||
|
||||
def __call__(self, channel, *args):
|
||||
return self.get(channel, *args)
|
||||
|
||||
def get(self, channel, *args):
|
||||
|
||||
base_url = request.url_root[:-1]
|
||||
stream_args = {
|
||||
"channel": channel.replace('v', ''),
|
||||
"method": request.args.get('method', default=self.fhdhr.config.dict["fhdhr"]["stream_type"], type=str),
|
||||
"duration": request.args.get('duration', default=0, type=int),
|
||||
"accessed": self.fhdhr.device.channels.get_fhdhr_stream_url(base_url, channel.replace('v', '')),
|
||||
}
|
||||
stream_args = self.fhdhr.device.watch.get_stream_info(stream_args)
|
||||
if stream_args["channelUri"]:
|
||||
if stream_args["method"] == "direct":
|
||||
return Response(self.fhdhr.device.watch.get_stream(stream_args), content_type=stream_args["content_type"], direct_passthrough=True)
|
||||
elif stream_args["method"] == "ffmpeg":
|
||||
return Response(stream_with_context(self.fhdhr.device.watch.get_stream(stream_args)), mimetype="video/mpeg")
|
||||
abort(503)
|
||||
33
fHDHR/http/pages/__init__.py
Normal file
@ -0,0 +1,33 @@
|
||||
|
||||
|
||||
from .htmlerror import HTMLerror
|
||||
from .page_elements import fHDHR_Page_Elements
|
||||
from .index_html import Index_HTML
|
||||
from .origin_html import Origin_HTML
|
||||
from .cluster_html import Cluster_HTML
|
||||
from .diagnostics_html import Diagnostics_HTML
|
||||
from .streams_html import Streams_HTML
|
||||
from .version_html import Version_HTML
|
||||
from .guide_html import Guide_HTML
|
||||
from .xmltv_html import xmlTV_HTML
|
||||
|
||||
|
||||
class fHDHR_Pages():
|
||||
|
||||
def __init__(self, fhdhr):
|
||||
self.fhdhr = fhdhr
|
||||
|
||||
self.page_elements = fHDHR_Page_Elements(fhdhr)
|
||||
|
||||
self.index = Index_HTML(fhdhr, self.page_elements)
|
||||
|
||||
self.htmlerror = HTMLerror(fhdhr)
|
||||
|
||||
self.index = Index_HTML(fhdhr, self.page_elements)
|
||||
self.origin = Origin_HTML(fhdhr, self.page_elements)
|
||||
self.cluster = Cluster_HTML(fhdhr, self.page_elements)
|
||||
self.diagnostics = Diagnostics_HTML(fhdhr, self.page_elements)
|
||||
self.version = Version_HTML(fhdhr, self.page_elements)
|
||||
self.guide = Guide_HTML(fhdhr, self.page_elements)
|
||||
self.streams = Streams_HTML(fhdhr, self.page_elements)
|
||||
self.xmltv = xmlTV_HTML(fhdhr, self.page_elements)
|
||||
88
fHDHR/http/pages/cluster_html.py
Normal file
@ -0,0 +1,88 @@
|
||||
from flask import request
|
||||
from io import StringIO
|
||||
import urllib.parse
|
||||
|
||||
|
||||
class Cluster_HTML():
|
||||
endpoints = ["/cluster", "/cluster.html"]
|
||||
endpoint_name = "cluster"
|
||||
|
||||
def __init__(self, fhdhr, page_elements):
|
||||
self.fhdhr = fhdhr
|
||||
self.page_elements = page_elements
|
||||
|
||||
def __call__(self, *args):
|
||||
return self.get(*args)
|
||||
|
||||
def get(self, *args):
|
||||
|
||||
fakefile = StringIO()
|
||||
|
||||
page_elements = self.page_elements.get(request)
|
||||
|
||||
for line in page_elements["top"]:
|
||||
fakefile.write(line + "\n")
|
||||
|
||||
fakefile.write("<h4 style=\"text-align: center;\">Cluster</h4>")
|
||||
fakefile.write("\n")
|
||||
|
||||
if self.fhdhr.config.dict["fhdhr"]["discovery_address"]:
|
||||
|
||||
fakefile.write("<div style=\"text-align: center;\">\n")
|
||||
fakefile.write(" <button onclick=\"OpenLink('%s')\">%s</a></button>\n" % ("/api/cluster?method=scan&redirect=%2Fcluster", "Force Scan"))
|
||||
fakefile.write(" <button onclick=\"OpenLink('%s')\">%s</a></button>\n" % ("/api/cluster?method=disconnect&redirect=%2Fcluster", "Disconnect"))
|
||||
fakefile.write("</div><br>\n")
|
||||
|
||||
fakefile.write("<table class=\"center\" style=\"width:50%\">\n")
|
||||
fakefile.write(" <tr>\n")
|
||||
fakefile.write(" <th>Name</th>\n")
|
||||
fakefile.write(" <th>Location</th>\n")
|
||||
fakefile.write(" <th>Joined</th>\n")
|
||||
fakefile.write(" <th>Options</th>\n")
|
||||
fakefile.write(" </tr>\n")
|
||||
|
||||
fhdhr_list = self.fhdhr.device.cluster.get_list()
|
||||
for location in list(fhdhr_list.keys()):
|
||||
fakefile.write(" <tr>\n")
|
||||
|
||||
if location in list(self.fhdhr.device.cluster.cluster().keys()):
|
||||
location_name = self.fhdhr.device.cluster.cluster()[location]["name"]
|
||||
else:
|
||||
try:
|
||||
location_info_url = location + "/discover.json"
|
||||
locatation_info_req = self.fhdhr.web.session.get(location_info_url)
|
||||
location_info = locatation_info_req.json()
|
||||
location_name = location_info["FriendlyName"]
|
||||
except self.fhdhr.web.exceptions.ConnectionError:
|
||||
self.fhdhr.logger.error("Unreachable: " + location)
|
||||
fakefile.write(" <td>%s</td>\n" % (str(location_name)))
|
||||
|
||||
fakefile.write(" <td>%s</td>\n" % (str(location)))
|
||||
|
||||
fakefile.write(" <td>%s</td>\n" % (str(fhdhr_list[location]["Joined"])))
|
||||
|
||||
fakefile.write(" <td>\n")
|
||||
fakefile.write(" <div>\n")
|
||||
location_url_query = urllib.parse.quote(location)
|
||||
fakefile.write(
|
||||
" <button onclick=\"OpenLink('%s')\">%s</a></button>\n" %
|
||||
(location, "Visit"))
|
||||
if not fhdhr_list[location]["Joined"]:
|
||||
fakefile.write(
|
||||
" <button onclick=\"OpenLink('%s')\">%s</a></button>\n" %
|
||||
("/api/cluster?method=add&location=" + location_url_query + "&redirect=%2Fcluster", "Add"))
|
||||
else:
|
||||
fakefile.write(
|
||||
" <button onclick=\"OpenLink('%s')\">%s</a></button>\n" %
|
||||
("/api/cluster?method=del&location=" + location_url_query + "&redirect=%2Fcluster", "Remove"))
|
||||
fakefile.write(" </div>\n")
|
||||
fakefile.write(" </td>\n")
|
||||
|
||||
fakefile.write(" </tr>\n")
|
||||
else:
|
||||
fakefile.write("<p style=\"text-align: center;\">Discovery Address must be set for SSDP/Cluster</p>\n")
|
||||
|
||||
for line in page_elements["end"]:
|
||||
fakefile.write(line + "\n")
|
||||
|
||||
return fakefile.getvalue()
|
||||
46
fHDHR/http/pages/diagnostics_html.py
Normal file
@ -0,0 +1,46 @@
|
||||
from flask import request
|
||||
from io import StringIO
|
||||
|
||||
|
||||
class Diagnostics_HTML():
|
||||
endpoints = ["/diagnostics", "/diagnostics.html"]
|
||||
endpoint_name = "diagnostics"
|
||||
|
||||
def __init__(self, fhdhr, page_elements):
|
||||
self.fhdhr = fhdhr
|
||||
self.diagnostics_html = None
|
||||
self.page_elements = page_elements
|
||||
|
||||
def __call__(self, *args):
|
||||
return self.get(*args)
|
||||
|
||||
def get(self, *args):
|
||||
|
||||
fakefile = StringIO()
|
||||
page_elements = self.page_elements.get(request)
|
||||
|
||||
for line in page_elements["top"]:
|
||||
fakefile.write(line + "\n")
|
||||
|
||||
# a list of 2 part lists containing button information
|
||||
button_list = [
|
||||
["debug.json", "/api/debug"],
|
||||
["device.xml", "device.xml"],
|
||||
["discover.json", "discover.json"],
|
||||
["lineup.json", "lineup.json"],
|
||||
["lineup_status.json", "lineup_status.json"],
|
||||
["cluster.json", "/api/cluster?method=get"]
|
||||
]
|
||||
|
||||
for button_item in button_list:
|
||||
button_label = button_item[0]
|
||||
button_path = button_item[1]
|
||||
fakefile.write("<div style=\"text-align: center;\">\n")
|
||||
fakefile.write(" <p><button onclick=\"OpenLink('%s')\">%s</a></button></p>\n" % (button_path, button_label))
|
||||
fakefile.write("</div>\n")
|
||||
fakefile.write("\n")
|
||||
|
||||
for line in page_elements["end"]:
|
||||
fakefile.write(line + "\n")
|
||||
|
||||
return fakefile.getvalue()
|
||||
80
fHDHR/http/pages/guide_html.py
Normal file
@ -0,0 +1,80 @@
|
||||
from flask import request
|
||||
from io import StringIO
|
||||
import datetime
|
||||
|
||||
from fHDHR.tools import humanized_time
|
||||
|
||||
|
||||
class Guide_HTML():
|
||||
endpoints = ["/guide", "/guide.html"]
|
||||
endpoint_name = "guide"
|
||||
|
||||
def __init__(self, fhdhr, page_elements):
|
||||
self.fhdhr = fhdhr
|
||||
self.page_elements = page_elements
|
||||
|
||||
def __call__(self, *args):
|
||||
return self.get(*args)
|
||||
|
||||
def get(self, *args):
|
||||
|
||||
friendlyname = self.fhdhr.config.dict["fhdhr"]["friendlyname"]
|
||||
|
||||
nowtime = datetime.datetime.utcnow()
|
||||
|
||||
fakefile = StringIO()
|
||||
page_elements = self.page_elements.get(request)
|
||||
|
||||
for line in page_elements["top"]:
|
||||
fakefile.write(line + "\n")
|
||||
|
||||
fakefile.write("<h4 id=\"mcetoc_1cdobsl3g0\" style=\"text-align: center;\"><span style=\"text-decoration: underline;\"><strong><em>What's On %s</em></strong></span></h4>\n" % friendlyname)
|
||||
fakefile.write("\n")
|
||||
|
||||
# a list of 2 part lists containing button information
|
||||
button_list = [
|
||||
["Force xmlTV Update", "/api/xmltv?method=update&redirect=%2Fguide"],
|
||||
]
|
||||
|
||||
fakefile.write("<div style=\"text-align: center;\">\n")
|
||||
for button_item in button_list:
|
||||
button_label = button_item[0]
|
||||
button_path = button_item[1]
|
||||
fakefile.write(" <p><button onclick=\"OpenLink('%s')\">%s</a></button></p>\n" % (button_path, button_label))
|
||||
fakefile.write("</div>\n")
|
||||
fakefile.write("\n")
|
||||
|
||||
fakefile.write("<table style=\"width:100%\">\n")
|
||||
fakefile.write(" <tr>\n")
|
||||
fakefile.write(" <th>Play</th>\n")
|
||||
fakefile.write(" <th>Channel Name</th>\n")
|
||||
fakefile.write(" <th>Channel Number</th>\n")
|
||||
fakefile.write(" <th>Channel Thumbnail</th>\n")
|
||||
fakefile.write(" <th>Content Title</th>\n")
|
||||
fakefile.write(" <th>Content Thumbnail</th>\n")
|
||||
fakefile.write(" <th>Content Description</th>\n")
|
||||
fakefile.write(" <th>Content Remaining Time</th>\n")
|
||||
fakefile.write(" </tr>\n")
|
||||
|
||||
for channel in self.fhdhr.device.epg.whats_on_allchans():
|
||||
end_time = datetime.datetime.strptime(channel["listing"][0]["time_end"], '%Y%m%d%H%M%S +0000')
|
||||
remaining_time = humanized_time(int((end_time - nowtime).total_seconds()))
|
||||
play_url = ("/api/m3u?method=get&channel=%s\n" % (channel["number"]))
|
||||
|
||||
fakefile.write(" <tr>\n")
|
||||
fakefile.write(" <td><a href=\"%s\">%s</a>\n" % (play_url, "Play"))
|
||||
fakefile.write(" <td>%s</td>\n" % (channel["name"]))
|
||||
fakefile.write(" <td>%s</td>\n" % (channel["number"]))
|
||||
fakefile.write(" <td><img src=\"%s\" alt=\"%s\" width=\"100\" height=\"100\">\n" % (channel["thumbnail"], channel["name"]))
|
||||
fakefile.write(" <td>%s</td>\n" % (channel["listing"][0]["title"]))
|
||||
fakefile.write(" <td><img src=\"%s\" alt=\"%s\" width=\"100\" height=\"100\">\n" % (channel["listing"][0]["thumbnail"], channel["listing"][0]["title"]))
|
||||
fakefile.write(" <td>%s</td>\n" % (channel["listing"][0]["description"]))
|
||||
fakefile.write(" <td>%s</td>\n" % (str(remaining_time)))
|
||||
fakefile.write(" </tr>\n")
|
||||
|
||||
for line in page_elements["end"]:
|
||||
fakefile.write(line + "\n")
|
||||
|
||||
channel_guide_html = fakefile.getvalue()
|
||||
|
||||
return channel_guide_html
|
||||
13
fHDHR/http/pages/htmlerror.py
Normal file
@ -0,0 +1,13 @@
|
||||
|
||||
class HTMLerror():
|
||||
def __init__(self, fhdhr):
|
||||
self.fhdhr = fhdhr
|
||||
|
||||
def get_html_error(self, message):
|
||||
htmlerror = """<html>
|
||||
<head></head>
|
||||
<body>
|
||||
<h2>{}</h2>
|
||||
</body>
|
||||
</html>"""
|
||||
return htmlerror.format(message)
|
||||
55
fHDHR/http/pages/index_html.py
Normal file
@ -0,0 +1,55 @@
|
||||
from flask import request
|
||||
from io import StringIO
|
||||
|
||||
|
||||
class Index_HTML():
|
||||
endpoints = ["/", "/index.html"]
|
||||
endpoint_name = "root"
|
||||
|
||||
def __init__(self, fhdhr, page_elements):
|
||||
self.fhdhr = fhdhr
|
||||
self.page_elements = page_elements
|
||||
|
||||
def __call__(self, *args):
|
||||
return self.get(*args)
|
||||
|
||||
def get(self, *args):
|
||||
|
||||
fakefile = StringIO()
|
||||
page_elements = self.page_elements.get(request)
|
||||
|
||||
for line in page_elements["top"]:
|
||||
fakefile.write(line + "\n")
|
||||
|
||||
fakefile.write("<h4 style=\"text-align: center;\">fHDHR Status</h4>")
|
||||
fakefile.write("\n")
|
||||
|
||||
fakefile.write("<table class=\"center\" style=\"width:50%\">\n")
|
||||
fakefile.write(" <tr>\n")
|
||||
fakefile.write(" <th></th>\n")
|
||||
fakefile.write(" <th></th>\n")
|
||||
fakefile.write(" </tr>\n")
|
||||
|
||||
total_channels = self.fhdhr.device.channels.get_station_total()
|
||||
|
||||
tuners_in_use = self.fhdhr.device.tuners.inuse_tuner_count()
|
||||
max_tuners = self.fhdhr.device.tuners.max_tuners
|
||||
|
||||
tableguts = [
|
||||
["Script Directory", str(self.fhdhr.config.dict["filedir"]["script_dir"])],
|
||||
["Config File", str(self.fhdhr.config.config_file)],
|
||||
["Cache Path", str(self.fhdhr.config.dict["filedir"]["cache_dir"])],
|
||||
["Total Channels", str(total_channels)],
|
||||
["Tuner Usage", "%s/%s" % (str(tuners_in_use), str(max_tuners))]
|
||||
]
|
||||
|
||||
for guts in tableguts:
|
||||
fakefile.write(" <tr>\n")
|
||||
fakefile.write(" <td>%s</td>\n" % (guts[0]))
|
||||
fakefile.write(" <td>%s</td>\n" % (guts[1]))
|
||||
fakefile.write(" </tr>\n")
|
||||
|
||||
for line in page_elements["end"]:
|
||||
fakefile.write(line + "\n")
|
||||
|
||||
return fakefile.getvalue()
|
||||
64
fHDHR/http/pages/origin_html.py
Normal file
@ -0,0 +1,64 @@
|
||||
from flask import request
|
||||
from io import StringIO
|
||||
|
||||
|
||||
class Origin_HTML():
|
||||
endpoints = ["/origin", "/origin.html"]
|
||||
endpoint_name = "origin"
|
||||
|
||||
def __init__(self, fhdhr, page_elements):
|
||||
self.fhdhr = fhdhr
|
||||
self.page_elements = page_elements
|
||||
|
||||
def __call__(self, *args):
|
||||
return self.get(*args)
|
||||
|
||||
def get(self, *args):
|
||||
|
||||
servicename = str(self.fhdhr.config.dict["main"]["servicename"])
|
||||
|
||||
fakefile = StringIO()
|
||||
page_elements = self.page_elements.get(request)
|
||||
|
||||
for line in page_elements["top"]:
|
||||
fakefile.write(line + "\n")
|
||||
|
||||
fakefile.write("<h4 style=\"text-align: center;\">%s Status</h4>" % (servicename))
|
||||
fakefile.write("\n")
|
||||
|
||||
# a list of 2 part lists containing button information
|
||||
button_list = [
|
||||
["Force Channel Update", "/api/channels?method=scan&redirect=%2Forigin"],
|
||||
]
|
||||
|
||||
fakefile.write("<div style=\"text-align: center;\">\n")
|
||||
for button_item in button_list:
|
||||
button_label = button_item[0]
|
||||
button_path = button_item[1]
|
||||
fakefile.write(" <p><button onclick=\"OpenLink('%s')\">%s</a></button></p>\n" % (button_path, button_label))
|
||||
fakefile.write("</div>\n")
|
||||
fakefile.write("\n")
|
||||
|
||||
fakefile.write("<table class=\"center\" style=\"width:50%\">\n")
|
||||
fakefile.write(" <tr>\n")
|
||||
fakefile.write(" <th></th>\n")
|
||||
fakefile.write(" <th></th>\n")
|
||||
fakefile.write(" </tr>\n")
|
||||
|
||||
origin_status_dict = self.fhdhr.device.channels.get_origin_status()
|
||||
for key in list(origin_status_dict.keys()):
|
||||
fakefile.write(" <tr>\n")
|
||||
fakefile.write(" <td>%s</td>\n" % (str(key)))
|
||||
fakefile.write(" <td>%s</td>\n" % (str(origin_status_dict[key])))
|
||||
fakefile.write(" </tr>\n")
|
||||
|
||||
total_channels = self.fhdhr.device.channels.get_station_total()
|
||||
fakefile.write(" <tr>\n")
|
||||
fakefile.write(" <td>%s</td>\n" % ("Total Channels"))
|
||||
fakefile.write(" <td>%s</td>\n" % (str(total_channels)))
|
||||
fakefile.write(" </tr>\n")
|
||||
|
||||
for line in page_elements["end"]:
|
||||
fakefile.write(line + "\n")
|
||||
|
||||
return fakefile.getvalue()
|
||||
87
fHDHR/http/pages/page_elements.py
Normal file
@ -0,0 +1,87 @@
|
||||
|
||||
|
||||
class fHDHR_Page_Elements():
|
||||
|
||||
def __init__(self, fhdhr):
|
||||
self.fhdhr = fhdhr
|
||||
self.location = self.fhdhr.device.cluster.location
|
||||
|
||||
def get(self, request):
|
||||
return {"top": self.pagetop(request), "end": self.pageend(request)}
|
||||
|
||||
def pagetop(self, request):
|
||||
friendlyname = self.fhdhr.config.dict["fhdhr"]["friendlyname"]
|
||||
servicename = str(self.fhdhr.config.dict["main"]["servicename"])
|
||||
|
||||
upper_part = [
|
||||
"<!DOCTYPE html>",
|
||||
"<html>",
|
||||
|
||||
"<head>",
|
||||
"<title>%s</title>" % friendlyname,
|
||||
"<meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\">",
|
||||
"<style>",
|
||||
"table, th, td {",
|
||||
"border: 1px solid black;",
|
||||
"}",
|
||||
"</style>",
|
||||
"<link href=\"style.css\" rel=\"stylesheet\">",
|
||||
"</head>",
|
||||
"<h1 style=\"text-align: center;\">",
|
||||
"<span style=\"text-decoration: underline;\"><strong><em>%s</em></strong>" % friendlyname,
|
||||
"</span>",
|
||||
"<img class=\"pull-left\" src=\"%s\" alt=\"%s\" width=\"100\" height=\"100\">\n" % ("/favicon.ico", "fHDHR Logo"),
|
||||
"</h1>"
|
||||
"<br><br>",
|
||||
"<h2>"
|
||||
"<div>",
|
||||
|
||||
"<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % ("/", "fHDHR"),
|
||||
"<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % ("/origin", servicename),
|
||||
"<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % ("/guide", "Guide"),
|
||||
"<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % ("/cluster", "Cluster"),
|
||||
"<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % ("/streams", "Streams"),
|
||||
"<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % ("/xmltv", "xmltv"),
|
||||
"<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % ("/version", "Version"),
|
||||
"<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % ("/diagnostics", "Diagnostics"),
|
||||
|
||||
"<a class=\"pull-right\" style=\"padding: 5px;\" href=\"%s\">%s</a>" % ("/api/xmltv?method=get&source=origin", "xmltv"),
|
||||
"<a class=\"pull-right\" style=\"padding: 5px;\" href=\"%s\">%s</a>" % ("/api/m3u?method=get&channel=all", "m3u"),
|
||||
|
||||
"</div>",
|
||||
"<hr align=\"center\" width=\"100%\">"
|
||||
]
|
||||
fhdhr_list = self.fhdhr.device.cluster.cluster()
|
||||
locations = []
|
||||
for location in list(fhdhr_list.keys()):
|
||||
item_dict = {
|
||||
"base_url": fhdhr_list[location]["base_url"],
|
||||
"name": fhdhr_list[location]["name"]
|
||||
}
|
||||
if item_dict["base_url"] != self.location:
|
||||
locations.append(item_dict)
|
||||
if len(locations):
|
||||
upper_part.append("<div>")
|
||||
locations = sorted(locations, key=lambda i: i['name'])
|
||||
for location in locations:
|
||||
upper_part.append("<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % (location["base_url"], location["name"]))
|
||||
upper_part.append("</div>")
|
||||
upper_part.append("<hr align=\"center\" width=\"100%\">")
|
||||
|
||||
retmessage = request.args.get('retmessage', default=None, type=str)
|
||||
if retmessage:
|
||||
upper_part.append("<p>%s</p>" % retmessage)
|
||||
|
||||
return upper_part
|
||||
|
||||
def pageend(self, request):
|
||||
return [
|
||||
"</html>",
|
||||
"",
|
||||
|
||||
"<script>",
|
||||
"function OpenLink(NewURL) {",
|
||||
" window.open(NewURL, \"_self\");",
|
||||
"}",
|
||||
"</script>"
|
||||
]
|
||||
55
fHDHR/http/pages/streams_html.py
Normal file
@ -0,0 +1,55 @@
|
||||
from flask import request
|
||||
from io import StringIO
|
||||
|
||||
|
||||
class Streams_HTML():
|
||||
endpoints = ["/streams", "/streams.html"]
|
||||
endpoint_name = "streams"
|
||||
|
||||
def __init__(self, fhdhr, page_elements):
|
||||
self.fhdhr = fhdhr
|
||||
self.page_elements = page_elements
|
||||
|
||||
def __call__(self, *args):
|
||||
return self.get(*args)
|
||||
|
||||
def get(self, *args):
|
||||
|
||||
fakefile = StringIO()
|
||||
page_elements = self.page_elements.get(request)
|
||||
|
||||
for line in page_elements["top"]:
|
||||
fakefile.write(line + "\n")
|
||||
|
||||
fakefile.write("<h4 style=\"text-align: center;\">fHDHR Streams</h4>")
|
||||
fakefile.write("\n")
|
||||
|
||||
fakefile.write("<table style=\"width:100%\">\n")
|
||||
fakefile.write(" <tr>\n")
|
||||
fakefile.write(" <th>Tuner</th>\n")
|
||||
fakefile.write(" <th>Status</th>\n")
|
||||
fakefile.write(" <th>Channel</th>\n")
|
||||
fakefile.write(" <th>Method</th>\n")
|
||||
fakefile.write(" <th>Time Active</th>\n")
|
||||
fakefile.write(" </tr>\n")
|
||||
|
||||
tuner_status = self.fhdhr.device.tuners.status()
|
||||
for tuner in list(tuner_status.keys()):
|
||||
fakefile.write(" <tr>\n")
|
||||
fakefile.write(" <td>%s</td>\n" % (str(tuner)))
|
||||
fakefile.write(" <td>%s</td>\n" % (str(tuner_status[tuner]["status"])))
|
||||
if tuner_status[tuner]["status"] == "Active":
|
||||
fakefile.write(" <td>%s<img src=\"%s\" alt=\"%s\" width=\"100\" height=\"100\">%s</td>\n" % (
|
||||
tuner_status[tuner]["epg"]["name"], tuner_status[tuner]["epg"]["thumbnail"], tuner_status[tuner]["epg"]["name"], str(tuner_status[tuner]["epg"]["number"])))
|
||||
fakefile.write(" <td>%s</td>\n" % (str(tuner_status[tuner]["method"])))
|
||||
fakefile.write(" <td>%s</td>\n" % (str(tuner_status[tuner]["Play Time"])))
|
||||
else:
|
||||
fakefile.write(" <td>%s</td>\n" % "N/A")
|
||||
fakefile.write(" <td>%s</td>\n" % "N/A")
|
||||
fakefile.write(" <td>%s</td>\n" % "N/A")
|
||||
fakefile.write(" </tr>\n")
|
||||
|
||||
for line in page_elements["end"]:
|
||||
fakefile.write(line + "\n")
|
||||
|
||||
return fakefile.getvalue()
|
||||
53
fHDHR/http/pages/version_html.py
Normal file
@ -0,0 +1,53 @@
|
||||
import sys
|
||||
from flask import request
|
||||
from io import StringIO
|
||||
|
||||
|
||||
class Version_HTML():
|
||||
endpoints = ["/version", "/version.html"]
|
||||
endpoint_name = "version"
|
||||
|
||||
def __init__(self, fhdhr, page_elements):
|
||||
self.fhdhr = fhdhr
|
||||
self.page_elements = page_elements
|
||||
|
||||
def __call__(self, *args):
|
||||
return self.get(*args)
|
||||
|
||||
def get(self, *args):
|
||||
|
||||
fakefile = StringIO()
|
||||
page_elements = self.page_elements.get(request)
|
||||
|
||||
for line in page_elements["top"]:
|
||||
fakefile.write(line + "\n")
|
||||
|
||||
fakefile.write("<h4 style=\"text-align: center;\">fHDHR Version Information</h4>")
|
||||
fakefile.write("\n")
|
||||
|
||||
fakefile.write("<table class=\"center\" style=\"width:50%\">\n")
|
||||
fakefile.write(" <tr>\n")
|
||||
fakefile.write(" <th></th>\n")
|
||||
fakefile.write(" <th></th>\n")
|
||||
fakefile.write(" </tr>\n")
|
||||
|
||||
fakefile.write(" <tr>\n")
|
||||
fakefile.write(" <td>%s</td>\n" % ("fHDHR"))
|
||||
fakefile.write(" <td>%s</td>\n" % (str(self.fhdhr.version)))
|
||||
fakefile.write(" </tr>\n")
|
||||
|
||||
fakefile.write(" <tr>\n")
|
||||
fakefile.write(" <td>%s</td>\n" % ("Python"))
|
||||
fakefile.write(" <td>%s</td>\n" % (str(sys.version)))
|
||||
fakefile.write(" </tr>\n")
|
||||
|
||||
if self.fhdhr.config.dict["fhdhr"]["stream_type"] == "ffmpeg":
|
||||
fakefile.write(" <tr>\n")
|
||||
fakefile.write(" <td>%s</td>\n" % ("ffmpeg"))
|
||||
fakefile.write(" <td>%s</td>\n" % (str(self.fhdhr.config.dict["ffmpeg"]["version"])))
|
||||
fakefile.write(" </tr>\n")
|
||||
|
||||
for line in page_elements["end"]:
|
||||
fakefile.write(line + "\n")
|
||||
|
||||
return fakefile.getvalue()
|
||||
56
fHDHR/http/pages/xmltv_html.py
Normal file
@ -0,0 +1,56 @@
|
||||
from flask import request
|
||||
from io import StringIO
|
||||
|
||||
|
||||
class xmlTV_HTML():
|
||||
endpoints = ["/xmltv"]
|
||||
endpoint_name = "xmltv"
|
||||
|
||||
def __init__(self, fhdhr, page_elements):
|
||||
self.fhdhr = fhdhr
|
||||
self.page_elements = page_elements
|
||||
|
||||
def __call__(self, *args):
|
||||
return self.get(*args)
|
||||
|
||||
def get(self, *args):
|
||||
|
||||
fakefile = StringIO()
|
||||
page_elements = self.page_elements.get(request)
|
||||
|
||||
for line in page_elements["top"]:
|
||||
fakefile.write(line + "\n")
|
||||
|
||||
fakefile.write("<h4 style=\"text-align: center;\">fHDHR xmltv Options</h4>")
|
||||
fakefile.write("\n")
|
||||
|
||||
fakefile.write("<table class=\"center\" style=\"width:50%\">\n")
|
||||
fakefile.write(" <tr>\n")
|
||||
fakefile.write(" <th>Version</th>\n")
|
||||
fakefile.write(" <th>Link</th>\n")
|
||||
fakefile.write(" <th>Options</th>\n")
|
||||
fakefile.write(" </tr>\n")
|
||||
|
||||
for epg_method in self.fhdhr.config.dict["main"]["valid_epg_methods"]:
|
||||
if epg_method not in [None, "None"]:
|
||||
epg_method_name = epg_method
|
||||
if epg_method == "origin":
|
||||
epg_method_name = self.fhdhr.config.dict["main"]["dictpopname"]
|
||||
fakefile.write(" <tr>\n")
|
||||
fakefile.write(" <td>%s</td>\n" % (epg_method_name))
|
||||
fakefile.write(" <td><a href=\"%s\">%s</a>\n" % ("/api/xmltv?method=get&source=" + epg_method, epg_method_name))
|
||||
|
||||
fakefile.write(" <td>\n")
|
||||
fakefile.write(" <div>\n")
|
||||
fakefile.write(
|
||||
" <button onclick=\"OpenLink('%s')\">%s</a></button>\n" %
|
||||
("/api/xmltv?method=update&source=" + epg_method + "&redirect=%2Fxmltv", "Update"))
|
||||
fakefile.write(" </div>\n")
|
||||
fakefile.write(" </td>\n")
|
||||
|
||||
fakefile.write(" </tr>\n")
|
||||
|
||||
for line in page_elements["end"]:
|
||||
fakefile.write(line + "\n")
|
||||
|
||||
return fakefile.getvalue()
|
||||
@ -1,36 +0,0 @@
|
||||
import os
|
||||
import logging
|
||||
|
||||
|
||||
class Logger():
|
||||
|
||||
def __init__(self, settings):
|
||||
self.config = settings
|
||||
|
||||
log_level = self.config.dict["logging"]["level"].upper()
|
||||
|
||||
# Create a custom logger
|
||||
logging.basicConfig(format='%(name)s - %(levelname)s - %(message)s', level=log_level)
|
||||
self.logger = logging.getLogger('fHDHR')
|
||||
log_file = os.path.join(self.config.internal["paths"]["logs_dir"], 'fHDHR.log')
|
||||
|
||||
# Create handlers
|
||||
# c_handler = logging.StreamHandler()
|
||||
f_handler = logging.FileHandler(log_file)
|
||||
# c_handler.setLevel(log_level)
|
||||
f_handler.setLevel(log_level)
|
||||
|
||||
# Create formatters and add it to handlers
|
||||
# c_format = logging.Formatter('%(name)s - %(levelname)s - %(message)s')
|
||||
f_format = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
|
||||
# c_handler.setFormatter(c_format)
|
||||
f_handler.setFormatter(f_format)
|
||||
|
||||
# Add handlers to the logger
|
||||
# logger.addHandler(c_handler)
|
||||
self.logger.addHandler(f_handler)
|
||||
|
||||
def __getattr__(self, name):
|
||||
''' will only get called for undefined attributes '''
|
||||
if hasattr(self.logger, name):
|
||||
return eval("self.logger.%s" % name)
|
||||
89
fHDHR/origin/__init__.py
Normal file
@ -0,0 +1,89 @@
|
||||
from .origin_service import OriginService
|
||||
from .origin_channels import OriginChannels
|
||||
from .origin_epg import OriginEPG
|
||||
|
||||
import fHDHR.exceptions
|
||||
|
||||
|
||||
class OriginEPG_StandIN():
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
def update_epg(self, channels):
|
||||
return {}
|
||||
|
||||
|
||||
class OriginChannels_StandIN():
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
def get_channels(self):
|
||||
return []
|
||||
|
||||
def get_channel_stream(self, chandict, allchandict):
|
||||
return [{"number": chandict["number"], "stream_url": None}], False
|
||||
|
||||
|
||||
class OriginServiceWrapper():
|
||||
|
||||
def __init__(self, settings, logger, web, db):
|
||||
self.config = settings
|
||||
self.logger = logger
|
||||
self.web = web
|
||||
|
||||
self.servicename = settings.dict["main"]["servicename"]
|
||||
|
||||
self.setup_success = None
|
||||
self.setup()
|
||||
|
||||
def setup(self):
|
||||
|
||||
try:
|
||||
self.origin = OriginService(self.config, self.logger, self.web)
|
||||
self.setup_success = True
|
||||
self.logger.info("%s Setup Success" % self.servicename)
|
||||
except fHDHR.exceptions.OriginSetupError as e:
|
||||
self.logger.error(e)
|
||||
self.setup_success = False
|
||||
|
||||
if self.setup_success:
|
||||
self.channels = OriginChannels(self.config, self.origin, self.logger, self.web)
|
||||
self.epg = OriginEPG(self.config, self.logger, self.web)
|
||||
else:
|
||||
self.channels = OriginChannels_StandIN()
|
||||
self.epg = OriginEPG_StandIN()
|
||||
|
||||
def get_channels(self):
|
||||
return self.channels.get_channels()
|
||||
|
||||
def get_channel_stream(self, chandict, allchandict):
|
||||
return self.channels.get_channel_stream(chandict, allchandict)
|
||||
|
||||
def update_epg(self, channels):
|
||||
return self.epg.update_epg(channels)
|
||||
|
||||
def get_status_dict(self):
|
||||
|
||||
if self.setup_success:
|
||||
status_dict = {
|
||||
"Setup": "Success",
|
||||
}
|
||||
|
||||
try:
|
||||
full_status_dict = self.origin.get_status_dict()
|
||||
for status_key in list(full_status_dict.keys()):
|
||||
status_dict[status_key] = full_status_dict[status_key]
|
||||
return status_dict
|
||||
except AttributeError:
|
||||
return status_dict
|
||||
else:
|
||||
return {
|
||||
"Setup": "Failed",
|
||||
}
|
||||
|
||||
def __getattr__(self, name):
|
||||
''' will only get called for undefined attributes '''
|
||||
if hasattr(self.origin, name):
|
||||
return eval("self.origin." + name)
|
||||
elif hasattr(self.channels, name):
|
||||
return eval("self.channels." + name)
|
||||
58
fHDHR/origin/origin_channels.py
Normal file
@ -0,0 +1,58 @@
|
||||
import xmltodict
|
||||
import json
|
||||
|
||||
|
||||
class OriginChannels():
|
||||
|
||||
def __init__(self, settings, origin, logger, web):
|
||||
self.config = settings
|
||||
self.origin = origin
|
||||
self.logger = logger
|
||||
self.web = web
|
||||
|
||||
def get_channels(self):
|
||||
|
||||
data_url = ('%s%s:%s/service?method=channel.list&sid=%s' %
|
||||
("https://" if self.config.dict["origin"]["ssl"] else "http://",
|
||||
self.config.dict["origin"]["address"],
|
||||
str(self.config.dict["origin"]["port"]),
|
||||
self.origin.sid
|
||||
))
|
||||
|
||||
data_req = self.origin.web.session.get(data_url)
|
||||
data_dict = xmltodict.parse(data_req.content)
|
||||
|
||||
if 'channels' not in list(data_dict['rsp'].keys()):
|
||||
self.logger.error("Could not retrieve channel list")
|
||||
return []
|
||||
|
||||
channel_o_list = data_dict['rsp']['channels']['channel']
|
||||
|
||||
channel_list = []
|
||||
for c in channel_o_list:
|
||||
dString = json.dumps(c)
|
||||
channel_dict = eval(dString)
|
||||
|
||||
clean_station_item = {
|
||||
"name": channel_dict["name"],
|
||||
"callsign": channel_dict["name"],
|
||||
"number": channel_dict["formatted-number"],
|
||||
"id": channel_dict["id"],
|
||||
}
|
||||
channel_list.append(clean_station_item)
|
||||
return channel_list
|
||||
|
||||
def get_channel_stream(self, chandict, allchandict):
|
||||
caching = True
|
||||
streamlist = []
|
||||
streamdict = {}
|
||||
streamurl = ('%s%s:%s/live?channel=%s&client=%s' %
|
||||
("https://" if self.config.dict["origin"]["ssl"] else "http://",
|
||||
self.config.dict["origin"]["address"],
|
||||
str(self.config.dict["origin"]["port"]),
|
||||
str(chandict["number"]),
|
||||
str(chandict["number"]),
|
||||
))
|
||||
streamdict = {"number": chandict["number"], "stream_url": streamurl}
|
||||
streamlist.append(streamdict)
|
||||
return streamlist, caching
|
||||
@ -1,47 +1,68 @@
|
||||
import datetime
|
||||
import xmltodict
|
||||
|
||||
import fHDHR.tools
|
||||
|
||||
|
||||
class Plugin_OBJ():
|
||||
class OriginEPG():
|
||||
|
||||
def __init__(self, channels, plugin_utils):
|
||||
self.plugin_utils = plugin_utils
|
||||
def __init__(self, settings, logger, web):
|
||||
self.config = settings
|
||||
self.logger = logger
|
||||
self.web = web
|
||||
|
||||
self.channels = channels
|
||||
|
||||
self.origin = plugin_utils.origin
|
||||
def get_channel_thumbnail(self, channel_id):
|
||||
channel_thumb_url = ("%s%s:%s/service?method=channel.icon&channel_id=%s" %
|
||||
("https://" if self.config.dict["origin"]["ssl"] else "http://",
|
||||
self.config.dict["origin"]["address"],
|
||||
str(self.config.dict["origin"]["port"]),
|
||||
str(channel_id)
|
||||
))
|
||||
return channel_thumb_url
|
||||
|
||||
def get_content_thumbnail(self, content_id):
|
||||
item_thumb_url = ("%s%s:%s/service?method=channel.show.artwork&sid=%s&event_id=%s" %
|
||||
("https://" if self.fhdhr.config.dict["nextpvr"]["ssl"] else "http://",
|
||||
self.fhdhr.config.dict["nextpvr"]["address"],
|
||||
str(self.fhdhr.config.dict["nextpvr"]["port"]),
|
||||
self.fhdhr.config.dict["nextpvr"]["sid"],
|
||||
("https://" if self.config.dict["origin"]["ssl"] else "http://",
|
||||
self.config.dict["origin"]["address"],
|
||||
str(self.config.dict["origin"]["port"]),
|
||||
self.config.dict["origin"]["sid"],
|
||||
str(content_id)
|
||||
))
|
||||
return item_thumb_url
|
||||
|
||||
def xmltimestamp_nextpvr(self, epochtime):
|
||||
xmltime = datetime.datetime.fromtimestamp(int(epochtime)/1000)
|
||||
xmltime = str(xmltime.strftime('%Y%m%d%H%M%S')) + " +0000"
|
||||
return xmltime
|
||||
|
||||
def duration_nextpvr_minutes(self, starttime, endtime):
|
||||
return ((int(endtime) - int(starttime))/1000/60)
|
||||
|
||||
def update_epg(self):
|
||||
def update_epg(self, fhdhr_channels):
|
||||
programguide = {}
|
||||
|
||||
for fhdhr_id in list(self.channels.list.keys()):
|
||||
chan_obj = self.channels.list[fhdhr_id]
|
||||
for c in fhdhr_channels.get_channels():
|
||||
|
||||
if str(chan_obj.number) not in list(programguide.keys()):
|
||||
cdict = fHDHR.tools.xmldictmaker(c, ["callsign", "name", "number", "id"])
|
||||
|
||||
programguide[str(chan_obj.number)] = chan_obj.epgdict
|
||||
if str(cdict['number']) not in list(programguide.keys()):
|
||||
|
||||
programguide[str(cdict['number'])] = {
|
||||
"callsign": cdict["callsign"],
|
||||
"name": cdict["name"] or cdict["callsign"],
|
||||
"number": cdict["number"],
|
||||
"id": str(cdict["id"]),
|
||||
"thumbnail": self.get_channel_thumbnail(cdict['id']),
|
||||
"listing": [],
|
||||
}
|
||||
|
||||
epg_url = ('%s%s:%s/service?method=channel.listings&channel_id=%s' %
|
||||
("https://" if self.fhdhr.config.dict["nextpvr"]["ssl"] else "http://",
|
||||
self.fhdhr.config.dict["nextpvr"]["address"],
|
||||
str(self.fhdhr.config.dict["nextpvr"]["port"]),
|
||||
str(chan_obj.dict["origin_id"]),
|
||||
("https://" if self.config.dict["origin"]["ssl"] else "http://",
|
||||
self.config.dict["origin"]["address"],
|
||||
str(self.config.dict["origin"]["port"]),
|
||||
str(cdict["id"]),
|
||||
))
|
||||
epg_req = self.fhdhr.web.session.get(epg_url)
|
||||
epg_req = self.web.session.get(epg_url)
|
||||
epg_dict = xmltodict.parse(epg_req.content)
|
||||
|
||||
for program_listing in epg_dict["rsp"]["listings"]:
|
||||
@ -51,8 +72,8 @@ class Plugin_OBJ():
|
||||
progdict = fHDHR.tools.xmldictmaker(program_item, ["start", "end", "title", "name", "subtitle", "rating", "description", "season", "episode", "id", "episodeTitle"])
|
||||
|
||||
clean_prog_dict = {
|
||||
"time_start": (int(progdict["start"]) / 1000),
|
||||
"time_end": (int(progdict["end"]) / 1000),
|
||||
"time_start": self.xmltimestamp_nextpvr(progdict["start"]),
|
||||
"time_end": self.xmltimestamp_nextpvr(progdict["end"]),
|
||||
"duration_minutes": self.duration_nextpvr_minutes(progdict["start"], progdict["end"]),
|
||||
"thumbnail": self.get_content_thumbnail(progdict['id']),
|
||||
"title": progdict['name'] or "Unavailable",
|
||||
@ -65,7 +86,7 @@ class Plugin_OBJ():
|
||||
"seasonnumber": progdict['season'],
|
||||
"episodenumber": progdict['episode'],
|
||||
"isnew": False,
|
||||
"id": str(progdict['id'] or "%s_%s" % (chan_obj.dict['origin_id'], progdict["start"])),
|
||||
"id": str(progdict['id'] or self.xmltimestamp_nextpvr(progdict["start"])),
|
||||
}
|
||||
|
||||
if 'genre' in list(progdict.keys()):
|
||||
@ -78,7 +99,6 @@ class Plugin_OBJ():
|
||||
|
||||
# TODO isNEW
|
||||
|
||||
if not any((d['time_start'] == clean_prog_dict['time_start'] and d['id'] == clean_prog_dict['id']) for d in programguide[chan_obj.number]["listing"]):
|
||||
programguide[str(chan_obj.number)]["listing"].append(clean_prog_dict)
|
||||
programguide[str(cdict["number"])]["listing"].append(clean_prog_dict)
|
||||
|
||||
return programguide
|
||||
71
fHDHR/origin/origin_service.py
Normal file
@ -0,0 +1,71 @@
|
||||
import xmltodict
|
||||
import hashlib
|
||||
|
||||
import fHDHR.tools
|
||||
import fHDHR.exceptions
|
||||
|
||||
|
||||
class OriginService():
|
||||
|
||||
def __init__(self, settings, logger, web):
|
||||
self.config = settings
|
||||
self.logger = logger
|
||||
self.web = web
|
||||
self.login()
|
||||
|
||||
def login(self):
|
||||
self.logger.info("Logging into NextPVR")
|
||||
self.sid = self.get_sid()
|
||||
if not self.sid:
|
||||
raise fHDHR.exceptions.OriginSetupError("NextPVR Login Failed")
|
||||
else:
|
||||
self.logger.info("NextPVR Login Success")
|
||||
self.config.write(self.config.dict["main"]["dictpopname"], 'sid', self.sid)
|
||||
|
||||
def get_sid(self):
|
||||
if self.config.dict["origin"]["sid"]:
|
||||
return self.config.dict["origin"]["sid"]
|
||||
|
||||
initiate_url = ('%s%s:%s/service?method=session.initiate&ver=1.0&device=fhdhr' %
|
||||
("https://" if self.config.dict["origin"]["ssl"] else "http://",
|
||||
self.config.dict["origin"]["address"],
|
||||
str(self.config.dict["origin"]["port"]),
|
||||
))
|
||||
|
||||
initiate_req = self.web.session.get(initiate_url)
|
||||
initiate_dict = xmltodict.parse(initiate_req.content)
|
||||
|
||||
sid = initiate_dict['rsp']['sid']
|
||||
salt = initiate_dict['rsp']['salt']
|
||||
md5PIN = hashlib.md5(str(self.config.dict["origin"]['pin']).encode('utf-8')).hexdigest()
|
||||
string = ':%s:%s' % (md5PIN, salt)
|
||||
clientKey = hashlib.md5(string.encode('utf-8')).hexdigest()
|
||||
|
||||
login_url = ('%s%s:%s/service?method=session.login&sid=%s&md5=%s' %
|
||||
("https://" if self.config.dict["origin"]["ssl"] else "http://",
|
||||
self.config.dict["origin"]["address"],
|
||||
str(self.config.dict["origin"]["port"]),
|
||||
sid,
|
||||
clientKey
|
||||
))
|
||||
login_req = self.web.session.get(login_url)
|
||||
login_dict = xmltodict.parse(login_req.content)
|
||||
|
||||
loginsuccess = None
|
||||
if login_dict['rsp']['@stat'] == "ok":
|
||||
if login_dict['rsp']['allow_watch'] == "true":
|
||||
loginsuccess = sid
|
||||
|
||||
return loginsuccess
|
||||
|
||||
def get_status_dict(self):
|
||||
nextpvr_address = ('%s%s:%s' %
|
||||
("https://" if self.config.dict["origin"]["ssl"] else "http://",
|
||||
self.config.dict["origin"]["address"],
|
||||
str(self.config.dict["origin"]["port"]),
|
||||
))
|
||||
ret_status_dict = {
|
||||
"Login": "Success",
|
||||
"Address": nextpvr_address,
|
||||
}
|
||||
return ret_status_dict
|
||||
@ -1,48 +0,0 @@
|
||||
|
||||
import fHDHR.exceptions
|
||||
|
||||
|
||||
class Origin_StandIN():
|
||||
def __init__(self):
|
||||
self.setup_success = False
|
||||
|
||||
def get_channels(self):
|
||||
return []
|
||||
|
||||
def get_channel_stream(self, chandict, stream_args):
|
||||
return None
|
||||
|
||||
|
||||
class Origins():
|
||||
|
||||
def __init__(self, fhdhr):
|
||||
self.fhdhr = fhdhr
|
||||
|
||||
self.origins_dict = {}
|
||||
self.origin_selfadd()
|
||||
for plugin_name in list(self.fhdhr.plugins.plugins.keys()):
|
||||
if self.fhdhr.plugins.plugins[plugin_name].manifest["tagged_mod"] and self.fhdhr.plugins.plugins[plugin_name].manifest["tagged_mod_type"] == "origin":
|
||||
self.fhdhr.plugins.plugins[plugin_name].plugin_utils.origin = self.origins_dict[self.fhdhr.plugins.plugins[plugin_name].manifest["tagged_mod"].lower()]
|
||||
|
||||
@property
|
||||
def valid_origins(self):
|
||||
return [origin for origin in list(self.origins_dict.keys())]
|
||||
|
||||
def origin_selfadd(self):
|
||||
for plugin_name in list(self.fhdhr.plugins.plugins.keys()):
|
||||
if self.fhdhr.plugins.plugins[plugin_name].type == "origin":
|
||||
method = self.fhdhr.plugins.plugins[plugin_name].name.lower()
|
||||
try:
|
||||
plugin_utils = self.fhdhr.plugins.plugins[plugin_name].plugin_utils
|
||||
self.origins_dict[method] = self.fhdhr.plugins.plugins[plugin_name].Plugin_OBJ(plugin_utils)
|
||||
self.fhdhr.logger.info("%s Setup Success" % method)
|
||||
self.origins_dict[method].setup_success = True
|
||||
except fHDHR.exceptions.OriginSetupError as e:
|
||||
self.fhdhr.logger.error(e)
|
||||
self.origins_dict[method] = Origin_StandIN()
|
||||
|
||||
if not hasattr(self.origins_dict[method], 'tuners'):
|
||||
self.origins_dict[method].tuners = 4
|
||||
|
||||
if not hasattr(self.origins_dict[method], 'stream_method'):
|
||||
self.origins_dict[method].stream_method = self.fhdhr.config.dict["streaming"]["method"]
|
||||
@ -1,250 +0,0 @@
|
||||
import os
|
||||
import imp
|
||||
import json
|
||||
|
||||
|
||||
class Plugin_DB():
|
||||
def __init__(self, db, name):
|
||||
self._db = db
|
||||
self.name = name
|
||||
self.namespace = name.lower()
|
||||
|
||||
# fhdhr
|
||||
def set_fhdhr_value(self, pluginitem, key, value, namespace="default"):
|
||||
print("%s plugin is not allowed write access to fhdhr db namespaces." % self.name)
|
||||
return
|
||||
|
||||
def get_fhdhr_value(self, pluginitem, key, namespace="default"):
|
||||
return self._db.get_fhdhr_value(pluginitem, key, namespace=namespace.lower())
|
||||
|
||||
def delete_fhdhr_value(self, pluginitem, key, namespace="default"):
|
||||
print("%s plugin is not allowed write access to fhdhr db namespaces." % self.name)
|
||||
return
|
||||
|
||||
# Plugin
|
||||
def set_plugin_value(self, pluginitem, key, value, namespace=None):
|
||||
if not namespace:
|
||||
namespace = self.namespace
|
||||
elif namespace.lower() != self.namespace:
|
||||
print("%s plugin is not allowed write access to %s db namespace." % (self.name, namespace))
|
||||
return
|
||||
return self._db.set_plugin_value(pluginitem, key, value, namespace=self.namespace)
|
||||
|
||||
def get_plugin_value(self, pluginitem, key, namespace=None):
|
||||
if not namespace:
|
||||
namespace = self.namespace
|
||||
return self._db.get_plugin_value(pluginitem, key, namespace=namespace.lower())
|
||||
|
||||
def delete_plugin_value(self, pluginitem, key, namespace=None):
|
||||
if not namespace:
|
||||
namespace = self.namespace
|
||||
elif namespace.lower() != self.namespace:
|
||||
print("%s plugin is not allowed write access to %s db namespace." % (self.name, namespace))
|
||||
return
|
||||
return self._db.delete_plugin_value(pluginitem, key, namespace=self.namespace)
|
||||
|
||||
|
||||
class Plugin_Config():
|
||||
def __init__(self, config, name):
|
||||
self._config = config
|
||||
self.name = name
|
||||
self.namespace = name.lower()
|
||||
|
||||
@property
|
||||
def dict(self):
|
||||
return self._config.dict.copy()
|
||||
|
||||
@property
|
||||
def internal(self):
|
||||
return self._config.internal.copy()
|
||||
|
||||
@property
|
||||
def conf_default(self):
|
||||
return self._config.conf_default.copy()
|
||||
|
||||
def write(self, key, value, namespace=None):
|
||||
if not namespace:
|
||||
namespace = self.namespace
|
||||
elif str(namespace).lower() != self.namespace:
|
||||
print("%s plugin is not allowed write access to fhdhr config namespaces." % self.name)
|
||||
return
|
||||
return self._config.write(key, value, self.namespace)
|
||||
|
||||
|
||||
class Plugin_Utils():
|
||||
|
||||
def __init__(self, config, logger, db, plugin_name, plugin_manifest, modname):
|
||||
self.config = Plugin_Config(config, plugin_manifest["name"])
|
||||
self.db = Plugin_DB(db, plugin_manifest["name"])
|
||||
self.logger = logger
|
||||
self.namespace = plugin_manifest["name"].lower()
|
||||
self.plugin_name = plugin_name
|
||||
self.plugin_manifest = plugin_manifest
|
||||
self.origin = None
|
||||
|
||||
|
||||
class Plugin():
|
||||
|
||||
def __init__(self, config, logger, db, plugin_name, plugin_path, plugin_conf, plugin_manifest):
|
||||
self.config = config
|
||||
self.db = db
|
||||
self.logger = logger
|
||||
|
||||
# Gather Info about Plugin
|
||||
self.plugin_name = plugin_name
|
||||
self.modname = os.path.basename(plugin_path)
|
||||
self.path = plugin_path
|
||||
self.module_type = imp.PKG_DIRECTORY
|
||||
self.multi_plugin = (self.plugin_name != self.modname)
|
||||
self.default_conf = plugin_conf
|
||||
self.manifest = plugin_manifest
|
||||
|
||||
if self.multi_plugin:
|
||||
self.plugin_dict_name = "%s_%s" % (plugin_name, self.modname)
|
||||
else:
|
||||
self.plugin_dict_name = plugin_name
|
||||
|
||||
self.plugin_utils = Plugin_Utils(config, logger, db, plugin_name, plugin_manifest, self.modname)
|
||||
|
||||
# Load the module
|
||||
self._module = self._load()
|
||||
|
||||
def setup(self):
|
||||
|
||||
if self.type == "alt_epg":
|
||||
self.config.register_valid_epg_method(self.name, self.plugin_dict_name)
|
||||
elif self.type == "alt_stream":
|
||||
self.config.register_valid_streaming_method(self.name, self.plugin_dict_name)
|
||||
elif self.type == "web":
|
||||
self.config.register_web_path(self.manifest["name"], self.path, self.plugin_dict_name)
|
||||
|
||||
if self.has_setup():
|
||||
self._module.setup(self)
|
||||
|
||||
def has_setup(self):
|
||||
return hasattr(self._module, 'setup')
|
||||
|
||||
def _load(self):
|
||||
description = ('', '', self.module_type)
|
||||
mod = imp.load_module(self.plugin_dict_name, None, self.path, description)
|
||||
return mod
|
||||
|
||||
@property
|
||||
def name(self):
|
||||
return self.manifest["name"]
|
||||
|
||||
@property
|
||||
def version(self):
|
||||
return self.manifest["version"]
|
||||
|
||||
@property
|
||||
def type(self):
|
||||
return self.manifest["type"]
|
||||
|
||||
def __getattr__(self, name):
|
||||
''' will only get called for undefined attributes '''
|
||||
if name == "Plugin_OBJ":
|
||||
return self._module.Plugin_OBJ
|
||||
|
||||
|
||||
class PluginsHandler():
    """Discovers, configures, and loads fHDHR plugins.

    Walks the configured plugin directories, reads each plugin's
    ``plugin.json`` manifest (both single plugins and "multi-plugin"
    bundles), imports any bundled ``*_conf.json`` defaults into the
    config, and finally wraps every discovered plugin in a ``Plugin``
    object via ``load_plugins()``.
    """

    def __init__(self, settings):
        # fHDHR config object: supplies paths and registration hooks.
        self.config = settings

        # plugin_dict_name -> Plugin object (populated by load_plugins()).
        self.plugins = {}

        # Tuples of (name, path, conf-file list, manifest) found on disk.
        self.found_plugins = []
        # Paths of every *_conf.json discovered alongside plugins.
        self.found_plugins_conf = []
        self.list_plugins()

    def setup(self):
        # Run the setup hook of every loaded plugin.
        for plugin_name in list(self.plugins.keys()):
            self.plugins[plugin_name].setup()

    def load_plugin_configs(self):
        # Import each discovered *_conf.json into the main config.
        for file_item_path in self.found_plugins_conf:
            self.config.import_conf_json(file_item_path)

    def list_plugins(self):
        """Scan plugin directories, populating found_plugins and found_plugins_conf."""
        for directory in self.config.internal["paths"]["plugins_dir"]:

            base = os.path.abspath(directory)
            for filename in os.listdir(base):
                abspath = os.path.join(base, filename)

                if os.path.isdir(abspath):

                    # Collect the plugin's bundled config defaults.
                    plugin_conf = []
                    for subfilename in os.listdir(abspath):
                        subabspath = os.path.join(abspath, subfilename)
                        if subfilename.endswith("_conf.json"):
                            plugin_conf.append(subabspath)
                            self.found_plugins_conf.append(subabspath)

                    # Plugin/multi-plugin must have a basic manifest json
                    conffilepath = os.path.join(abspath, 'plugin.json')
                    if os.path.isfile(conffilepath):
                        plugin_manifest = json.load(open(conffilepath, 'r'))

                        # Normalize: default any missing manifest keys to None.
                        for plugin_man_item in ["name", "version", "type"]:
                            if plugin_man_item not in list(plugin_manifest.keys()):
                                plugin_manifest[plugin_man_item] = None

                        self.config.register_version(os.path.basename(filename), plugin_manifest["version"], "plugin")

                        if plugin_manifest["type"] == "origin":
                            self.config.register_valid_origin_method(plugin_manifest["name"])

                        plugin_import_print_string = "Found %s type plugin: %s %s. " % (plugin_manifest["type"], plugin_manifest["name"], plugin_manifest["version"])

                        # Warn for multiple origins
                        # NOTE: the comprehension variables below shadow the outer
                        # names only inside the comprehension's own scope.
                        if plugin_manifest["type"] == "origin" and len([plugin_name for plugin_name, plugin_path, plugin_conf, plugin_manifest in self.found_plugins if plugin_manifest["type"] == "origin"]):
                            plugin_import_print_string += " ImportWarning: Only one Origin Allowed."

                        if not any(plugin_manifest[plugin_item] for plugin_item in ["name", "version", "type"]):
                            plugin_import_print_string += " ImportWarning: Missing PLUGIN_* Value."
                        else:

                            # Single Plugin
                            if os.path.isfile(os.path.join(abspath, '__init__.py')):
                                plugin_manifest["tagged_mod"] = None
                                plugin_manifest["tagged_mod_type"] = None
                                self.found_plugins.append((os.path.basename(filename), abspath, plugin_conf, plugin_manifest))

                            else:

                                # Multi-Plugin
                                for subfilename in os.listdir(abspath):
                                    subabspath = os.path.join(abspath, subfilename)

                                    if os.path.isdir(subabspath):

                                        # Sub-plugins may carry their own manifest ...
                                        subconffilepath = os.path.join(subabspath, 'plugin.json')
                                        if os.path.isfile(subconffilepath):
                                            subplugin_manifest = json.load(open(subconffilepath, 'r'))

                                            # ... with missing keys inherited from the parent.
                                            for subplugin_man_item in ["name", "version", "type"]:
                                                if subplugin_man_item not in list(subplugin_manifest.keys()):
                                                    subplugin_manifest[subplugin_man_item] = plugin_manifest[subplugin_man_item]
                                        else:
                                            subplugin_manifest = plugin_manifest

                                        subplugin_manifest["tagged_mod"] = None
                                        subplugin_manifest["tagged_mod_type"] = None
                                        # Tag sub-plugins whose type differs from the bundle's.
                                        if plugin_manifest["type"] != subplugin_manifest["type"]:
                                            subplugin_manifest["tagged_mod"] = plugin_manifest["name"]
                                            subplugin_manifest["tagged_mod_type"] = plugin_manifest["type"]

                                        if os.path.isfile(os.path.join(subabspath, '__init__.py')):
                                            self.found_plugins.append((os.path.basename(filename), subabspath, plugin_conf, subplugin_manifest))

                        print(plugin_import_print_string)
        self.load_plugin_configs()

    def load_plugins(self, logger, db):
        # Wrap every discovered plugin in a Plugin object, keyed by its dict name.
        self.logger = logger
        self.db = db
        for plugin_name, plugin_path, plugin_conf, plugin_manifest in self.found_plugins:
            plugin_item = Plugin(self.config, self.logger, self.db, plugin_name, plugin_path, plugin_conf, plugin_manifest)
            self.plugins[plugin_item.plugin_dict_name] = plugin_item
|
||||
@ -1,5 +1,3 @@
|
||||
import os
|
||||
import re
|
||||
import ast
|
||||
import requests
|
||||
import xml.etree.ElementTree
|
||||
@ -8,32 +6,8 @@ UNARY_OPS = (ast.UAdd, ast.USub)
|
||||
BINARY_OPS = (ast.Add, ast.Sub, ast.Mult, ast.Div, ast.Mod)
|
||||
|
||||
|
||||
def channel_sort(channel_list):
    """Sort channel-number strings numerically by number, then sub-number.

    "2.10" sorts after "2.2" and "10" after "9" (numeric, not lexical).
    Entries without a sub-number sort as sub-number 0.  Duplicate entries
    collapse via the intermediate dict, matching the original behavior.
    """
    parsed = {}
    for number in channel_list:
        pieces = number.split(".")
        subnumber = pieces[1] if len(pieces) > 1 else None
        parsed[number] = (int(pieces[0]), int(subnumber or 0))
    return sorted(parsed, key=parsed.get)
|
||||
|
||||
|
||||
def is_docker():
    """Best-effort detection of whether this process runs inside Docker.

    Checks /proc/self/cgroup (Linux only) for a docker cgroup entry;
    on platforms without that file it reports False.
    """
    path = "/proc/self/cgroup"
    if not os.path.isfile(path):
        return False
    with open(path) as f:
        for line in f:
            # Raw string: "\d"/"\w" in a plain literal are invalid escape
            # sequences (SyntaxWarning on modern Python, error eventually).
            if re.match(r"\d+:[\w=]+:/docker(-[ce]e)?/\w+", line):
                return True
    return False
|
||||
|
||||
|
||||
def sub_el(parent, sub_el_item_name, text=None, **kwargs):
    """Create and return an XML sub-element of *parent*.

    A merge/diff artifact left two overlapping ``def sub_el`` headers here
    (the first truncated and dead); this collapses them into one definition.

    :param parent: the parent Element to attach to.
    :param sub_el_item_name: tag name for the new element.
    :param text: optional text content (skipped when falsy).
    :param kwargs: passed through as XML attributes.
    """
    el = xml.etree.ElementTree.SubElement(parent, sub_el_item_name, **kwargs)
    if text:
        el.text = text
    return el
|
||||
@ -102,14 +76,6 @@ def hours_between_datetime(first_time, later_time):
|
||||
return (timebetween.total_seconds() / 60 / 60)
|
||||
|
||||
|
||||
def humanized_filesize(size, decimal_places=2):
    """Render a byte count as a human-readable string, e.g. "1.50 KiB".

    Divides by 1024 until the value drops below 1024 or the largest
    unit (YiB) is reached.
    """
    units = ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB']
    index = 0
    while size >= 1024.0 and index < len(units) - 1:
        size /= 1024.0
        index += 1
    return f"{size:.{decimal_places}f} {units[index]}"
|
||||
|
||||
|
||||
def humanized_time(countdownseconds):
|
||||
time = float(countdownseconds)
|
||||
if time == 0:
|
||||
@ -133,9 +99,9 @@ def humanized_time(countdownseconds):
|
||||
if currenttimevar > 1:
|
||||
timetype = str(x+"s")
|
||||
if displaymsg:
|
||||
displaymsg = "%s %s %s" % (displaymsg, int(currenttimevar), timetype)
|
||||
displaymsg = str(displaymsg + " " + str(int(currenttimevar)) + " " + timetype)
|
||||
else:
|
||||
displaymsg = "%s %s" % (int(currenttimevar), timetype)
|
||||
displaymsg = str(str(int(currenttimevar)) + " " + timetype)
|
||||
if not displaymsg:
|
||||
return "just now"
|
||||
return displaymsg
|
||||
@ -147,8 +113,3 @@ class WebReq():
|
||||
def __init__(self):
    """Set up a shared requests.Session and expose requests.exceptions."""
    # Exception namespace first, then the session (independent assignments).
    self.exceptions = requests.exceptions
    self.session = requests.Session()
|
||||
|
||||
def __getattr__(self, name):
|
||||
''' will only get called for undefined attributes '''
|
||||
if hasattr(self.session, name):
|
||||
return eval("self.session.%s" % name)
|
||||
|
||||
@ -1,229 +0,0 @@
|
||||
from gevent.pywsgi import WSGIServer
|
||||
from flask import Flask, request, session
|
||||
import threading
|
||||
import uuid
|
||||
|
||||
from .pages import fHDHR_Pages
|
||||
from .files import fHDHR_Files
|
||||
from .brython import fHDHR_Brython
|
||||
from .api import fHDHR_API
|
||||
|
||||
|
||||
# Version tag of the fHDHR web front-end, reported separately from core.
fHDHR_web_VERSION = "v0.8.1-beta"
|
||||
|
||||
|
||||
class fHDHR_HTTP_Server():
    """Flask + gevent WSGI front-end for fHDHR.

    Collects endpoint providers (core pages/files/brython/api plus any
    "web" type plugins), registers their routes on a Flask app, and runs
    the app in a gevent WSGIServer on a background thread.

    Fixes applied:
    - ``session["deviceauth"]`` was assigned from ``detect_plexmediaserver``
      (a boolean) instead of ``detect_deviceauth`` (the DeviceAuth query
      parameter); ``detect_deviceauth`` existed but was never called.
    - ``selfadd_web_plugins`` printed plugin-load errors to stdout; they
      now go to the logger like every other message in this class.
    - ``add_endpoints`` used eval() to read handler attributes; replaced
      with getattr() (same lookups, no string evaluation).
    """
    app = None

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

        self.template_folder = fhdhr.config.internal["paths"]["www_templates_dir"]

        self.fhdhr.logger.info("Loading Flask.")

        self.fhdhr.app = Flask("fHDHR", template_folder=self.template_folder)
        # Unique per-process id; lets clients detect a server restart.
        self.instance_id = str(uuid.uuid4())

        # Allow Internal API Usage
        self.fhdhr.app.testing = True
        self.fhdhr.api.client = self.fhdhr.app.test_client()

        # Set Secret Key For Sessions
        self.fhdhr.app.secret_key = self.fhdhr.config.dict["fhdhr"]["friendlyname"]

        # endpoint_category -> endpoint_name -> route metadata (shown in the UI).
        self.route_list = {}

        self.endpoints_obj = {}
        self.endpoints_obj["pages"] = fHDHR_Pages(fhdhr)
        self.endpoints_obj["files"] = fHDHR_Files(fhdhr)
        self.endpoints_obj["brython"] = fHDHR_Brython(fhdhr)
        self.endpoints_obj["api"] = fHDHR_API(fhdhr)

        self.selfadd_web_plugins()
        for endpoint_type in list(self.endpoints_obj.keys()):
            self.fhdhr.logger.info("Loading HTTP %s Endpoints." % endpoint_type)
            self.add_endpoints(endpoint_type)

        self.fhdhr.app.before_request(self.before_request)
        self.fhdhr.app.after_request(self.after_request)
        self.fhdhr.app.before_first_request(self.before_first_request)

        # Thread is created here but only started by start().
        self.fhdhr.threads["flask"] = threading.Thread(target=self.run)

    def selfadd_web_plugins(self):
        """Add every loaded "web" type plugin as an endpoint provider."""
        for plugin_name in list(self.fhdhr.plugins.plugins.keys()):
            if self.fhdhr.plugins.plugins[plugin_name].type == "web":
                method = self.fhdhr.plugins.plugins[plugin_name].name.lower()
                plugin_utils = self.fhdhr.plugins.plugins[plugin_name].plugin_utils
                try:
                    self.endpoints_obj[method] = self.fhdhr.plugins.plugins[plugin_name].Plugin_OBJ(self.fhdhr, plugin_utils)
                except Exception as e:
                    # Best-effort: a broken web plugin must not take down the
                    # whole HTTP server (was a bare print()).
                    self.fhdhr.logger.error("Failed to load web plugin %s: %s" % (plugin_name, e))

    def start(self):
        """Start the background Flask/WSGI thread."""
        self.fhdhr.logger.info("Flask HTTP Thread Starting")
        self.fhdhr.threads["flask"].start()

    def stop(self):
        """Stop the WSGI server (also unblocks serve_forever in run())."""
        self.fhdhr.logger.info("Flask HTTP Thread Stopping")
        self.http.stop()

    def before_first_request(self):
        self.fhdhr.logger.info("HTTP Server Online.")

    def before_request(self):
        """Populate the Flask session with per-request client metadata."""

        session["session_id"] = str(uuid.uuid4())
        session["instance_id"] = self.instance_id
        session["route_list"] = self.route_list

        session["user_agent"] = request.headers.get('User-Agent')

        session["is_internal_api"] = self.detect_internal_api(request)
        if session["is_internal_api"]:
            self.fhdhr.logger.debug("Client is using internal API call.")

        session["is_mobile"] = self.detect_mobile(request)
        if session["is_mobile"]:
            self.fhdhr.logger.debug("Client is a mobile device.")

        session["is_plexmediaserver"] = self.detect_plexmediaserver(request)
        if session["is_plexmediaserver"]:
            self.fhdhr.logger.debug("Client is a Plex Media Server.")

        # BUGFIX: was detect_plexmediaserver(request), which stored a boolean
        # instead of the client's DeviceAuth query parameter.
        session["deviceauth"] = self.detect_deviceauth(request)

        session["tuner_used"] = None

        session["restart"] = False

        self.fhdhr.logger.debug("Client %s requested %s Opening" % (request.method, request.path))

    def after_request(self, response):
        """Log request completion; honor a requested restart by stopping."""

        # Close Tuner if it was in use, and did not close already
        # if session["tuner_used"] is not None:
        #     tuner = self.fhdhr.device.tuners.tuners[str(session["tuner_used"])]
        #     if tuner.tuner_lock.locked():
        #         self.fhdhr.logger.info("Shutting down Tuner #%s after Request." % session["tuner_used"])
        #         tuner.close()

        self.fhdhr.logger.debug("Client %s requested %s Closing" % (request.method, request.path))
        if not session["restart"]:
            return response
        else:
            return self.stop()

    def detect_internal_api(self, request):
        """True when the request came from fHDHR's own internal API client."""
        user_agent = request.headers.get('User-Agent')
        if not user_agent:
            return False
        elif str(user_agent).lower().startswith("fhdhr"):
            return True
        else:
            return False

    def detect_deviceauth(self, request):
        """Return the DeviceAuth query parameter, or None when absent."""
        return request.args.get('DeviceAuth', default=None, type=str)

    def detect_mobile(self, request):
        """True when the User-Agent looks like a mobile device."""
        user_agent = request.headers.get('User-Agent')
        phones = ["iphone", "android", "blackberry"]
        if not user_agent:
            return False
        elif any(phone in user_agent.lower() for phone in phones):
            return True
        else:
            return False

    def detect_plexmediaserver(self, request):
        """True when the User-Agent identifies a Plex Media Server."""
        user_agent = request.headers.get('User-Agent')
        if not user_agent:
            return False
        elif str(user_agent).lower().startswith("plexmediaserver"):
            return True
        else:
            return False

    def add_endpoints(self, index_name):
        """Register every route exposed by one endpoint provider object."""

        item_list = [x for x in dir(self.endpoints_obj[index_name]) if self.isapath(x)]
        endpoint_main = self.endpoints_obj[index_name]
        endpoint_main.fhdhr.version  # dummy line (kept from original)
        for item in item_list:
            handler = getattr(endpoint_main, item)

            # getattr() replaces the original eval() calls: same attribute
            # lookups without string evaluation.
            endpoints = getattr(handler, "endpoints")
            if isinstance(endpoints, str):
                endpoints = [endpoints]
            endpoint_name = getattr(handler, "endpoint_name")

            # Optional per-handler attributes, with the original defaults.
            endpoint_methods = getattr(handler, "endpoint_methods", ['GET'])
            endpoint_access_level = getattr(handler, "endpoint_access_level", 0)
            pretty_name = getattr(handler, "pretty_name", endpoint_name)
            endpoint_category = getattr(handler, "endpoint_category", index_name)
            endpoint_default_parameters = getattr(handler, "endpoint_default_parameters", {})

            self.fhdhr.logger.debug("Adding endpoint %s available at %s with %s methods." % (endpoint_name, ",".join(endpoints), ",".join(endpoint_methods)))

            if endpoint_category not in list(self.route_list.keys()):
                self.route_list[endpoint_category] = {}

            if endpoint_name not in list(self.route_list[endpoint_category].keys()):
                self.route_list[endpoint_category][endpoint_name] = {}
                self.route_list[endpoint_category][endpoint_name]["name"] = endpoint_name
                self.route_list[endpoint_category][endpoint_name]["endpoints"] = endpoints
                self.route_list[endpoint_category][endpoint_name]["endpoint_methods"] = endpoint_methods
                self.route_list[endpoint_category][endpoint_name]["endpoint_access_level"] = endpoint_access_level
                self.route_list[endpoint_category][endpoint_name]["endpoint_default_parameters"] = endpoint_default_parameters
                self.route_list[endpoint_category][endpoint_name]["pretty_name"] = pretty_name
                self.route_list[endpoint_category][endpoint_name]["endpoint_category"] = endpoint_category

            for endpoint in endpoints:
                self.add_endpoint(endpoint=endpoint,
                                  endpoint_name=endpoint_name,
                                  handler=handler,
                                  methods=endpoint_methods)

    def isapath(self, item):
        """Filter for provider attributes that represent routable handlers."""
        not_a_page_list = ["fhdhr", "plugin_utils"]
        if item in not_a_page_list:
            return False
        elif item.startswith("__") and item.endswith("__"):
            return False
        else:
            return True

    def add_endpoint(self, endpoint=None, endpoint_name=None, handler=None, methods=['GET']):
        # Register one URL rule on the Flask app.
        self.fhdhr.app.add_url_rule(endpoint, endpoint_name, handler, methods=methods)

    def run(self):
        """Thread target: serve until the WSGIServer is stopped."""

        self.http = WSGIServer(self.fhdhr.api.address_tuple,
                               self.fhdhr.app.wsgi_app,
                               log=self.fhdhr.logger.logger,
                               error_log=self.fhdhr.logger.logger)
        try:
            self.http.serve_forever()
            self.stop()
        except AttributeError:
            self.fhdhr.logger.info("HTTP Server Offline")
|
||||