first psuh
12
Dockerfile
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
FROM python:3.8-slim
|
||||||
|
|
||||||
|
RUN apt-get -qq update && \
|
||||||
|
apt-get -qq -y install ffmpeg gcc && \
|
||||||
|
apt-get autoclean && \
|
||||||
|
rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
COPY ./ /app/
|
||||||
|
WORKDIR /app
|
||||||
|
RUN pip3 install -r requirements.txt
|
||||||
|
|
||||||
|
ENTRYPOINT ["python3", "/app/main.py", "--config", "/app/config/config.ini"]
|
||||||
13
LICENSE
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
|
||||||
|
Version 2, December 2004
|
||||||
|
|
||||||
|
Copyright (C) 2020 Sam Zick <Sam@deathbybandaid.net>
|
||||||
|
|
||||||
|
Everyone is permitted to copy and distribute verbatim or modified
|
||||||
|
copies of this license document, and changing it is allowed as long
|
||||||
|
as the name is changed.
|
||||||
|
|
||||||
|
DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
|
||||||
|
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
|
||||||
|
|
||||||
|
0. You just DO WHAT THE FUCK YOU WANT TO.
|
||||||
17
README.md
@ -1,2 +1,17 @@
|
|||||||
# fHDHR_LocalNow
|
<p align="center">fHDHR_NewsOn <img src="docs/images/logo.ico" alt="Logo"/></p>
|
||||||
|
|
||||||
|
|
||||||
|
Welcome to the world of streaming content as a DVR device! We use some fancy python here to achieve a system of:
|
||||||
|
|
||||||
|
**f**un
|
||||||
|
**H**ome
|
||||||
|
**D**istribution
|
||||||
|
**H**iatus
|
||||||
|
**R**ecreation
|
||||||
|
|
||||||
|
|
||||||
|
Please Check the [Docs](docs/README.md) for Installation information.
|
||||||
|
|
||||||
|
fHDHR is labeled as beta until we reach v1.0.0
|
||||||
|
|
||||||
|
Join us in `#fHDHR <irc://irc.freenode.net/#fHDHR>`_ on Freenode.
|
||||||
|
|||||||
2
alternative_epg/__init__.py
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
# pylama:ignore=W0401,W0611
|
||||||
|
# from .test import *
|
||||||
45
config.all.ini
Normal file
@ -0,0 +1,45 @@
|
|||||||
|
[main]
|
||||||
|
# uuid =
|
||||||
|
# cache_dir =
|
||||||
|
# servicename = NewsOn
|
||||||
|
# reponame = fHDHR_NewsOn
|
||||||
|
|
||||||
|
[fhdhr]
|
||||||
|
# address = 0.0.0.0
|
||||||
|
# discovery_address = 0.0.0.0
|
||||||
|
# port = 5004
|
||||||
|
# stream_type = direct
|
||||||
|
# tuner_count = 4
|
||||||
|
# friendlyname = fHDHR-NewsOn
|
||||||
|
# reporting_firmware_name = fHDHR_NewsOn
|
||||||
|
# reporting_manufacturer = BoronDust
|
||||||
|
# reporting_model = fHDHR
|
||||||
|
# reporting_firmware_ver = 20201001
|
||||||
|
# reporting_tuner_type = Antenna
|
||||||
|
# device_auth = fHDHR
|
||||||
|
|
||||||
|
[epg]
|
||||||
|
# images = pass
|
||||||
|
# method = origin
|
||||||
|
# update_frequency = 43200
|
||||||
|
|
||||||
|
[ffmpeg]
|
||||||
|
# path = ffmpeg
|
||||||
|
# bytes_per_read = 1152000
|
||||||
|
|
||||||
|
[vlc]
|
||||||
|
# path = cvlc
|
||||||
|
# bytes_per_read = 1152000
|
||||||
|
|
||||||
|
[direct_stream]
|
||||||
|
# chunksize = 1048576
|
||||||
|
|
||||||
|
[logging]
|
||||||
|
# level = WARNING
|
||||||
|
|
||||||
|
[database]
|
||||||
|
# type = sqlite
|
||||||
|
# driver = None
|
||||||
|
|
||||||
|
[newson]
|
||||||
|
# force_best = False
|
||||||
4
config.example.ini
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
[fhdhr]
|
||||||
|
# address = 0.0.0.0
|
||||||
|
# port = 5004
|
||||||
|
# discovery_address = 0.0.0.0
|
||||||
0
data/cache/PLACEHOLDER
vendored
Normal file
BIN
data/garamond.ttf
Normal file
39
data/internal_config/database.json
Normal file
@ -0,0 +1,39 @@
|
|||||||
|
{
|
||||||
|
"database":{
|
||||||
|
"type":{
|
||||||
|
"value": "sqlite",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"driver":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"user":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"pass":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"host":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"port":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"name":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
45
data/internal_config/epg.json
Normal file
@ -0,0 +1,45 @@
|
|||||||
|
{
|
||||||
|
"epg":{
|
||||||
|
"images":{
|
||||||
|
"value": "pass",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"method":{
|
||||||
|
"value": "blocks",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"update_frequency":{
|
||||||
|
"value": 43200,
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"valid_epg_methods":{
|
||||||
|
"value": "None,blocks",
|
||||||
|
"config_file": false,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"reverse_days": {
|
||||||
|
"value": -1,
|
||||||
|
"config_file": false,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"forward_days": {
|
||||||
|
"value": 7,
|
||||||
|
"config_file": false,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"block_size": {
|
||||||
|
"value": 1800,
|
||||||
|
"config_file": false,
|
||||||
|
"config_web": false
|
||||||
|
}
|
||||||
|
,
|
||||||
|
"xmltv_offset": {
|
||||||
|
"value": "+0000",
|
||||||
|
"config_file": false,
|
||||||
|
"config_web": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
74
data/internal_config/fhdhr.json
Normal file
@ -0,0 +1,74 @@
|
|||||||
|
{
|
||||||
|
"fhdhr":{
|
||||||
|
"address":{
|
||||||
|
"value": "0.0.0.0",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"discovery_address":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"port":{
|
||||||
|
"value": 5004,
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"reporting_manufacturer":{
|
||||||
|
"value": "BoronDust",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"reporting_model":{
|
||||||
|
"value": "fHDHR",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"reporting_firmware_ver":{
|
||||||
|
"value": "20201001",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"reporting_tuner_type":{
|
||||||
|
"value": "Antenna",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"device_auth":{
|
||||||
|
"value": "fHDHR",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"require_auth":{
|
||||||
|
"value": false,
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"chanscan_on_start":{
|
||||||
|
"value": true,
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"friendlyname":{
|
||||||
|
"value": "fHDHR",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"stream_type":{
|
||||||
|
"value": "direct",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"tuner_count":{
|
||||||
|
"value": 4,
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"reporting_firmware_name":{
|
||||||
|
"value": "fHDHR",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
9
data/internal_config/logging.json
Normal file
@ -0,0 +1,9 @@
|
|||||||
|
{
|
||||||
|
"logging":{
|
||||||
|
"level":{
|
||||||
|
"value": "INFO",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
34
data/internal_config/main.json
Normal file
@ -0,0 +1,34 @@
|
|||||||
|
{
|
||||||
|
"main":{
|
||||||
|
"uuid":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"cache_dir":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"servicename":{
|
||||||
|
"value": "fHDHR",
|
||||||
|
"config_file": false,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"dictpopname":{
|
||||||
|
"value": "fHDHR",
|
||||||
|
"config_file": false,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"reponame":{
|
||||||
|
"value": "fHDHR",
|
||||||
|
"config_file": false,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"required":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": false,
|
||||||
|
"config_web": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
9
data/internal_config/rmg.json
Normal file
@ -0,0 +1,9 @@
|
|||||||
|
{
|
||||||
|
"rmg":{
|
||||||
|
"enabled":{
|
||||||
|
"value": true,
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
29
data/internal_config/ssdp.json
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
{
|
||||||
|
"ssdp":{
|
||||||
|
"enabled":{
|
||||||
|
"value": true,
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"max_age":{
|
||||||
|
"value": 1800,
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"proto":{
|
||||||
|
"value": "ipv4",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"iface":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"multicast_address":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
28
data/internal_config/streaming.json
Normal file
@ -0,0 +1,28 @@
|
|||||||
|
{
|
||||||
|
"streaming":{
|
||||||
|
"bytes_per_read": {
|
||||||
|
"value": 1152000,
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"quality": {
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"ffmpeg":{
|
||||||
|
"path":{
|
||||||
|
"value": "ffmpeg",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"vlc":{
|
||||||
|
"path":{
|
||||||
|
"value": "cvlc",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
129
docs/ADV_Config.md
Normal file
@ -0,0 +1,129 @@
|
|||||||
|
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>
|
||||||
|
|
||||||
|
---
|
||||||
|
[Main](README.md) | [Setup and Usage](Usage.md) | [NewsOn](Origin.md) | [Credits/Related Projects](Related-Projects.md)
|
||||||
|
---
|
||||||
|
**f**un
|
||||||
|
**H**ome
|
||||||
|
**D**istribution
|
||||||
|
**H**iatus
|
||||||
|
**R**ecreation
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
[Basic Configuration](Config.md) | [Advanced Configuration](ADV_Config.md) | [WebUI](WebUI.md)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
Here, we'll break down all of the configuration options per section.
|
||||||
|
|
||||||
|
## Main
|
||||||
|
Here's the `main` section.
|
||||||
|
* `uuid` will be created automatically, you need not worry about this.
|
||||||
|
* `cache_dir` is handy for keeping cached files out of the script directory. This is helpful for reinstalls as well as development.
|
||||||
|
|
||||||
|
````
|
||||||
|
[main]
|
||||||
|
# uuid =
|
||||||
|
# cache_dir =
|
||||||
|
````
|
||||||
|
|
||||||
|
## fhdhr
|
||||||
|
|
||||||
|
The `fhdhr` contains all the configuration options for interfacing between this script and your media platform.
|
||||||
|
* `address` and `port` are what we will allow the script to listen on. `0.0.0.0` is the default, and will respond to all.
|
||||||
|
* `discovery_address` may be helpful for making SSDP work properly. If `address` is not `0.0.0.0`, we will use that. If this is not set to a real IP, we won't run SSDP. SSDP is only really helpful for discovering in Plex/Emby. It's a wasted resource since you can manually add the `ip:port` of the script to Plex.
|
||||||
|
* `tuner_count` is a limit of devices able to stream from the script.
|
||||||
|
* `friendlyname` is to set the name that Plex sees the script as.
|
||||||
|
* `stream_type` can be set to `ffmpeg`, `vlc` or `direct`.
|
||||||
|
|
||||||
|
|
||||||
|
````
|
||||||
|
[fhdhr]
|
||||||
|
# address = 0.0.0.0
|
||||||
|
# discovery_address = 0.0.0.0
|
||||||
|
# port = 5004
|
||||||
|
# stream_type = direct
|
||||||
|
# tuner_count = 4
|
||||||
|
# friendlyname = fHDHR-NewsOn
|
||||||
|
# reporting_firmware_name = fHDHR_NewsOn
|
||||||
|
# reporting_manufacturer = BoronDust
|
||||||
|
# reporting_model = fHDHR
|
||||||
|
# reporting_firmware_ver = 20201001
|
||||||
|
# reporting_tuner_type = Antenna
|
||||||
|
# device_auth = fHDHR
|
||||||
|
````
|
||||||
|
|
||||||
|
# EPG
|
||||||
|
* `images` can be set to `proxy` or `pass`. If you choose `proxy`, images will be reverse proxied through fHDHR.
|
||||||
|
* `method` defaults to `origin` and will pull the xmltv data from NewsOn. Other Options include `blocks` which is an hourly schedule with minimal channel information. Another option is `zap2it`, which is another source of EPG information. Channel Numbers may need to be manually mapped.
|
||||||
|
* `update_frequency` * `epg_update_frequency` determines how often we check for new scheduling information. In Seconds.
|
||||||
|
|
||||||
|
````
|
||||||
|
[epg]
|
||||||
|
# images = pass
|
||||||
|
# method = origin
|
||||||
|
# update_frequency = 43200
|
||||||
|
````
|
||||||
|
|
||||||
|
## ffmpeg
|
||||||
|
|
||||||
|
The `ffmpeg` section includes:
|
||||||
|
* `path` is useful if ffmpeg is not in your systems PATH, or you want to manually specify.
|
||||||
|
* `bytes_per_read` determines how many bytes of the stream to read before sending the data to your client. Increasing this value may cause longer load times, and lowering it may effect `stuttering`.
|
||||||
|
|
||||||
|
````
|
||||||
|
[ffmpeg]
|
||||||
|
# path = ffmpeg
|
||||||
|
# bytes_per_read = 1152000
|
||||||
|
````
|
||||||
|
|
||||||
|
## vlc
|
||||||
|
|
||||||
|
The `vlc` section includes:
|
||||||
|
* `path` is useful if ffmpeg is not in your systems PATH, or you want to manually specify.
|
||||||
|
* `bytes_per_read` determines how many bytes of the stream to read before sending the data to your client. Increasing this value may cause longer load times, and lowering it may effect `stuttering`.
|
||||||
|
|
||||||
|
````
|
||||||
|
[vlc]
|
||||||
|
# path = ffmpeg
|
||||||
|
# bytes_per_read = 1152000
|
||||||
|
````
|
||||||
|
|
||||||
|
## direct_stream
|
||||||
|
|
||||||
|
The `direct_stream` section is for when you set the `[fhdhr]stream_type` to `direct`
|
||||||
|
* `chunksize` is how much data to read at a time.
|
||||||
|
|
||||||
|
````
|
||||||
|
[direct_stream]
|
||||||
|
# chunksize = 1024*1024
|
||||||
|
````
|
||||||
|
|
||||||
|
# Logging
|
||||||
|
* `level` determines the amount of logging you wish to see in the console, as well as to the logfile (stored in your cache directory).
|
||||||
|
|
||||||
|
````
|
||||||
|
[logging]
|
||||||
|
# level = WARNING
|
||||||
|
````
|
||||||
|
|
||||||
|
# Database
|
||||||
|
* experiment with these settings at your own risk. We use sqlalchemy to provide database options, but we default to sqlite.
|
||||||
|
|
||||||
|
TODO: improve documentation here.
|
||||||
|
|
||||||
|
````
|
||||||
|
[database]
|
||||||
|
# type = sqlite
|
||||||
|
# driver = None
|
||||||
|
````
|
||||||
|
|
||||||
|
## NewsOn
|
||||||
|
The `newson` section
|
||||||
|
|
||||||
|
````
|
||||||
|
[newson]
|
||||||
|
# username =
|
||||||
|
# password =
|
||||||
|
````
|
||||||
41
docs/Config.md
Normal file
@ -0,0 +1,41 @@
|
|||||||
|
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>
|
||||||
|
|
||||||
|
---
|
||||||
|
[Main](README.md) | [Setup and Usage](Usage.md) | [NewsOn](Origin.md) | [Credits/Related Projects](Related-Projects.md)
|
||||||
|
---
|
||||||
|
**f**un
|
||||||
|
**H**ome
|
||||||
|
**D**istribution
|
||||||
|
**H**iatus
|
||||||
|
**R**ecreation
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
[Basic Configuration](Config.md) | [Advanced Configuration](ADV_Config.md) | [WebUI](WebUI.md)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
The example config file contains all of the things that the typical user may need to fill out.
|
||||||
|
|
||||||
|
Please see the Advanced Configuration page for more information.
|
||||||
|
|
||||||
|
## fHDHR
|
||||||
|
|
||||||
|
Under `fhdhr`, you'll find 2 addresses listed. `0.0.0.0` works great for a listen address, however, it seems that SSDP works best if the discovery address is set to the IP to say that there is a service at.
|
||||||
|
|
||||||
|
````
|
||||||
|
[fhdhr]
|
||||||
|
# address = 0.0.0.0
|
||||||
|
# port = 5004
|
||||||
|
# discovery_address = 0.0.0.0
|
||||||
|
````
|
||||||
|
|
||||||
|
## NewsOn
|
||||||
|
|
||||||
|
NewsOn requires signin credentials, so add those.
|
||||||
|
|
||||||
|
````
|
||||||
|
[newson]
|
||||||
|
# username =
|
||||||
|
# password =
|
||||||
|
````
|
||||||
15
docs/Origin.md
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>
|
||||||
|
|
||||||
|
---
|
||||||
|
[Main](README.md) | [Setup and Usage](Usage.md) | [NewsOn](Origin.md) | [Credits/Related Projects](Related-Projects.md)
|
||||||
|
---
|
||||||
|
**f**un
|
||||||
|
**H**ome
|
||||||
|
**D**istribution
|
||||||
|
**H**iatus
|
||||||
|
**R**ecreation
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
|
||||||
|
This varient of fHDHR connects to [NewsOn](https://newson.us/about).
|
||||||
46
docs/README.md
Normal file
@ -0,0 +1,46 @@
|
|||||||
|
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>
|
||||||
|
|
||||||
|
---
|
||||||
|
[Main](README.md) | [Setup and Usage](Usage.md) | [NewsOn](Origin.md) | [Credits/Related Projects](Related-Projects.md)
|
||||||
|
---
|
||||||
|
**f**un
|
||||||
|
**H**ome
|
||||||
|
**D**istribution
|
||||||
|
**H**iatus
|
||||||
|
**R**ecreation
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# The Boring Disclaimers (at the top of the docs for a reason)
|
||||||
|
|
||||||
|
|
||||||
|
fHDHR is a Python service to take various sources of video and make them accessible to client software including, but not limited to:
|
||||||
|
|
||||||
|
* [Plex](https://www.plex.tv/)
|
||||||
|
* [Emby](https://emby.media/)
|
||||||
|
* [Jellyfin](https://jellyfin.org/)
|
||||||
|
* [Channels](https://getchannels.com/)
|
||||||
|
|
||||||
|
fHDHR is not directly affiliated with the above client software, and you will receive NO support for this script via their forums.
|
||||||
|
|
||||||
|
fHDHR is able to connect to clients by emulating a piece of hardware called the [HDHomeRun from SiliconDust](https://www.silicondust.com/). fHDHR is in NO way affiliated with SiliconDust, and is NOT a HDHomeRun device. fHDHR simply uses the API structure used by the authentic HDHomeRun to connect to client DVR solutions.
|
||||||
|
|
||||||
|
# History
|
||||||
|
|
||||||
|
I got the Huappage QuadHD, and the Mohu Sail as a pandemic-project. All was fine working within Plex, but I also have emby setup as a backup to Plex when auth is broken.
|
||||||
|
|
||||||
|
I thought to myself, "Self, I should look on github for a way to share my tv tuner between the two".
|
||||||
|
|
||||||
|
That's when I tried both npvrProxy with NextPVR as well as tvhProxy with TVHeadend. I had to tinker with both to get them working, but I started testing which one I liked more.
|
||||||
|
|
||||||
|
Around this same time, I stumbled upon [locast2plex by tgorgdotcom](https://github.com/tgorgdotcom/locast2plex). I wanted to contribute to that project to get it to a point that I could fork it to work for other video stream sources.
|
||||||
|
|
||||||
|
The locast2plex code development wasn't going quite fast enough for the feature-creep in my head.
|
||||||
|
|
||||||
|
I then proceded to create the initial iteration of fHDHR which I originally called "FakeHDHR". I've rewritten the core functionality a few times before landing on the current code structure, which feels 'right'.
|
||||||
|
|
||||||
|
I've worked really hard to create a structure that simplifies new variants of the core code to work with different 'origin' streams. Combining these works really well with [xTeVe](https://github.com/xteve-project/xTeVe).
|
||||||
|
|
||||||
|
One of the variants goes as far as scraping a table from a PDF file for creating a channel guide!
|
||||||
|
|
||||||
|
I can easily create more variants of the project to do other video sources. Paid ones, I could potentially accept donations for, as I don't want to pay to develop for multiple platforms.
|
||||||
26
docs/Related-Projects.md
Normal file
@ -0,0 +1,26 @@
|
|||||||
|
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>
|
||||||
|
|
||||||
|
---
|
||||||
|
[Main](README.md) | [Setup and Usage](Usage.md) | [NewsOn](Origin.md) | [Credits/Related Projects](Related-Projects.md)
|
||||||
|
---
|
||||||
|
**f**un
|
||||||
|
**H**ome
|
||||||
|
**D**istribution
|
||||||
|
**H**iatus
|
||||||
|
**R**ecreation
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
|
||||||
|
While the fHDHR reops share very little code from the below projects, they were a source of inspiration:
|
||||||
|
|
||||||
|
* [tvhProxy by jkaberg](https://github.com/jkaberg/tvhProxy)
|
||||||
|
* [locast2plex by tgorgdotcom](https://github.com/tgorgdotcom/locast2plex)
|
||||||
|
|
||||||
|
|
||||||
|
Aside from the above, these other projects are worth a look as well:
|
||||||
|
|
||||||
|
* [npvrProxy](https://github.com/rogueosb/npvrProxy)
|
||||||
|
* [xTeVe](https://xteve.de/)
|
||||||
|
* [telly](https://github.com/tellytv/telly)
|
||||||
|
* [dizquetv](https://github.com/vexorian/dizquetv)
|
||||||
129
docs/Usage.md
Normal file
@ -0,0 +1,129 @@
|
|||||||
|
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>
|
||||||
|
|
||||||
|
---
|
||||||
|
[Main](README.md) | [Setup and Usage](Usage.md) | [NewsOn](Origin.md) | [Credits/Related Projects](Related-Projects.md)
|
||||||
|
---
|
||||||
|
**f**un
|
||||||
|
**H**ome
|
||||||
|
**D**istribution
|
||||||
|
**H**iatus
|
||||||
|
**R**ecreation
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
[Basic Configuration](Config.md) | [Advanced Configuration](ADV_Config.md) | [WebUI](WebUI.md)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# Author Notes
|
||||||
|
|
||||||
|
* All Testing is currently done in Proxmox LXC, Ubuntu 20.04, Python 3.8
|
||||||
|
|
||||||
|
|
||||||
|
# Prerequisites
|
||||||
|
|
||||||
|
* A Linux or Mac "Server". Windows currently does not work. A "Server" is a computer that is typically always online.
|
||||||
|
* Python 3.7 or later.
|
||||||
|
* Consult [This Page](Origin.md) for additional setup specific to this variant of fHDHR.
|
||||||
|
|
||||||
|
|
||||||
|
# Optional Prerequisites
|
||||||
|
* If you intend to use Docker, [This Guide](https://docs.docker.com/get-started/) should help you get started. The author of fHDHR is not a docker user, but will still try to help.
|
||||||
|
|
||||||
|
fHDHR uses direct connections with video sources by default. Alternatively, you can install and update the [config](Config.md) accordingly. You will need to make these available to your systems PATH, or manually set their path via the config file.
|
||||||
|
|
||||||
|
* ffmpeg
|
||||||
|
* vlc
|
||||||
|
|
||||||
|
|
||||||
|
# Installation
|
||||||
|
|
||||||
|
## Linux
|
||||||
|
|
||||||
|
* Download the zip, or git clone
|
||||||
|
* Navigate into your script directory and run `pip3 install -r requirements.txt`
|
||||||
|
* Copy the included `config.example.ini` file to a known location. The script will not run without this. There is no default configuration file location. [Modify the configuration file to suit your needs.](Config.md)
|
||||||
|
|
||||||
|
* Run with `python3 main.py -c=` and the path to the config file.
|
||||||
|
|
||||||
|
|
||||||
|
## Docker
|
||||||
|
This portion of the guide assumes you are using a Linux system with both docker and docker-compose installed. This (or some variation thereof) may work on Mac or Windows, but has not been tested.
|
||||||
|
|
||||||
|
* this guide assumes we wish to use the `~/fhdhr` directory for our install (you can use whatever directory you like, just make the appropriate changes elsewhere in this guide) and that we are installing for NewsOn support
|
||||||
|
* run the following commands to clone the repo into `~/fhdhr/fHDHR_NewsOn`
|
||||||
|
```
|
||||||
|
cd ~/fhdhr
|
||||||
|
git clone https://github.com/fHDHR/fHDHR_NewsOn.git
|
||||||
|
```
|
||||||
|
* create your config.ini file (as described earlier in this guide) in the `~/fhdhr/fHDHR_NewsOn` directory
|
||||||
|
* while still in the `~/fhdhr` directory, create the following `docker-compose.yml` file
|
||||||
|
```
|
||||||
|
version: '3'
|
||||||
|
|
||||||
|
services:
|
||||||
|
newson:
|
||||||
|
build: ./fHDHR_NewsOn
|
||||||
|
container_name: newson
|
||||||
|
network_mode: host
|
||||||
|
volumes:
|
||||||
|
- ./fHDHR_NewsOn/config.ini:/app/config/config.ini
|
||||||
|
```
|
||||||
|
* run the following command to build and launch the container
|
||||||
|
```
|
||||||
|
docker-compose up --build -d newson
|
||||||
|
```
|
||||||
|
|
||||||
|
After a short period of time (during which docker will build your new fHDHR container), you should now have a working build of fHDHR running inside a docker container.
|
||||||
|
|
||||||
|
As the code changes and new versions / bug fixes are released, at any point you can pull the latest version of the code and rebuild your container with the following commands:
|
||||||
|
```
|
||||||
|
cd ~/fhdhr/fHDHR_NewsOn
|
||||||
|
git checkout master
|
||||||
|
git pull
|
||||||
|
cd ~/fhdhr
|
||||||
|
docker-compose up --build -d newson
|
||||||
|
```
|
||||||
|
<hr />
|
||||||
|
|
||||||
|
You can also run multiple instances of fHDHR to support additional sources by cloning the appropriate repo into your `~/fhdhr` directory and adding the necessary services to the docker-compose file we created above.
|
||||||
|
|
||||||
|
* for example, if we also wanted NewsOn support, you would clone the NewsOn repository:
|
||||||
|
```
|
||||||
|
cd ~/fhdhr
|
||||||
|
git clone https://github.com/fHDHR/fHDHR_NewsOn.git
|
||||||
|
```
|
||||||
|
* **NOTE**: if you are running multiple services on the same machine, you must change the port in your config.ini file for each one. For example, if NewsOn was using the default port of 5004, NewsOn cannot also use that port. You must change the port in your NewsOn config.ini file to something else (5005, for example).
|
||||||
|
* add newson as a service in your `docker-compose.yml` file
|
||||||
|
```
|
||||||
|
version: '3'
|
||||||
|
|
||||||
|
services:
|
||||||
|
newson:
|
||||||
|
build: ./fHDHR_NewsOn
|
||||||
|
container_name: newson
|
||||||
|
network_mode: host
|
||||||
|
volumes:
|
||||||
|
- ./fHDHR_NewsOn/config.ini:/app/config/config.ini
|
||||||
|
|
||||||
|
newson:
|
||||||
|
build: ./fHDHR_NewsOn
|
||||||
|
container_name: newson
|
||||||
|
network_mode: host
|
||||||
|
volumes:
|
||||||
|
- ./fHDHR_NewsOn/config.ini:/app/config/config.ini
|
||||||
|
```
|
||||||
|
* run the following command to build and launch the container
|
||||||
|
```
|
||||||
|
docker-compose up --build -d newson
|
||||||
|
```
|
||||||
|
|
||||||
|
You can repeat these instructions for as many fHDHR containers as your system resources will allow.
|
||||||
|
|
||||||
|
# Setup
|
||||||
|
|
||||||
|
Now that you have fHDHR running, You can navigate (in a web browser) to the IP:Port from the configuration step above.
|
||||||
|
|
||||||
|
If you did not setup a `discovery_address` in your config, SSDP will be disabled. This is not a problem as clients like Plex can have the IP:Port entered manually!
|
||||||
|
|
||||||
|
You can copy the xmltv link from the webUI and use that in your client software to provide Channel Guide information.
|
||||||
98
docs/WebUI.md
Normal file
@ -0,0 +1,98 @@
|
|||||||
|
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>
|
||||||
|
|
||||||
|
---
|
||||||
|
[Main](README.md) | [Setup and Usage](Usage.md) | [NewsOn](Origin.md) | [Credits/Related Projects](Related-Projects.md)
|
||||||
|
---
|
||||||
|
**f**un
|
||||||
|
**H**ome
|
||||||
|
**D**istribution
|
||||||
|
**H**iatus
|
||||||
|
**R**ecreation
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
[Basic Configuration](Config.md) | [Advanced Configuration](ADV_Config.md) | [WebUI](WebUI.md)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
|
||||||
|
This Page will introduce basic handling of the script from the Web Interface provided at IP:Port
|
||||||
|
|
||||||
|
The Pages are available in the buttons at the top, links to xmltv and m3u are provided at the top for ease of access.
|
||||||
|
|
||||||
|
|
||||||
|
# Main Landing Page
|
||||||
|
|
||||||
|
Below is the main landing page with basic information.
|
||||||
|
|
||||||
|
<img src="screenshots/webui_main.PNG" alt="Main Page"/>
|
||||||
|
|
||||||
|
# NewsOn
|
||||||
|
|
||||||
|
Here you will have access to some basic information about the service we are proxying.
|
||||||
|
|
||||||
|
The webUI will still work, even if setup didn't go smoothly.
|
||||||
|
|
||||||
|
<img src="screenshots/webui_origin.PNG" alt="Origin Page"/>
|
||||||
|
|
||||||
|
# Guide
|
||||||
|
|
||||||
|
This Page give you information about what is currently playing on all stations. It will also show the time remaining for each item.
|
||||||
|
|
||||||
|
* Note: The Play link in the left hand column can be copied to play a channel in VLC media player!
|
||||||
|
|
||||||
|
<img src="screenshots/webui_guide.PNG" alt="Guide Page"/>
|
||||||
|
|
||||||
|
|
||||||
|
# Cluster
|
||||||
|
|
||||||
|
Since SSDP is used for service discovery, I decided to also use it for ease of management.
|
||||||
|
|
||||||
|
This tab will not have the below options if SSDP isn't running.
|
||||||
|
|
||||||
|
Joining a cluster will provide a second row of buttons for the clustered servers.
|
||||||
|
|
||||||
|
Unjoined:
|
||||||
|
|
||||||
|
<img src="screenshots/webui_cluster_unjoined.PNG" alt="Cluster Page, UnJoined"/>
|
||||||
|
|
||||||
|
Joined:
|
||||||
|
|
||||||
|
<img src="screenshots/webui_cluster_joined.PNG" alt="Cluster Page, Joined"/>
|
||||||
|
|
||||||
|
|
||||||
|
# Streams
|
||||||
|
|
||||||
|
This Page will show all active streams, and tuner information. You can also terminate a stream from here.
|
||||||
|
|
||||||
|
* Note: Clients will often have an amount buffered, and the connection termination is not immediate from a viewing perspective. However, the connection to the source is indeed cut off.
|
||||||
|
|
||||||
|
<img src="screenshots/webui_streams.PNG" alt="Streams Page"/>
|
||||||
|
|
||||||
|
# xmltv
|
||||||
|
|
||||||
|
This page will give you access to all the xmltv formats provided by this varient.
|
||||||
|
|
||||||
|
From here, you can manually update or even clear the cached epg, and then update.
|
||||||
|
|
||||||
|
<img src="screenshots/webui_xmltv.PNG" alt="xmltv Page"/>
|
||||||
|
|
||||||
|
# Version
|
||||||
|
|
||||||
|
This page will give valuable information about the environment the script is being run in.
|
||||||
|
|
||||||
|
<img src="screenshots/webui_version.PNG" alt="Version Page"/>
|
||||||
|
|
||||||
|
# Diganostics
|
||||||
|
|
||||||
|
This page has various links to json/xml files that make the magic work, as well as debug and cluster information.
|
||||||
|
|
||||||
|
<img src="screenshots/webui_diagnostics.PNG" alt="Diagnostics Page"/>
|
||||||
|
|
||||||
|
# Settings
|
||||||
|
|
||||||
|
This page allows viewing/changing all possible configuration options.
|
||||||
|
|
||||||
|
* Note: This will require a restart of the script to have any effect.
|
||||||
|
|
||||||
|
<img src="screenshots/webui_diagnostics.PNG" alt="Diagnostics Page"/>
|
||||||
BIN
docs/images/logo.ico
Normal file
|
After Width: | Height: | Size: 15 KiB |
BIN
docs/screenshots/webui_cluster_joined.PNG
Normal file
|
After Width: | Height: | Size: 30 KiB |
BIN
docs/screenshots/webui_cluster_unjoined.PNG
Normal file
|
After Width: | Height: | Size: 27 KiB |
BIN
docs/screenshots/webui_diagnostics.PNG
Normal file
|
After Width: | Height: | Size: 23 KiB |
BIN
docs/screenshots/webui_guide.PNG
Normal file
|
After Width: | Height: | Size: 137 KiB |
BIN
docs/screenshots/webui_main.PNG
Normal file
|
After Width: | Height: | Size: 23 KiB |
BIN
docs/screenshots/webui_origin.PNG
Normal file
|
After Width: | Height: | Size: 30 KiB |
BIN
docs/screenshots/webui_settings.PNG
Normal file
|
After Width: | Height: | Size: 53 KiB |
BIN
docs/screenshots/webui_streams.PNG
Normal file
|
After Width: | Height: | Size: 39 KiB |
BIN
docs/screenshots/webui_version.PNG
Normal file
|
After Width: | Height: | Size: 26 KiB |
BIN
docs/screenshots/webui_xmltv.PNG
Normal file
|
After Width: | Height: | Size: 27 KiB |
39
fHDHR/__init__.py
Normal file
@ -0,0 +1,39 @@
|
|||||||
|
# coding=utf-8
|
||||||
|
|
||||||
|
from .originwrapper import OriginServiceWrapper
|
||||||
|
from .device import fHDHR_Device
|
||||||
|
from .api import fHDHR_API_URLs
|
||||||
|
|
||||||
|
import fHDHR.tools
|
||||||
|
|
||||||
|
fHDHR_VERSION = "v0.6.0-beta"
|
||||||
|
|
||||||
|
|
||||||
|
class fHDHR_INT_OBJ():
    """Internal shared-state container passed to every fHDHR subsystem.

    Holds the configuration, logger, database handle, a shared HTTP
    session wrapper and the registry of background threads.
    """

    def __init__(self, settings, logger, db):
        # Core version string plus the three services handed in by run().
        self.version = fHDHR_VERSION
        self.config = settings
        self.logger = logger
        self.db = db

        # Shared outbound HTTP session (fHDHR.tools.WebReq).
        self.web = fHDHR.tools.WebReq()

        # URL helper for calling this instance's own HTTP API.
        self.api = fHDHR_API_URLs(settings, self.web)

        # Registry of background threads (e.g. "flask"), populated later.
        self.threads = {}
|
|
||||||
|
class fHDHR_OBJ():
    """Top-level application object.

    Wires the internal state object, the origin service wrapper and the
    device layer together, and proxies unknown attribute lookups to the
    internal fHDHR_INT_OBJ.
    """

    def __init__(self, settings, logger, db, origin, alternative_epg):
        self.fhdhr = fHDHR_INT_OBJ(settings, logger, db)

        self.originwrapper = OriginServiceWrapper(self.fhdhr, origin)

        self.device = fHDHR_Device(self.fhdhr, self.originwrapper, alternative_epg)

    def __getattr__(self, name):
        ''' will only get called for undefined attributes '''
        # Bug fix: the original resolved the attribute with
        # eval("self.fhdhr.%s" % name) and silently returned None when it
        # was missing. Use getattr() and raise AttributeError per the
        # standard __getattr__ contract. Reading self.__dict__ directly
        # also avoids infinite recursion if __getattr__ fires before
        # __init__ has set self.fhdhr.
        internal = self.__dict__.get("fhdhr")
        if internal is not None and hasattr(internal, name):
            return getattr(internal, name)
        raise AttributeError(name)
82
fHDHR/api/__init__.py
Normal file
@ -0,0 +1,82 @@
|
|||||||
|
import urllib.parse
|
||||||
|
|
||||||
|
|
||||||
|
class Fillin_Client():
    """Placeholder HTTP client used until a real client is installed.

    Delegates every undefined attribute to the underlying web session so
    fHDHR_API_URLs can call .get()/.post() before startup completes.
    """

    def __init__(self, settings, web):
        self.config = settings
        self.web = web

    def __getattr__(self, name):
        ''' will only get called for undefined attributes '''
        # Bug fix: the original used eval("self.web.session.%s" % name)
        # and silently returned None when the attribute was missing.
        # getattr() is equivalent for present attributes and raises
        # AttributeError otherwise, restoring the standard contract.
        return getattr(self.web.session, name)
|
||||||
|
|
||||||
|
class fHDHR_API_URLs():
    """Builds URLs for, and issues requests against, this fHDHR instance's
    own HTTP API.

    The client starts as a Fillin_Client and is replaced later (comment
    preserved from the original) with a real HTTP/Flask client.
    """

    def __init__(self, settings, web):
        self.config = settings
        self.web = web

        self.headers = {'User-Agent': "fHDHR/%s" % self.config.internal["versions"]["fHDHR"]}

        # Replaced later
        self.client = Fillin_Client(settings, web)

        self.address = self.config.dict["fhdhr"]["address"]
        self.discovery_address = self.config.dict["fhdhr"]["discovery_address"]
        self.port = self.config.dict["fhdhr"]["port"]

    def _full_url(self, url):
        # Prefix relative paths with this instance's base URL; absolute
        # http(s) URLs pass through untouched.
        if not url.startswith("http"):
            if not url.startswith("/"):
                url = "/%s" % url
            url = "%s%s" % (self.base, url)
        return url

    def get(self, url, *args):
        """GET *url* against the fHDHR API and return the response.

        Bug fix: the original branched on the client class name but ran
        identical calls in both branches, and discarded the response.
        Returning it is backward compatible (callers ignoring the former
        None return still work).
        """
        return self.client.get(self._full_url(url), headers=self.headers, *args)

    def post(self, url, *args):
        """POST *url* against the fHDHR API and return the response."""
        return self.client.post(self._full_url(url), headers=self.headers, *args)

    @property
    def base(self):
        """Base http URL of this instance.

        The original had three branches, two of which returned the same
        value; prefer the discovery address when one is configured.
        """
        if self.discovery_address:
            return 'http://%s:%s' % self.discovery_address_tuple
        return 'http://%s:%s' % self.address_tuple

    @property
    def base_quoted(self):
        """URL-quoted form of base, for embedding in query strings."""
        return urllib.parse.quote(self.base)

    @property
    def discovery_address_tuple(self):
        return (self.discovery_address, int(self.port))

    @property
    def localhost_address_tuple(self):
        return ("127.0.0.1", int(self.port))

    @property
    def address_tuple(self):
        return (self.address, int(self.port))
0
fHDHR/cli/__init__.py
Normal file
101
fHDHR/cli/run.py
Normal file
@ -0,0 +1,101 @@
|
|||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import argparse
|
||||||
|
import time
|
||||||
|
|
||||||
|
from fHDHR import fHDHR_VERSION, fHDHR_OBJ
|
||||||
|
import fHDHR.exceptions
|
||||||
|
import fHDHR.config
|
||||||
|
from fHDHR.db import fHDHRdb
|
||||||
|
|
||||||
|
# Exit codes returned to the launcher; any value other than "restart"
# stops the restart loop in main().
ERR_CODE = 1
ERR_CODE_NO_RESTART = 2


# Refuse to start on interpreters older than 3.7.
if sys.version_info.major == 2 or sys.version_info < (3, 7):
    print('Error: fHDHR requires python 3.7+.')
    sys.exit(1)
|
|
||||||
|
|
||||||
|
def build_args_parser():
    """Build argument parser for fHDHR"""
    arg_parser = argparse.ArgumentParser(description='fHDHR')
    arg_parser.add_argument(
        '-c', '--config', dest='cfg', type=str, required=True,
        help='configuration file to load.')
    return arg_parser.parse_args()
||||||
|
|
||||||
|
|
||||||
|
def get_configuration(args, script_dir, origin, fHDHR_web):
    """Return a Config object for the file named by the CLI arguments.

    Raises ConfigurationNotFound when the file does not exist.
    """
    if os.path.isfile(args.cfg):
        return fHDHR.config.Config(args.cfg, script_dir, origin, fHDHR_web)
    raise fHDHR.exceptions.ConfigurationNotFound(filename=args.cfg)
||||||
|
|
||||||
|
|
||||||
|
def run(settings, logger, db, script_dir, fHDHR_web, origin, alternative_epg):
    """Construct the fHDHR application and block until it stops.

    Returns "restart" when the web-server thread exits (the caller's loop
    restarts us), or ERR_CODE_NO_RESTART on Ctrl-C.
    """

    fhdhr = fHDHR_OBJ(settings, logger, db, origin, alternative_epg)
    fhdhrweb = fHDHR_web.fHDHR_HTTP_Server(fhdhr)

    try:

        # Start Flask Thread
        fhdhrweb.start()

        # Start SSDP Thread (only when a discovery address is configured)
        if settings.dict["fhdhr"]["discovery_address"]:
            fhdhr.device.ssdp.start()

        # Start EPG Thread
        if settings.dict["epg"]["method"]:
            fhdhr.device.epg.start()

        # Perform some actions now that HTTP Server is running
        fhdhr.api.get("/api/startup_tasks")

        # Block while the web-server thread is alive; when it dies, ask
        # the caller to restart us.
        restart_code = "restart"
        while fhdhr.threads["flask"].is_alive():
            time.sleep(1)
        return restart_code

    except KeyboardInterrupt:
        return ERR_CODE_NO_RESTART
    # Cleanup: the original ended with an unreachable "return ERR_CODE"
    # after the try/except (both paths already return); it was removed.
|
|
||||||
|
|
||||||
|
def start(args, script_dir, fHDHR_web, origin, alternative_epg):
    """Load configuration, set up logging and the database, then run fHDHR.

    Returns run()'s result, or ERR_CODE_NO_RESTART when the configuration
    is invalid.
    """
    try:
        settings = get_configuration(args, script_dir, origin, fHDHR_web)
    except fHDHR.exceptions.ConfigurationError as err:
        print(err)
        return ERR_CODE_NO_RESTART

    logger = settings.logging_setup()
    db = fHDHRdb(settings)

    return run(settings, logger, db, script_dir, fHDHR_web, origin, alternative_epg)
|
|
||||||
|
|
||||||
|
def main(script_dir, fHDHR_web, origin, alternative_epg):
    """fHDHR run script entry point"""
    print("Loading fHDHR %s" % fHDHR_VERSION)
    print("Loading fHDHR_web %s" % fHDHR_web.fHDHR_web_VERSION)
    print("Loading Origin Service: %s %s" % (origin.ORIGIN_NAME, origin.ORIGIN_VERSION))

    try:
        args = build_args_parser()
        # Keep relaunching while start() asks for a restart.
        returned_code = "restart"
        while returned_code == "restart":
            returned_code = start(args, script_dir, fHDHR_web, origin, alternative_epg)
        return returned_code
    except KeyboardInterrupt:
        print("\n\nInterrupted")
        return ERR_CODE
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    # NOTE(review): main() requires (script_dir, fHDHR_web, origin,
    # alternative_epg); calling it bare here raises TypeError. This module
    # appears intended to be invoked via a top-level launcher that supplies
    # those arguments -- confirm before relying on direct execution.
    main()
||||||
349
fHDHR/config/__init__.py
Normal file
@ -0,0 +1,349 @@
|
|||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import random
|
||||||
|
import configparser
|
||||||
|
import pathlib
|
||||||
|
import logging
|
||||||
|
import subprocess
|
||||||
|
import platform
|
||||||
|
import json
|
||||||
|
|
||||||
|
import fHDHR.exceptions
|
||||||
|
from fHDHR import fHDHR_VERSION
|
||||||
|
from fHDHR.tools import isint, isfloat, is_arithmetic, is_docker
|
||||||
|
|
||||||
|
|
||||||
|
class Config():
    """fHDHR configuration loader/holder.

    Merges JSON default files (core, web frontend, plugin dirs) with the
    user's INI file into self.dict, then validates the result.
    """

    def __init__(self, filename, script_dir, origin, fHDHR_web):
        # Plugin-style modules whose versions/defaults are folded in.
        self.origin = origin
        self.fHDHR_web = fHDHR_web

        # internal: non-user-facing runtime data (paths, versions).
        self.internal = {}
        # conf_default: per-section/key metadata from the *_conf.json files.
        self.conf_default = {}
        # dict: the effective configuration (defaults overlaid by the INI).
        self.dict = {}
        self.config_file = filename

        # Load defaults + user INI, then sanity-check the merged result.
        self.initial_load(script_dir)
        self.config_verification()
|
|
||||||
|
    def initial_load(self, script_dir):
        """Discover filesystem paths and load every configuration source.

        Order matters: JSON defaults first (core, then web frontend, then
        plugin directories), then the user's INI file overrides them, and
        finally version info is gathered.
        """

        data_dir = pathlib.Path(script_dir).joinpath('data')
        fHDHR_web_dir = pathlib.Path(script_dir).joinpath('fHDHR_web')
        www_dir = pathlib.Path(fHDHR_web_dir).joinpath('www_dir')
        origin_dir = pathlib.Path(script_dir).joinpath('origin')

        # Well-known locations used throughout the application.
        self.internal["paths"] = {
            "script_dir": script_dir,
            "data_dir": data_dir,
            "alternative_epg": pathlib.Path(script_dir).joinpath('alternative_epg'),
            "origin": origin_dir,
            "origin_web": pathlib.Path(origin_dir).joinpath('origin_web'),
            "cache_dir": pathlib.Path(data_dir).joinpath('cache'),
            "internal_config": pathlib.Path(data_dir).joinpath('internal_config'),
            "fHDHR_web_dir": fHDHR_web_dir,
            "www_dir": www_dir,
            "www_templates_dir": pathlib.Path(fHDHR_web_dir).joinpath('templates'),
            "font": pathlib.Path(data_dir).joinpath('garamond.ttf'),
            }

        # Core defaults: every *.json under data/internal_config.
        for conffile in os.listdir(self.internal["paths"]["internal_config"]):
            conffilepath = os.path.join(self.internal["paths"]["internal_config"], conffile)
            if str(conffilepath).endswith(".json"):
                self.read_json_config(conffilepath)

        # Web frontend defaults: *_conf.json directly under fHDHR_web.
        for file_item in os.listdir(self.internal["paths"]["fHDHR_web_dir"]):
            file_item_path = pathlib.Path(self.internal["paths"]["fHDHR_web_dir"]).joinpath(file_item)
            if str(file_item_path).endswith("_conf.json"):
                self.read_json_config(file_item_path)

        # Plugin defaults: *_conf.json at the top of each plugin dir, or
        # one directory deeper (a plugin may itself be a package).
        for dir_type in ["alternative_epg", "origin"]:

            for file_item in os.listdir(self.internal["paths"][dir_type]):
                file_item_path = pathlib.Path(self.internal["paths"][dir_type]).joinpath(file_item)
                if file_item_path.is_dir():
                    for sub_file_item in os.listdir(file_item_path):
                        sub_file_item_path = pathlib.Path(file_item_path).joinpath(sub_file_item)
                        if str(sub_file_item_path).endswith("_conf.json"):
                            self.read_json_config(sub_file_item_path)
                else:
                    if str(file_item_path).endswith("_conf.json"):
                        self.read_json_config(file_item_path)

        # User overrides from the INI file named on the command line.
        print("Loading Configuration File: %s" % self.config_file)
        self.read_ini_config(self.config_file)

        self.load_versions()
|
|
||||||
|
    def load_versions(self):
        """Collect version information into self.internal["versions"].

        Records fHDHR/plugin/Python/OS versions, warns when running with
        elevated privileges, and probes the configured stream backend
        binary (ffmpeg or vlc) for its version string.
        """

        self.internal["versions"] = {}

        self.internal["versions"]["fHDHR"] = fHDHR_VERSION

        self.internal["versions"]["fHDHR_web"] = self.fHDHR_web.fHDHR_web_VERSION

        self.internal["versions"][self.origin.ORIGIN_NAME] = self.origin.ORIGIN_VERSION

        self.internal["versions"]["Python"] = sys.version

        # Warn (but do not abort) when running with elevated privileges.
        opersystem = platform.system()
        self.internal["versions"]["Operating System"] = opersystem
        if opersystem in ["Linux", "Darwin"]:
            # Linux/Mac
            if os.getuid() == 0 or os.geteuid() == 0:
                print('Warning: Do not run fHDHR with root privileges.')
        elif opersystem in ["Windows"]:
            # Windows
            if os.environ.get("USERNAME") == "Administrator":
                print('Warning: Do not run fHDHR as Administrator.')
        else:
            print("Uncommon Operating System, use at your own risk.")

        isdocker = is_docker()
        self.internal["versions"]["Docker"] = isdocker

        # Probe ffmpeg for its version when it is the stream backend.
        # NOTE(review): "pipe:stdout" is passed as an extra CLI argument to
        # both version probes; it looks unnecessary for "-version" --
        # confirm. The split() chain will also raise IndexError if the
        # binary's output has no "version " marker.
        if self.dict["fhdhr"]["stream_type"] == "ffmpeg":
            try:
                ffmpeg_command = [self.dict["ffmpeg"]["path"],
                                  "-version",
                                  "pipe:stdout"
                                  ]

                ffmpeg_proc = subprocess.Popen(ffmpeg_command, stdout=subprocess.PIPE)
                ffmpeg_version = ffmpeg_proc.stdout.read()
                ffmpeg_proc.terminate()
                ffmpeg_proc.communicate()
                ffmpeg_version = ffmpeg_version.decode().split("version ")[1].split(" ")[0]
            except FileNotFoundError:
                ffmpeg_version = "Missing"
                print("Failed to find ffmpeg.")
            self.internal["versions"]["ffmpeg"] = ffmpeg_version

        # Same probe for vlc.
        if self.dict["fhdhr"]["stream_type"] == "vlc":
            try:
                vlc_command = [self.dict["vlc"]["path"],
                               "--version",
                               "pipe:stdout"
                               ]

                vlc_proc = subprocess.Popen(vlc_command, stdout=subprocess.PIPE)
                vlc_version = vlc_proc.stdout.read()
                vlc_proc.terminate()
                vlc_proc.communicate()
                vlc_version = vlc_version.decode().split("version ")[1].split('\n')[0]
            except FileNotFoundError:
                vlc_version = "Missing"
                print("Failed to find vlc.")
            self.internal["versions"]["vlc"] = vlc_version
|
|
||||||
|
    def read_json_config(self, conffilepath):
        """Load one *_conf.json defaults file into self.dict/self.conf_default.

        Each file maps section -> key -> {"value": ..., optional flags}.
        Values are coerced from strings to int/float/expression/list/
        bool/None; "xmltv_offset" is always kept as a string.
        """
        with open(conffilepath, 'r') as jsonconf:
            confimport = json.load(jsonconf)
        for section in list(confimport.keys()):

            if section not in self.dict.keys():
                self.dict[section] = {}

            if section not in self.conf_default.keys():
                self.conf_default[section] = {}

            for key in list(confimport[section].keys()):

                if key not in list(self.conf_default[section].keys()):
                    self.conf_default[section][key] = {}

                # Coerce the raw value to a typed value.
                confvalue = confimport[section][key]["value"]
                if key == "xmltv_offset":
                    # Keep offsets like "+0000" intact.
                    confvalue = str(confvalue)
                elif isint(confvalue):
                    confvalue = int(confvalue)
                elif isfloat(confvalue):
                    confvalue = float(confvalue)
                elif is_arithmetic(confvalue):
                    # NOTE(review): eval() executes text from the config
                    # file (gated by is_arithmetic); consider
                    # ast.literal_eval or a safe arithmetic evaluator.
                    confvalue = eval(confvalue)
                elif "," in confvalue:
                    confvalue = confvalue.split(",")
                elif str(confvalue).lower() in ["none"]:
                    confvalue = None
                elif str(confvalue).lower() in ["false"]:
                    confvalue = False
                elif str(confvalue).lower() in ["true"]:
                    confvalue = True

                self.dict[section][key] = confvalue

                self.conf_default[section][key]["value"] = confvalue

                # Per-key flags controlling where the value may be edited;
                # missing flags default to False.
                for config_option in ["config_web_hidden", "config_file", "config_web"]:
                    if config_option not in list(confimport[section][key].keys()):
                        config_option_value = False
                    else:
                        config_option_value = confimport[section][key][config_option]
                        if str(config_option_value).lower() in ["none"]:
                            config_option_value = None
                        elif str(config_option_value).lower() in ["false"]:
                            config_option_value = False
                        elif str(config_option_value).lower() in ["true"]:
                            config_option_value = True
                    self.conf_default[section][key][config_option] = config_option_value
|
|
||||||
|
    def read_ini_config(self, conffilepath):
        """Overlay the user's INI file onto self.dict.

        Values are coerced like read_json_config; keys whose JSON defaults
        set config_file=False may not be overridden from the INI.
        """
        config_handler = configparser.ConfigParser()
        config_handler.read(conffilepath)
        for each_section in config_handler.sections():
            if each_section.lower() not in list(self.dict.keys()):
                self.dict[each_section.lower()] = {}
            for (each_key, each_val) in config_handler.items(each_section):
                # Coerce the raw INI string to a typed value.
                if not each_val:
                    each_val = None
                elif each_key == "xmltv_offset":
                    # Keep offsets like "+0000" intact.
                    each_val = str(each_val)
                elif each_val.lower() in ["none"]:
                    each_val = None
                elif each_val.lower() in ["false"]:
                    each_val = False
                elif each_val.lower() in ["true"]:
                    each_val = True
                elif isint(each_val):
                    each_val = int(each_val)
                elif isfloat(each_val):
                    each_val = float(each_val)
                elif is_arithmetic(each_val):
                    # NOTE(review): eval() on user-config text; consider
                    # ast.literal_eval or a safe arithmetic evaluator.
                    each_val = eval(each_val)
                elif "," in each_val:
                    each_val = each_val.split(",")

                # Respect the config_file flag from the JSON defaults.
                import_val = True
                if each_section in list(self.conf_default.keys()):
                    if each_key in list(self.conf_default[each_section].keys()):
                        if not self.conf_default[each_section][each_key]["config_file"]:
                            import_val = False

                if import_val:
                    self.dict[each_section.lower()][each_key.lower()] = each_val
|
|
||||||
|
def write(self, section, key, value):
|
||||||
|
|
||||||
|
if not value:
|
||||||
|
value = None
|
||||||
|
if value.lower() in ["none"]:
|
||||||
|
value = None
|
||||||
|
elif value.lower() in ["false"]:
|
||||||
|
value = False
|
||||||
|
elif value.lower() in ["true"]:
|
||||||
|
value = True
|
||||||
|
elif isint(value):
|
||||||
|
value = int(value)
|
||||||
|
elif isfloat(value):
|
||||||
|
value = float(value)
|
||||||
|
elif isinstance(value, list):
|
||||||
|
",".join(value)
|
||||||
|
|
||||||
|
if section == self.dict["main"]["dictpopname"]:
|
||||||
|
self.dict["origin"][key] = value
|
||||||
|
else:
|
||||||
|
self.dict[section][key] = value
|
||||||
|
|
||||||
|
config_handler = configparser.ConfigParser()
|
||||||
|
config_handler.read(self.config_file)
|
||||||
|
|
||||||
|
if not config_handler.has_section(section):
|
||||||
|
config_handler.add_section(section)
|
||||||
|
|
||||||
|
config_handler.set(section, key, str(value))
|
||||||
|
|
||||||
|
with open(self.config_file, 'w') as config_file:
|
||||||
|
config_handler.write(config_file)
|
||||||
|
|
||||||
|
    def config_verification(self):
        """Validate the merged configuration and normalize derived values.

        Raises ConfigurationError for missing required options, an invalid
        EPG method, a bad cache directory or an unknown stream type.
        """

        # main/required lists "section/key" items that must be non-empty.
        if self.dict["main"]["required"]:
            required_missing = []
            if isinstance(self.dict["main"]["required"], str):
                self.dict["main"]["required"] = [self.dict["main"]["required"]]
            if len(self.dict["main"]["required"]):
                for req_item in self.dict["main"]["required"]:
                    req_section = req_item.split("/")[0]
                    req_key = req_item.split("/")[1]
                    if not self.dict[req_section][req_key]:
                        required_missing.append(req_item)
            if len(required_missing):
                raise fHDHR.exceptions.ConfigurationError("Required configuration options missing: %s" % ", ".join(required_missing))

        # Rename the origin-specific section to the generic "origin" key.
        self.dict["origin"] = self.dict.pop(self.dict["main"]["dictpopname"])

        if isinstance(self.dict["epg"]["valid_epg_methods"], str):
            self.dict["epg"]["valid_epg_methods"] = [self.dict["epg"]["valid_epg_methods"]]

        # Validate epg/method entries; the first one becomes the default.
        if self.dict["epg"]["method"] and self.dict["epg"]["method"] not in ["None"]:
            if isinstance(self.dict["epg"]["method"], str):
                self.dict["epg"]["method"] = [self.dict["epg"]["method"]]
            epg_methods = []
            for epg_method in self.dict["epg"]["method"]:
                if epg_method == self.dict["main"]["dictpopname"] or epg_method == "origin":
                    epg_methods.append("origin")
                elif epg_method in ["None"]:
                    raise fHDHR.exceptions.ConfigurationError("Invalid EPG Method. Exiting...")
                elif epg_method in self.dict["epg"]["valid_epg_methods"]:
                    epg_methods.append(epg_method)
                else:
                    raise fHDHR.exceptions.ConfigurationError("Invalid EPG Method. Exiting...")
            # NOTE(review): epg_methods is built (with "origin" aliasing)
            # but never stored back to self.dict["epg"]["method"] --
            # confirm whether that is intended.
            self.dict["epg"]["def_method"] = self.dict["epg"]["method"][0]

        # Generate and persist a random instance uuid on first run.
        if not self.dict["main"]["uuid"]:
            self.dict["main"]["uuid"] = ''.join(random.choice("hijklmnopqrstuvwxyz") for i in range(8))
            self.write('main', 'uuid', self.dict["main"]["uuid"])

        # An optional user-supplied cache directory overrides the default.
        if self.dict["main"]["cache_dir"]:
            if not pathlib.Path(self.dict["main"]["cache_dir"]).is_dir():
                raise fHDHR.exceptions.ConfigurationError("Invalid Cache Directory. Exiting...")
            self.internal["paths"]["cache_dir"] = pathlib.Path(self.dict["main"]["cache_dir"])
        cache_dir = self.internal["paths"]["cache_dir"]

        # Logs live under the cache dir; create the folder if needed.
        logs_dir = pathlib.Path(cache_dir).joinpath('logs')
        self.internal["paths"]["logs_dir"] = logs_dir
        if not logs_dir.is_dir():
            logs_dir.mkdir()

        self.dict["database"]["path"] = pathlib.Path(cache_dir).joinpath('fhdhr.db')

        if self.dict["fhdhr"]["stream_type"] not in ["direct", "ffmpeg", "vlc"]:
            raise fHDHR.exceptions.ConfigurationError("Invalid stream type. Exiting...")

        # discovery_address falls back to the bind address; "0.0.0.0"
        # (or unset) leaves discovery disabled.
        if not self.dict["fhdhr"]["discovery_address"] and self.dict["fhdhr"]["address"] != "0.0.0.0":
            self.dict["fhdhr"]["discovery_address"] = self.dict["fhdhr"]["address"]
        if not self.dict["fhdhr"]["discovery_address"] or self.dict["fhdhr"]["discovery_address"] == "0.0.0.0":
            self.dict["fhdhr"]["discovery_address"] = None
|
|
||||||
|
def logging_setup(self):
|
||||||
|
|
||||||
|
log_level = self.dict["logging"]["level"].upper()
|
||||||
|
|
||||||
|
# Create a custom logger
|
||||||
|
logging.basicConfig(format='%(name)s - %(levelname)s - %(message)s', level=log_level)
|
||||||
|
logger = logging.getLogger('fHDHR')
|
||||||
|
log_file = os.path.join(self.internal["paths"]["logs_dir"], 'fHDHR.log')
|
||||||
|
|
||||||
|
# Create handlers
|
||||||
|
# c_handler = logging.StreamHandler()
|
||||||
|
f_handler = logging.FileHandler(log_file)
|
||||||
|
# c_handler.setLevel(log_level)
|
||||||
|
f_handler.setLevel(log_level)
|
||||||
|
|
||||||
|
# Create formatters and add it to handlers
|
||||||
|
# c_format = logging.Formatter('%(name)s - %(levelname)s - %(message)s')
|
||||||
|
f_format = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
|
||||||
|
# c_handler.setFormatter(c_format)
|
||||||
|
f_handler.setFormatter(f_format)
|
||||||
|
|
||||||
|
# Add handlers to the logger
|
||||||
|
# logger.addHandler(c_handler)
|
||||||
|
logger.addHandler(f_handler)
|
||||||
|
return logger
|
||||||
|
|
||||||
|
def __getattr__(self, name):
|
||||||
|
''' will only get called for undefined attributes '''
|
||||||
|
if name in list(self.dict.keys()):
|
||||||
|
return self.dict[name]
|
||||||
405
fHDHR/db/__init__.py
Normal file
@ -0,0 +1,405 @@
|
|||||||
|
# coding=utf-8
|
||||||
|
|
||||||
|
import json
|
||||||
|
import os.path
|
||||||
|
import traceback
|
||||||
|
|
||||||
|
from sqlalchemy import Column, create_engine, String, Text
|
||||||
|
from sqlalchemy.engine.url import URL
|
||||||
|
from sqlalchemy.exc import OperationalError, SQLAlchemyError
|
||||||
|
from sqlalchemy.ext.declarative import declarative_base
|
||||||
|
from sqlalchemy.orm import scoped_session, sessionmaker
|
||||||
|
|
||||||
|
|
||||||
|
def _deserialize(value):
|
||||||
|
if value is None:
|
||||||
|
return None
|
||||||
|
# sqlite likes to return ints for strings that look like ints, even though
|
||||||
|
# the column type is string. That's how you do dynamic typing wrong.
|
||||||
|
value = str(value)
|
||||||
|
# Just in case someone's mucking with the DB in a way we can't account for,
|
||||||
|
# ignore json parsing errors
|
||||||
|
try:
|
||||||
|
value = json.loads(value)
|
||||||
|
except ValueError:
|
||||||
|
pass
|
||||||
|
return value
|
||||||
|
|
||||||
|
|
||||||
|
# Declarative base shared by all ORM models below.
BASE = declarative_base()
# Table options applied when the backend is MySQL (ignored elsewhere).
MYSQL_TABLE_ARGS = {'mysql_engine': 'InnoDB',
                    'mysql_charset': 'utf8mb4',
                    'mysql_collate': 'utf8mb4_unicode_ci'}
||||||
|
|
||||||
|
|
||||||
|
class ChannelValues(BASE):
    """Key/value row scoped by (channel, namespace, key)."""
    __tablename__ = 'channel_values'
    __table_args__ = MYSQL_TABLE_ARGS
    channel = Column(String(255), primary_key=True)
    namespace = Column(String(255), primary_key=True)
    key = Column(String(255), primary_key=True)
    value = Column(Text())  # JSON-serialized value
|
|
||||||
|
|
||||||
|
class ProgramValues(BASE):
    """Key/value row scoped by (program, namespace, key)."""
    __tablename__ = 'program_values'
    __table_args__ = MYSQL_TABLE_ARGS
    program = Column(String(255), primary_key=True)
    namespace = Column(String(255), primary_key=True)
    key = Column(String(255), primary_key=True)
    value = Column(Text())  # JSON-serialized value
|
|
||||||
|
|
||||||
|
class CacheValues(BASE):
    """Key/value row scoped by (cacheitem, namespace, key)."""
    __tablename__ = 'cache_values'
    __table_args__ = MYSQL_TABLE_ARGS
    cacheitem = Column(String(255), primary_key=True)
    namespace = Column(String(255), primary_key=True)
    key = Column(String(255), primary_key=True)
    value = Column(Text())  # JSON-serialized value
|
|
||||||
|
|
||||||
|
class fHDHRValues(BASE):
    """Generic key/value row scoped by (item, namespace, key)."""
    __tablename__ = 'fhdhr_values'
    __table_args__ = MYSQL_TABLE_ARGS
    item = Column(String(255), primary_key=True)
    namespace = Column(String(255), primary_key=True)
    key = Column(String(255), primary_key=True)
    value = Column(Text())  # JSON-serialized value
|
|
||||||
|
|
||||||
|
class fHDHRdb(object):
    """SQLAlchemy-backed key/value storage for channels, programs, cache
    items and miscellaneous fHDHR state."""

    def __init__(self, settings):
        self.config = settings
        # Example URLs:
        # MySQL - mysql://username:password@localhost/db
        # SQLite - sqlite:////cache/path/default.db
        self.type = self.config.dict["database"]["type"]

        # Handle SQLite explicitly as a default
        if self.type == 'sqlite':
            path = self.config.dict["database"]["path"]
            path = os.path.expanduser(path)
            self.filename = path
            self.url = 'sqlite:///%s' % path
        # Otherwise, handle all other database engines
        else:
            query = {}
            # Pick a SQLAlchemy driver per engine, honoring an explicit
            # database/driver override from the config.
            if self.type == 'mysql':
                drivername = self.config.dict["database"]["driver"] or 'mysql'
                query = {'charset': 'utf8mb4'}
            elif self.type == 'postgres':
                drivername = self.config.dict["database"]["driver"] or 'postgresql'
            elif self.type == 'oracle':
                drivername = self.config.dict["database"]["driver"] or 'oracle'
            elif self.type == 'mssql':
                drivername = self.config.dict["database"]["driver"] or 'mssql+pymssql'
            elif self.type == 'firebird':
                drivername = self.config.dict["database"]["driver"] or 'firebird+fdb'
            elif self.type == 'sybase':
                drivername = self.config.dict["database"]["driver"] or 'sybase+pysybase'
            else:
                raise Exception('Unknown db_type')

            db_user = self.config.dict["database"]["user"]
            db_pass = self.config.dict["database"]["pass"]
            db_host = self.config.dict["database"]["host"]
            db_port = self.config.dict["database"]["port"]  # Optional
            db_name = self.config.dict["database"]["name"]  # Optional, depending on DB

            # Ensure we have all our variables defined
            if db_user is None or db_pass is None or db_host is None:
                raise Exception('Please make sure the following core '
                                'configuration values are defined: '
                                'db_user, db_pass, db_host')
            self.url = URL(drivername=drivername, username=db_user,
                           password=db_pass, host=db_host, port=db_port,
                           database=db_name, query=query)

        # pool_recycle avoids handing out connections the server may have
        # already closed after sitting idle.
        self.engine = create_engine(self.url, pool_recycle=3600)

        # Catch any errors connecting to database
        try:
            self.engine.connect()
        except OperationalError:
            print("OperationalError: Unable to connect to database.")
            raise

        # Create our tables
        BASE.metadata.create_all(self.engine)

        # Thread-safe session factory used by the accessor methods below.
        self.ssession = scoped_session(sessionmaker(bind=self.engine))
|
|
||||||
|
def connect(self):
|
||||||
|
if self.type != 'sqlite':
|
||||||
|
print(
|
||||||
|
"Raw connection requested when 'db_type' is not 'sqlite':\n"
|
||||||
|
"Consider using 'db.session()' to get a SQLAlchemy session "
|
||||||
|
"instead here:\n%s",
|
||||||
|
traceback.format_list(traceback.extract_stack()[:-1])[-1][:-1])
|
||||||
|
return self.engine.raw_connection()
|
||||||
|
|
||||||
|
    def session(self):
        """Return a session from the scoped session factory."""
        return self.ssession()
||||||
|
|
||||||
|
    def execute(self, *args, **kwargs):
        """Execute a statement directly on the engine (pass-through)."""
        return self.engine.execute(*args, **kwargs)
||||||
|
|
||||||
|
    def get_uri(self):
        """Return the database URL this instance was configured with."""
        return self.url
||||||
|
|
||||||
|
# Channel Values
|
||||||
|
|
||||||
|
    def set_channel_value(self, channel, key, value, namespace='default'):
        """Insert or update a per-channel value (stored JSON-serialized).

        Channel names are case-insensitive (normalized to lowercase).
        Rolls back and re-raises on SQLAlchemy errors.
        """
        channel = channel.lower()
        value = json.dumps(value, ensure_ascii=False)
        session = self.ssession()
        try:
            result = session.query(ChannelValues) \
                .filter(ChannelValues.channel == channel)\
                .filter(ChannelValues.namespace == namespace)\
                .filter(ChannelValues.key == key) \
                .one_or_none()
            # ChannelValues exists, update
            if result:
                result.value = value
                session.commit()
            # DNE - Insert
            else:
                new_channelvalue = ChannelValues(channel=channel, namespace=namespace, key=key, value=value)
                session.add(new_channelvalue)
                session.commit()
        except SQLAlchemyError:
            session.rollback()
            raise
        finally:
            session.close()
||||||
|
|
||||||
|
    def get_channel_value(self, channel, key, namespace='default'):
        """Return the deserialized per-channel value, or None if absent."""
        channel = channel.lower()
        session = self.ssession()
        try:
            result = session.query(ChannelValues) \
                .filter(ChannelValues.channel == channel)\
                .filter(ChannelValues.namespace == namespace)\
                .filter(ChannelValues.key == key) \
                .one_or_none()
            if result is not None:
                result = result.value
            # _deserialize(None) is None, so a missing row yields None.
            return _deserialize(result)
        except SQLAlchemyError:
            session.rollback()
            raise
        finally:
            session.close()
||||||
|
|
||||||
|
    def delete_channel_value(self, channel, key, namespace='default'):
        """Delete the per-channel value; a missing row is a no-op."""
        channel = channel.lower()
        session = self.ssession()
        try:
            result = session.query(ChannelValues) \
                .filter(ChannelValues.channel == channel)\
                .filter(ChannelValues.namespace == namespace)\
                .filter(ChannelValues.key == key) \
                .one_or_none()
            # ChannelValues exists, delete
            if result:
                session.delete(result)
                session.commit()
        except SQLAlchemyError:
            session.rollback()
            raise
        finally:
            session.close()
||||||
|
|
||||||
|
# Program Values
|
||||||
|
|
||||||
|
    def set_program_value(self, program, key, value, namespace='default'):
        """Insert or update a per-program value (stored JSON-serialized).

        Program identifiers are case-insensitive (lowercased). Rolls back
        and re-raises on SQLAlchemy errors.
        """
        program = program.lower()
        value = json.dumps(value, ensure_ascii=False)
        session = self.ssession()
        try:
            result = session.query(ProgramValues) \
                .filter(ProgramValues.program == program)\
                .filter(ProgramValues.namespace == namespace)\
                .filter(ProgramValues.key == key) \
                .one_or_none()
            # ProgramValue exists, update
            if result:
                result.value = value
                session.commit()
            # DNE - Insert
            else:
                new_programvalue = ProgramValues(program=program, namespace=namespace, key=key, value=value)
                session.add(new_programvalue)
                session.commit()
        except SQLAlchemyError:
            session.rollback()
            raise
        finally:
            session.close()
||||||
|
|
||||||
|
def get_program_value(self, program, key, namespace='default'):
|
||||||
|
program = program.lower()
|
||||||
|
session = self.ssession()
|
||||||
|
try:
|
||||||
|
result = session.query(ProgramValues) \
|
||||||
|
.filter(ProgramValues.program == program)\
|
||||||
|
.filter(ProgramValues.namespace == namespace)\
|
||||||
|
.filter(ProgramValues.key == key) \
|
||||||
|
.one_or_none()
|
||||||
|
if result is not None:
|
||||||
|
result = result.value
|
||||||
|
return _deserialize(result)
|
||||||
|
except SQLAlchemyError:
|
||||||
|
session.rollback()
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
session.close()
|
||||||
|
|
||||||
|
def delete_program_value(self, program, key, namespace='default'):
|
||||||
|
program = program.lower()
|
||||||
|
session = self.ssession()
|
||||||
|
try:
|
||||||
|
result = session.query(ProgramValues) \
|
||||||
|
.filter(ProgramValues.program == program)\
|
||||||
|
.filter(ProgramValues.namespace == namespace)\
|
||||||
|
.filter(ProgramValues.key == key) \
|
||||||
|
.one_or_none()
|
||||||
|
# ProgramValue exists, delete
|
||||||
|
if result:
|
||||||
|
session.delete(result)
|
||||||
|
session.commit()
|
||||||
|
except SQLAlchemyError:
|
||||||
|
session.rollback()
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
session.close()
|
||||||
|
|
||||||
|
# Cache Values
|
||||||
|
|
||||||
|
def set_cacheitem_value(self, cacheitem, key, value, namespace='default'):
|
||||||
|
cacheitem = cacheitem.lower()
|
||||||
|
value = json.dumps(value, ensure_ascii=False)
|
||||||
|
session = self.ssession()
|
||||||
|
try:
|
||||||
|
result = session.query(CacheValues) \
|
||||||
|
.filter(CacheValues.cacheitem == cacheitem)\
|
||||||
|
.filter(CacheValues.namespace == namespace)\
|
||||||
|
.filter(CacheValues.key == key) \
|
||||||
|
.one_or_none()
|
||||||
|
# ProgramValue exists, update
|
||||||
|
if result:
|
||||||
|
result.value = value
|
||||||
|
session.commit()
|
||||||
|
# DNE - Insert
|
||||||
|
else:
|
||||||
|
new_cacheitemvalue = CacheValues(cacheitem=cacheitem, namespace=namespace, key=key, value=value)
|
||||||
|
session.add(new_cacheitemvalue)
|
||||||
|
session.commit()
|
||||||
|
except SQLAlchemyError:
|
||||||
|
session.rollback()
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
session.close()
|
||||||
|
|
||||||
|
def get_cacheitem_value(self, cacheitem, key, namespace='default'):
|
||||||
|
cacheitem = cacheitem.lower()
|
||||||
|
session = self.ssession()
|
||||||
|
try:
|
||||||
|
result = session.query(CacheValues) \
|
||||||
|
.filter(CacheValues.cacheitem == cacheitem)\
|
||||||
|
.filter(CacheValues.namespace == namespace)\
|
||||||
|
.filter(CacheValues.key == key) \
|
||||||
|
.one_or_none()
|
||||||
|
if result is not None:
|
||||||
|
result = result.value
|
||||||
|
return _deserialize(result)
|
||||||
|
except SQLAlchemyError:
|
||||||
|
session.rollback()
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
session.close()
|
||||||
|
|
||||||
|
def delete_cacheitem_value(self, cacheitem, key, namespace='default'):
|
||||||
|
cacheitem = cacheitem.lower()
|
||||||
|
session = self.ssession()
|
||||||
|
try:
|
||||||
|
result = session.query(CacheValues) \
|
||||||
|
.filter(CacheValues.cacheitem == cacheitem)\
|
||||||
|
.filter(CacheValues.namespace == namespace)\
|
||||||
|
.filter(CacheValues.key == key) \
|
||||||
|
.one_or_none()
|
||||||
|
# ProgramValue exists, delete
|
||||||
|
if result:
|
||||||
|
session.delete(result)
|
||||||
|
session.commit()
|
||||||
|
except SQLAlchemyError:
|
||||||
|
session.rollback()
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
session.close()
|
||||||
|
|
||||||
|
# fHDHR Values
|
||||||
|
|
||||||
|
def set_fhdhr_value(self, item, key, value, namespace='default'):
|
||||||
|
item = item.lower()
|
||||||
|
value = json.dumps(value, ensure_ascii=False)
|
||||||
|
session = self.ssession()
|
||||||
|
try:
|
||||||
|
result = session.query(fHDHRValues) \
|
||||||
|
.filter(fHDHRValues.item == item)\
|
||||||
|
.filter(fHDHRValues.namespace == namespace)\
|
||||||
|
.filter(fHDHRValues.key == key) \
|
||||||
|
.one_or_none()
|
||||||
|
# ProgramValue exists, update
|
||||||
|
if result:
|
||||||
|
result.value = value
|
||||||
|
session.commit()
|
||||||
|
# DNE - Insert
|
||||||
|
else:
|
||||||
|
new_cacheitemvalue = fHDHRValues(item=item, namespace=namespace, key=key, value=value)
|
||||||
|
session.add(new_cacheitemvalue)
|
||||||
|
session.commit()
|
||||||
|
except SQLAlchemyError:
|
||||||
|
session.rollback()
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
session.close()
|
||||||
|
|
||||||
|
def get_fhdhr_value(self, item, key, namespace='default'):
|
||||||
|
item = item.lower()
|
||||||
|
session = self.ssession()
|
||||||
|
try:
|
||||||
|
result = session.query(fHDHRValues) \
|
||||||
|
.filter(fHDHRValues.item == item)\
|
||||||
|
.filter(fHDHRValues.namespace == namespace)\
|
||||||
|
.filter(fHDHRValues.key == key) \
|
||||||
|
.one_or_none()
|
||||||
|
if result is not None:
|
||||||
|
result = result.value
|
||||||
|
return _deserialize(result)
|
||||||
|
except SQLAlchemyError:
|
||||||
|
session.rollback()
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
session.close()
|
||||||
|
|
||||||
|
def delete_fhdhr_value(self, item, key, namespace='default'):
|
||||||
|
item = item.lower()
|
||||||
|
session = self.ssession()
|
||||||
|
try:
|
||||||
|
result = session.query(fHDHRValues) \
|
||||||
|
.filter(fHDHRValues.item == item)\
|
||||||
|
.filter(fHDHRValues.namespace == namespace)\
|
||||||
|
.filter(fHDHRValues.key == key) \
|
||||||
|
.one_or_none()
|
||||||
|
# ProgramValue exists, delete
|
||||||
|
if result:
|
||||||
|
session.delete(result)
|
||||||
|
session.commit()
|
||||||
|
except SQLAlchemyError:
|
||||||
|
session.rollback()
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
session.close()
|
||||||
23
fHDHR/device/__init__.py
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
from .channels import Channels
|
||||||
|
from .epg import EPG
|
||||||
|
from .tuners import Tuners
|
||||||
|
from .images import imageHandler
|
||||||
|
from .ssdp import SSDPServer
|
||||||
|
from .cluster import fHDHR_Cluster
|
||||||
|
|
||||||
|
|
||||||
|
class fHDHR_Device():
    """Container that wires together the device-level subsystems.

    Construction order matters: channels come first because the EPG,
    tuners, and image handler all take them (directly or via the EPG).
    """

    def __init__(self, fhdhr, originwrapper, alternative_epg):

        # Channel registry, fed by the origin service wrapper.
        self.channels = Channels(fhdhr, originwrapper)

        # EPG consumes channel data, the origin, and any alternative EPG sources.
        self.epg = EPG(fhdhr, self.channels, originwrapper, alternative_epg)

        self.tuners = Tuners(fhdhr, self.epg, self.channels)

        self.images = imageHandler(fhdhr, self.epg)

        # SSDP discovery server, also handed to the cluster coordinator below.
        self.ssdp = SSDPServer(fhdhr)

        self.cluster = fHDHR_Cluster(fhdhr, self.ssdp)
|
||||||
118
fHDHR/device/channels/__init__.py
Normal file
@ -0,0 +1,118 @@
|
|||||||
|
import time
|
||||||
|
|
||||||
|
from fHDHR.tools import humanized_time
|
||||||
|
|
||||||
|
from .channel import Channel
|
||||||
|
from .chan_ident import Channel_IDs
|
||||||
|
|
||||||
|
|
||||||
|
class Channels():
    """Registry of Channel objects, backed by the database and the origin service."""

    def __init__(self, fhdhr, originwrapper):
        self.fhdhr = fhdhr

        self.origin = originwrapper

        self.id_system = Channel_IDs(fhdhr)

        # Mapping of fhdhr channel id -> Channel object.
        self.list = {}

        self.get_db_channels()

    def get_channel_obj(self, keyfind, valfind):
        """Return the first Channel whose `keyfind` attribute equals `valfind`, or None.

        BUGFIX: the original used `next(generator) or None`, which raises
        StopIteration when nothing matches; passing a default to next() makes a
        miss return None as intended. Note a miss now surfaces as None (and an
        AttributeError in callers that dereference it) rather than StopIteration.
        """
        fhdhr_ids = [x["id"] for x in self.get_channels()]
        if keyfind == "number":
            return next((self.list[fhdhr_id] for fhdhr_id in fhdhr_ids
                         if self.list[fhdhr_id].number == valfind), None)
        return next((self.list[fhdhr_id] for fhdhr_id in fhdhr_ids
                     if self.list[fhdhr_id].dict[keyfind] == valfind), None)

    def get_channel_list(self, keyfind):
        """Return the `keyfind` value of every known channel as a list."""
        fhdhr_ids = [x["id"] for x in self.get_channels()]
        if keyfind == "number":
            return [self.list[fhdhr_id].number for fhdhr_id in fhdhr_ids]
        return [self.list[fhdhr_id].dict[keyfind] for fhdhr_id in fhdhr_ids]

    def set_channel_status(self, keyfind, valfind, updatedict):
        """Apply `updatedict` onto the matching channel's stored dict."""
        self.get_channel_obj(keyfind, valfind).set_status(updatedict)

    def set_channel_enablement_all(self, enablement):
        """Enable/disable/toggle every known channel."""
        for fhdhr_id in [x["id"] for x in self.get_channels()]:
            self.list[fhdhr_id].set_enablement(enablement)

    def set_channel_enablement(self, keyfind, valfind, enablement):
        """Enable/disable/toggle the matching channel."""
        self.get_channel_obj(keyfind, valfind).set_enablement(enablement)

    def set_channel_favorite(self, keyfind, valfind, enablement):
        """Set ("+") or clear ("-") the favorite flag on the matching channel."""
        self.get_channel_obj(keyfind, valfind).set_favorite(enablement)

    def get_db_channels(self):
        """Load previously saved channels from the database into self.list."""
        self.fhdhr.logger.info("Checking for Channel information stored in the database.")
        channel_ids = self.fhdhr.db.get_fhdhr_value("channels", "list") or []
        if len(channel_ids):
            self.fhdhr.logger.info("Found %s existing channels in the database." % str(len(channel_ids)))
            for channel_id in channel_ids:
                channel_obj = Channel(self.fhdhr, self.id_system, channel_id=channel_id)
                # The Channel object is authoritative for its own id.
                channel_id = channel_obj.dict["id"]
                self.list[channel_id] = channel_obj

    def save_db_channels(self):
        """Persist the current list of channel ids to the database."""
        channel_ids = [x["id"] for x in self.get_channels()]
        self.fhdhr.db.set_fhdhr_value("channels", "list", channel_ids)

    def get_channels(self, forceupdate=False):
        """Pull Channels from origin.

        Output a list of channel dicts. Without `forceupdate`, the cached
        in-memory/database channels are returned as-is.
        """

        if not len(list(self.list.keys())):
            self.get_db_channels()

        if not forceupdate:
            return [self.list[x].dict for x in list(self.list.keys())]

        # Origin ids we already know about, for duplicate detection below.
        channel_origin_id_list = [str(self.list[x].dict["origin_id"]) for x in list(self.list.keys())]

        self.fhdhr.logger.info("Performing Channel Scan.")

        channel_dict_list = self.origin.get_channels()
        self.fhdhr.logger.info("Found %s channels for %s." % (len(channel_dict_list), self.fhdhr.config.dict["main"]["servicename"]))

        self.fhdhr.logger.info("Performing Channel Import, This can take some time, Please wait.")

        newchan = 0
        chan_scan_start = time.time()
        for channel_info in channel_dict_list:

            chan_existing = str(channel_info["id"]) in channel_origin_id_list

            if chan_existing:
                channel_obj = self.get_channel_obj("origin_id", channel_info["id"])
            else:
                channel_obj = Channel(self.fhdhr, self.id_system, origin_id=channel_info["id"])

            channel_id = channel_obj.dict["id"]
            # Refresh core metadata (name/callsign/number/thumbnail/...) from origin.
            channel_obj.basics(channel_info)

            if not chan_existing:
                self.list[channel_id] = channel_obj
                newchan += 1

        self.fhdhr.logger.info("Channel Import took %s" % humanized_time(time.time() - chan_scan_start))

        if not newchan:
            newchan = "no"
        self.fhdhr.logger.info("Found %s NEW channels." % newchan)

        self.fhdhr.logger.info("Total Channel Count: %s" % len(self.list.keys()))
        self.save_db_channels()

        self.fhdhr.db.set_fhdhr_value("channels", "scanned_time", time.time())

        return [self.list[x].dict for x in list(self.list.keys())]

    def get_channel_stream(self, stream_args):
        """Resolve a stream for the requested channel number via the origin service."""
        return self.origin.get_channel_stream(self.get_channel_dict("number", stream_args["channel"]), stream_args)

    def get_channel_dict(self, keyfind, valfind):
        """Return the raw metadata dict of the matching channel."""
        return self.get_channel_obj(keyfind, valfind).dict
|
||||||
45
fHDHR/device/channels/chan_ident.py
Normal file
@ -0,0 +1,45 @@
|
|||||||
|
import uuid
|
||||||
|
|
||||||
|
|
||||||
|
class Channel_IDs():
    """Assigns stable unique channel ids and picks unused channel numbers."""

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def get(self, origin_id):
        """Return the fHDHR id already mapped to `origin_id`, or assign a new one."""
        existing_ids = self.fhdhr.db.get_fhdhr_value("channels", "list") or []
        existing_channel_info = [self.fhdhr.db.get_channel_value(cid, "dict") or {} for cid in existing_ids]
        for existing_channel in existing_channel_info:
            if existing_channel["origin_id"] == origin_id:
                return existing_channel["id"]
        return self.assign()

    def assign(self):
        """Generate a UUID not already in use, persist it in the id list, return it."""
        existing_ids = self.fhdhr.db.get_fhdhr_value("channels", "list") or []
        channel_id = None
        while not channel_id:
            unique_id = str(uuid.uuid4())
            if str(unique_id) not in existing_ids:
                channel_id = str(unique_id)
        existing_ids.append(channel_id)
        self.fhdhr.db.set_fhdhr_value("channels", "list", existing_ids)
        return channel_id

    def get_number(self, channel_id):
        """Return the channel's stored number, or the first free number in 1000-1999.

        BUGFIX: the original returned the comprehension's LIST (e.g. ``['1000.0']``)
        when a stored number existed; callers treat the result as the number
        itself, so the first (only) element is returned instead.
        """
        existing_ids = self.fhdhr.db.get_fhdhr_value("channels", "list") or []
        existing_channel_info = [self.fhdhr.db.get_channel_value(cid, "dict") or {} for cid in existing_ids]

        cnumber = [existing_channel["number"] for existing_channel in existing_channel_info
                   if existing_channel["id"] == channel_id]
        if cnumber:
            return cnumber[0]

        # Collect numbers (including dotted subnumbers) already taken.
        used_numbers = []
        for existing_channel in existing_channel_info:
            if existing_channel["subnumber"]:
                number = "%s.%s" % (existing_channel["number"], existing_channel["subnumber"])
            else:
                number = existing_channel["number"]
            used_numbers.append(number)

        # First unused number in the 1000-1999 range; stored as "N.0" strings.
        for i in range(1000, 2000):
            if str(float(i)) not in used_numbers:
                break
        return str(float(i))
|
||||||
223
fHDHR/device/channels/channel.py
Normal file
@ -0,0 +1,223 @@
|
|||||||
|
import time
|
||||||
|
|
||||||
|
|
||||||
|
class Channel():
    """A single channel: wraps a persisted metadata dict keyed by a stable id."""

    def __init__(self, fhdhr, id_system, origin_id=None, channel_id=None):
        self.fhdhr = fhdhr

        self.id_system = id_system

        # Resolve an id: reuse the one mapped to origin_id, else assign fresh.
        if not channel_id:
            if origin_id:
                channel_id = id_system.get(origin_id)
            else:
                channel_id = id_system.assign()
        self.channel_id = channel_id

        self.dict = self.fhdhr.db.get_channel_value(str(channel_id), "dict") or self.default_dict
        self.verify_dict()

        self.fhdhr.db.set_channel_value(self.dict["id"], "dict", self.dict)

    @property
    def number(self):
        """Display number; rendered "N.S" when a subnumber exists."""
        if self.dict["subnumber"]:
            return "%s.%s" % (self.dict["number"], self.dict["subnumber"])
        else:
            return self.dict["number"]

    @property
    def thumbnail(self):
        """Best available thumbnail URL, falling back to a generated placeholder."""
        # NOTE: a stored None stringifies to "none" and deliberately maps to the
        # generated image, not to origin_thumbnail.
        if str(self.dict["thumbnail"]).lower() in ["none"]:
            return self.generic_image_url
        elif self.dict["thumbnail"]:
            return self.dict["thumbnail"]
        elif self.dict["origin_thumbnail"]:
            return self.dict["origin_thumbnail"]
        else:
            return self.generic_image_url

    @property
    def epgdict(self):
        """Skeleton EPG entry for this channel (empty listing)."""
        return {
                "callsign": self.dict["callsign"],
                "name": self.dict["name"],
                "number": self.number,
                "id": self.dict["origin_id"],
                "thumbnail": self.thumbnail,
                "listing": [],
                }

    def verify_dict(self):
        """Backfill any keys missing from an older stored dict, and normalize
        a dotted number into number/subnumber parts."""
        default_dict = self.default_dict
        for key in list(default_dict.keys()):
            if key not in list(self.dict.keys()):
                self.dict[key] = default_dict[key]
        if self.dict["number"]:
            if "." in self.dict["number"]:
                self.dict["subnumber"] = self.dict["number"].split(".")[1]
                self.dict["number"] = self.dict["number"].split(".")[0]

    def basics(self, channel_info):
        """Some Channel Information is Critical.

        Fill in missing/empty origin fields with sensible fallbacks, copy them
        into origin_* keys, and only adopt them locally when no local override
        exists. Persists the dict at the end.
        """

        # name: fall back to our id.
        if "name" not in list(channel_info.keys()):
            channel_info["name"] = self.dict["id"]
        elif not channel_info["name"]:
            channel_info["name"] = self.dict["id"]
        self.dict["origin_name"] = channel_info["name"]
        if not self.dict["name"]:
            self.dict["name"] = self.dict["origin_name"]

        # origin id: fall back to the name.
        if "id" not in list(channel_info.keys()):
            channel_info["id"] = channel_info["name"]
        elif not channel_info["id"]:
            channel_info["id"] = channel_info["name"]
        self.dict["origin_id"] = channel_info["id"]

        # callsign: fall back to the name.
        if "callsign" not in list(channel_info.keys()):
            channel_info["callsign"] = channel_info["name"]
        elif not channel_info["callsign"]:
            channel_info["callsign"] = channel_info["name"]
        self.dict["origin_callsign"] = channel_info["callsign"]
        if not self.dict["callsign"]:
            self.dict["callsign"] = self.dict["origin_callsign"]

        # tags: default to an empty list.
        if "tags" not in list(channel_info.keys()):
            channel_info["tags"] = []
        elif not channel_info["tags"]:
            channel_info["tags"] = []
        self.dict["origin_tags"] = channel_info["tags"]
        if not self.dict["tags"]:
            self.dict["tags"] = self.dict["origin_tags"]

        # number: ask the id system for a free one when origin supplies none.
        if "number" not in list(channel_info.keys()):
            channel_info["number"] = self.id_system.get_number(channel_info["id"])
        elif not channel_info["number"]:
            channel_info["number"] = self.id_system.get_number(channel_info["id"])
        self.dict["origin_number"] = str(channel_info["number"])
        if not self.dict["number"]:
            self.dict["number"] = self.dict["origin_number"].split(".")[0]
            try:
                self.dict["subnumber"] = self.dict["origin_number"].split(".")[1]
            except IndexError:
                self.dict["subnumber"] = None
        else:
            if "." in self.dict["number"]:
                self.dict["subnumber"] = self.dict["number"].split(".")[1]
                self.dict["number"] = self.dict["number"].split(".")[0]

        if "thumbnail" not in list(channel_info.keys()):
            channel_info["thumbnail"] = None
        self.dict["origin_thumbnail"] = channel_info["thumbnail"]
        if not self.dict["thumbnail"]:
            self.dict["thumbnail"] = self.dict["origin_thumbnail"]

        if "HD" not in list(channel_info.keys()):
            channel_info["HD"] = 0
        self.dict["HD"] = channel_info["HD"]

        # Only honor an origin-provided enablement on first import.
        if "enabled" in list(channel_info.keys()):
            if "created" not in list(self.dict.keys()):
                self.dict["enabled"] = channel_info["enabled"]

        if "created" not in list(self.dict.keys()):
            self.dict["created"] = time.time()

        self.fhdhr.db.set_channel_value(self.dict["id"], "dict", self.dict)

    @property
    def default_dict(self):
        """Fresh metadata dict for a newly created channel."""
        return {
                "id": str(self.channel_id), "origin_id": None,
                "name": None, "origin_name": None,
                "callsign": None, "origin_callsign": None,
                "number": None, "subnumber": None, "origin_number": None,
                "tags": [], "origin_tags": [],
                "thumbnail": None, "origin_thumbnail": None,
                "enabled": True, "favorite": 0,
                "HD": 0,
                }

    def destroy(self):
        """Delete this channel from the database and the persisted id list."""
        self.fhdhr.db.delete_channel_value(self.dict["id"], "dict")
        channel_ids = self.fhdhr.db.get_fhdhr_value("channels", "list") or []
        if self.dict["id"] in channel_ids:
            channel_ids.remove(self.dict["id"])
        self.fhdhr.db.set_fhdhr_value("channels", "list", channel_ids)

    def set_status(self, updatedict):
        """Apply arbitrary key updates (numbers coerced to str) and persist."""
        for key in list(updatedict.keys()):
            if key == "number":
                updatedict[key] = str(updatedict[key])
            self.dict[key] = updatedict[key]
        self.fhdhr.db.set_channel_value(self.dict["id"], "dict", self.dict)

    @property
    def lineup_dict(self):
        """HDHomeRun-style lineup entry for this channel."""
        return {
                 'GuideNumber': self.number,
                 'GuideName': self.dict['name'],
                 'Tags': ",".join(self.dict['tags']),
                 'URL': self.hdhr_stream_url,
                 'HD': self.dict["HD"],
                 "Favorite": self.dict["favorite"],
               }

    @property
    def generic_image_url(self):
        """URL of a generated placeholder image labeled with the channel number."""
        return "/api/images?method=generate&type=channel&message=%s" % self.number

    @property
    def hdhr_stream_url(self):
        return '/auto/%s' % self.hdhr_stream_ident

    @property
    def hdhr_stream_ident(self):
        return 'v%s' % self.number

    @property
    def rmg_stream_url(self):
        return "/devices/%s/media/%s" % (self.fhdhr.config.dict["main"]["uuid"], self.rmg_stream_ident)

    @property
    def rmg_stream_ident(self):
        return "id://%s" % self.number

    @property
    def api_stream_url(self):
        return '/api/tuners?method=%s&channel=%s' % (self.fhdhr.config.dict["fhdhr"]["stream_type"], self.number)

    @property
    def m3u_url(self):
        return '/api/m3u?method=get&channel=%s' % self.number

    def set_favorite(self, enablement):
        """Mark ("+") or unmark ("-") this channel as a favorite and persist.

        BUGFIX: the second branch originally re-tested "+", so "-" could never
        clear the flag; it now tests "-".
        """
        if enablement == "+":
            self.dict["favorite"] = 1
        elif enablement == "-":
            self.dict["favorite"] = 0
        # BUGFIX: persist under the "dict" key that __init__ reads back; the
        # original wrote to "info", so the change never survived a reload.
        self.fhdhr.db.set_channel_value(self.dict["id"], "dict", self.dict)

    def set_enablement(self, enablement):
        """Enable, disable, or toggle this channel and persist the change."""
        if enablement == "disable":
            self.dict["enabled"] = False
        elif enablement == "enable":
            self.dict["enabled"] = True
        elif enablement == "toggle":
            if self.dict["enabled"]:
                self.dict["enabled"] = False
            else:
                self.dict["enabled"] = True
        # BUGFIX: persist under "dict" (see set_favorite) so the state survives.
        self.fhdhr.db.set_channel_value(self.dict["id"], "dict", self.dict)

    def __getattr__(self, name):
        ''' will only get called for undefined attributes '''
        if name in list(self.dict.keys()):
            return self.dict[name]
        else:
            return None
|
||||||
158
fHDHR/device/cluster.py
Normal file
@ -0,0 +1,158 @@
|
|||||||
|
from collections import OrderedDict
|
||||||
|
|
||||||
|
|
||||||
|
class fHDHR_Cluster():
    """Keeps this fHDHR instance synchronized with peer instances.

    The cluster state is a dict of location URL -> {"base_url", "name"},
    persisted in the database under ("cluster", "dict"). Peers are reached
    over HTTP via their /api/cluster endpoints.
    """

    def __init__(self, fhdhr, ssdp):
        self.fhdhr = fhdhr

        self.ssdp = ssdp

        self.friendlyname = self.fhdhr.config.dict["fhdhr"]["friendlyname"]

        # Only attempt a cluster sync when a discovery address is configured.
        if self.fhdhr.config.dict["fhdhr"]["discovery_address"]:
            self.startup_sync()

    def cluster(self):
        # Stored cluster mapping, or a single-entry mapping of just this instance.
        return self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()

    def get_cluster_dicts_web(self):
        """Name-sorted list of peers (excluding ourselves) for web display;
        returns None when there are no peers."""
        fhdhr_list = self.cluster()
        locations = []
        for location in list(fhdhr_list.keys()):
            item_dict = {
                "base_url": fhdhr_list[location]["base_url"],
                "name": fhdhr_list[location]["name"]
                }
            if item_dict["base_url"] != self.fhdhr.api.base:
                locations.append(item_dict)
        if len(locations):
            locations = sorted(locations, key=lambda i: i['name'])
            return locations
        else:
            return None

    def get_list(self):
        """Map each known/detected location to whether it has joined the cluster."""
        cluster = self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        return_dict = {}
        # Everything already in the stored cluster (other than us) is joined.
        for location in list(cluster.keys()):
            if location != self.fhdhr.api.base:
                return_dict[location] = {
                    "Joined": True
                    }

        # Locations seen via SSDP detection but not yet in the cluster.
        detected_list = self.ssdp.detect_method.get()
        for location in detected_list:
            if location not in list(cluster.keys()):
                return_dict[location] = {
                    "Joined": False
                    }
        return_dict = OrderedDict(sorted(return_dict.items()))
        return return_dict

    def default_cluster(self):
        """Cluster containing only this instance."""
        defdict = {}
        defdict[self.fhdhr.api.base] = {
            "base_url": self.fhdhr.api.base,
            "name": self.friendlyname
            }
        return defdict

    def startup_sync(self):
        """On startup, verify each stored peer still lists us; leave if any doesn't."""
        self.fhdhr.logger.info("Syncronizing with Cluster.")
        cluster = self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        if not len(list(cluster.keys())):
            self.fhdhr.logger.info("No Cluster Found.")
        else:
            self.fhdhr.logger.info("Found %s clustered services." % str(len(list(cluster.keys()))))
            for location in list(cluster.keys()):
                if location != self.fhdhr.api.base:
                    self.fhdhr.logger.debug("Checking Cluster Syncronization information from %s." % location)
                    sync_url = "%s/api/cluster?method=get" % location
                    try:
                        sync_open = self.fhdhr.web.session.get(sync_url)
                        retrieved_cluster = sync_open.json()
                        # A peer that no longer knows us means we were removed.
                        if self.fhdhr.api.base not in list(retrieved_cluster.keys()):
                            return self.leave()
                    except self.fhdhr.web.exceptions.ConnectionError:
                        self.fhdhr.logger.error("Unreachable: %s" % location)

    def leave(self):
        """Reset our stored cluster to just this instance."""
        self.fhdhr.logger.info("Leaving cluster.")
        self.fhdhr.db.set_fhdhr_value("cluster", "dict", self.default_cluster())

    def disconnect(self):
        """Tell every peer we are departing, then leave the cluster locally."""
        cluster = self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        for location in list(cluster.keys()):
            if location != self.fhdhr.api.base:
                self.fhdhr.logger.info("Informing %s that I am departing the Cluster." % location)
                sync_url = "%s/api/cluster?method=del&location=%s" % (location, self.fhdhr.api.base)
                try:
                    self.fhdhr.web.session.get(sync_url)
                except self.fhdhr.web.exceptions.ConnectionError:
                    self.fhdhr.logger.error("Unreachable: %s" % location)
        self.leave()

    def sync(self, location):
        """Replace our stored cluster with the one retrieved from `location`."""
        sync_url = "%s/api/cluster?method=get" % location
        try:
            sync_open = self.fhdhr.web.session.get(sync_url)
            self.fhdhr.db.set_fhdhr_value("cluster", "dict", sync_open.json())
        except self.fhdhr.web.exceptions.ConnectionError:
            self.fhdhr.logger.error("Unreachable: %s" % location)

    def push_sync(self):
        """Ask every peer to pull the cluster state from us."""
        cluster = self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        for location in list(cluster.keys()):
            if location != self.fhdhr.api.base:
                sync_url = "%s/api/cluster?method=sync&location=%s" % (location, self.fhdhr.api.base_quoted)
                try:
                    self.fhdhr.web.session.get(sync_url)
                except self.fhdhr.web.exceptions.ConnectionError:
                    self.fhdhr.logger.error("Unreachable: %s" % location)

    def add(self, location):
        """Add `location` to the cluster, merge its known peers, and push the result.

        On any connection failure the location is rolled back out of the
        stored cluster and the method returns early.
        """
        cluster = self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        if location not in list(cluster.keys()):
            self.fhdhr.logger.info("Adding %s to cluster." % location)
            cluster[location] = {"base_url": location}

            # Fetch the peer's friendly name from its HDHR discovery endpoint.
            location_info_url = "%s/hdhr/discover.json" % location
            try:
                location_info_req = self.fhdhr.web.session.get(location_info_url)
            except self.fhdhr.web.exceptions.ConnectionError:
                self.fhdhr.logger.error("Unreachable: %s" % location)
                del cluster[location]
                self.fhdhr.db.set_fhdhr_value("cluster", "dict", cluster)
                return
            location_info = location_info_req.json()
            cluster[location]["name"] = location_info["FriendlyName"]

            # Merge any peers the new member already knows about.
            cluster_info_url = "%s/api/cluster?method=get" % location
            try:
                cluster_info_req = self.fhdhr.web.session.get(cluster_info_url)
            except self.fhdhr.web.exceptions.ConnectionError:
                self.fhdhr.logger.error("Unreachable: %s" % location)
                del cluster[location]
                self.fhdhr.db.set_fhdhr_value("cluster", "dict", cluster)
                return
            cluster_info = cluster_info_req.json()
            for cluster_key in list(cluster_info.keys()):
                if cluster_key not in list(cluster.keys()):
                    cluster[cluster_key] = cluster_info[cluster_key]

            self.fhdhr.db.set_fhdhr_value("cluster", "dict", cluster)
            self.push_sync()

    def remove(self, location):
        """Remove `location` from the cluster, tell it to leave, and push the result."""
        cluster = self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        if location in list(cluster.keys()):
            self.fhdhr.logger.info("Removing %s from cluster." % location)
            del cluster[location]
            sync_url = "%s/api/cluster?method=leave" % location
            try:
                self.fhdhr.web.session.get(sync_url)
            except self.fhdhr.web.exceptions.ConnectionError:
                self.fhdhr.logger.error("Unreachable: %s" % location)
            self.push_sync()
            self.fhdhr.db.set_fhdhr_value("cluster", "dict", cluster)
|
||||||
321
fHDHR/device/epg/__init__.py
Normal file
@ -0,0 +1,321 @@
|
|||||||
|
import os
|
||||||
|
import time
|
||||||
|
import datetime
|
||||||
|
import threading
|
||||||
|
|
||||||
|
from fHDHR.tools import channel_sort
|
||||||
|
|
||||||
|
from .blocks import blocksEPG
|
||||||
|
|
||||||
|
|
||||||
|
class EPG():
|
||||||
|
|
||||||
|
    def __init__(self, fhdhr, channels, originwrapper, alternative_epg):
        self.fhdhr = fhdhr

        self.origin = originwrapper
        self.channels = channels
        self.alternative_epg = alternative_epg

        # Cached EPG data, keyed by method name.
        self.epgdict = {}

        self.epg_methods = self.fhdhr.config.dict["epg"]["method"]
        # Drop empty/None placeholders from the configured valid-methods list.
        self.valid_epg_methods = [x for x in self.fhdhr.config.dict["epg"]["valid_epg_methods"] if x and x not in [None, "None"]]

        # Built-in EPG providers; additional methods are registered by
        # epg_method_selfadd() below.
        self.blocks = blocksEPG(self.fhdhr, self.channels)
        self.epg_handling = {
            "origin": self.origin,
            "blocks": self.blocks,
            }
        self.epg_method_selfadd()

        self.def_method = self.fhdhr.config.dict["epg"]["def_method"]
        # Per-method update interval: a method-specific "update_frequency"
        # config value wins, otherwise the global [epg] update_frequency.
        self.sleeptime = {}
        for epg_method in self.epg_methods:
            if epg_method in list(self.fhdhr.config.dict.keys()):
                if "update_frequency" in list(self.fhdhr.config.dict[epg_method].keys()):
                    self.sleeptime[epg_method] = self.fhdhr.config.dict[epg_method]["update_frequency"]
            if epg_method not in list(self.sleeptime.keys()):
                self.sleeptime[epg_method] = self.fhdhr.config.dict["epg"]["update_frequency"]

        self.epg_update_url = "/api/epg?method=update"

        # NOTE(review): the updater thread is created here but not started in
        # this method — presumably started elsewhere; confirm.
        self.fhdhr.threads["epg"] = threading.Thread(target=self.run)
|
||||||
|
|
||||||
|
def clear_epg_cache(self, method=None):
|
||||||
|
|
||||||
|
if not method:
|
||||||
|
method = self.def_method
|
||||||
|
if (method == self.fhdhr.config.dict["main"]["dictpopname"] or
|
||||||
|
method not in self.fhdhr.config.dict["epg"]["valid_epg_methods"]):
|
||||||
|
method = "origin"
|
||||||
|
|
||||||
|
epgtypename = method
|
||||||
|
if method in [self.fhdhr.config.dict["main"]["dictpopname"], "origin"]:
|
||||||
|
epgtypename = self.fhdhr.config.dict["main"]["dictpopname"]
|
||||||
|
|
||||||
|
self.fhdhr.logger.info("Clearing %s EPG cache." % epgtypename)
|
||||||
|
|
||||||
|
if hasattr(self.epg_handling[method], 'clear_cache'):
|
||||||
|
self.epg_handling[method].clear_cache()
|
||||||
|
|
||||||
|
if method in list(self.epgdict.keys()):
|
||||||
|
del self.epgdict[method]
|
||||||
|
|
||||||
|
self.fhdhr.db.delete_fhdhr_value("epg_dict", method)
|
||||||
|
|
||||||
|
def whats_on_now(self, channel_number, method=None, chan_obj=None, chan_dict=None):
|
||||||
|
nowtime = time.time()
|
||||||
|
epgdict = self.get_epg(method)
|
||||||
|
try:
|
||||||
|
listings = epgdict[channel_number]["listing"]
|
||||||
|
except KeyError:
|
||||||
|
listings = []
|
||||||
|
for listing in listings:
|
||||||
|
for time_item in ["time_start", "time_end"]:
|
||||||
|
time_value = listing[time_item]
|
||||||
|
if str(time_value).endswith("+00:00"):
|
||||||
|
listing[time_item] = datetime.datetime.strptime(time_value, '%Y%m%d%H%M%S +00:00').timestamp()
|
||||||
|
elif str(time_value).endswith("+0000"):
|
||||||
|
listing[time_item] = datetime.datetime.strptime(time_value, '%Y%m%d%H%M%S +0000').timestamp()
|
||||||
|
else:
|
||||||
|
listing[time_item] = int(time_value)
|
||||||
|
if int(listing["time_start"]) <= nowtime <= int(listing["time_end"]):
|
||||||
|
epgitem = epgdict[channel_number].copy()
|
||||||
|
epgitem["listing"] = [listing]
|
||||||
|
return epgitem
|
||||||
|
epgitem = epgdict[channel_number].copy()
|
||||||
|
epgitem["listing"] = [self.blocks.empty_listing(chan_obj=None, chan_dict=None)]
|
||||||
|
return epgitem
|
||||||
|
|
||||||
|
def whats_on_allchans(self, method=None):
|
||||||
|
|
||||||
|
if not method:
|
||||||
|
method = self.def_method
|
||||||
|
if (method == self.fhdhr.config.dict["main"]["dictpopname"] or
|
||||||
|
method not in self.fhdhr.config.dict["epg"]["valid_epg_methods"]):
|
||||||
|
method = "origin"
|
||||||
|
|
||||||
|
channel_guide_dict = {}
|
||||||
|
epgdict = self.get_epg(method)
|
||||||
|
epgdict = epgdict.copy()
|
||||||
|
for c in list(epgdict.keys()):
|
||||||
|
if method in ["blocks", "origin", self.fhdhr.config.dict["main"]["dictpopname"]]:
|
||||||
|
chan_obj = self.channels.get_channel_obj("origin_id", epgdict[c]["id"])
|
||||||
|
channel_number = chan_obj.number
|
||||||
|
epgdict[channel_number] = epgdict.pop(c)
|
||||||
|
epgdict[channel_number]["name"] = chan_obj.dict["name"]
|
||||||
|
epgdict[channel_number]["callsign"] = chan_obj.dict["callsign"]
|
||||||
|
epgdict[channel_number]["number"] = chan_obj.number
|
||||||
|
epgdict[channel_number]["id"] = chan_obj.dict["origin_id"]
|
||||||
|
epgdict[channel_number]["thumbnail"] = chan_obj.thumbnail
|
||||||
|
else:
|
||||||
|
chan_obj = None
|
||||||
|
channel_number = c
|
||||||
|
whatson = self.whats_on_now(channel_number, method, chan_dict=epgdict, chan_obj=chan_obj)
|
||||||
|
if whatson:
|
||||||
|
channel_guide_dict[channel_number] = whatson
|
||||||
|
return channel_guide_dict
|
||||||
|
|
||||||
|
def get_epg(self, method=None):
|
||||||
|
|
||||||
|
if not method:
|
||||||
|
method = self.def_method
|
||||||
|
if (method == self.fhdhr.config.dict["main"]["dictpopname"] or
|
||||||
|
method not in self.fhdhr.config.dict["epg"]["valid_epg_methods"]):
|
||||||
|
method = "origin"
|
||||||
|
|
||||||
|
if method in list(self.epgdict.keys()):
|
||||||
|
return self.epgdict[method]
|
||||||
|
|
||||||
|
self.update(method)
|
||||||
|
self.epgdict[method] = self.fhdhr.db.get_fhdhr_value("epg_dict", method) or {}
|
||||||
|
return self.epgdict[method]
|
||||||
|
|
||||||
|
def get_thumbnail(self, itemtype, itemid):
|
||||||
|
if itemtype == "channel":
|
||||||
|
chandict = self.find_channel_dict(itemid)
|
||||||
|
return chandict["thumbnail"]
|
||||||
|
elif itemtype == "content":
|
||||||
|
progdict = self.find_program_dict(itemid)
|
||||||
|
return progdict["thumbnail"]
|
||||||
|
return None
|
||||||
|
|
||||||
|
def find_channel_dict(self, channel_id):
|
||||||
|
epgdict = self.get_epg()
|
||||||
|
channel_list = [epgdict[x] for x in list(epgdict.keys())]
|
||||||
|
return next(item for item in channel_list if item["id"] == channel_id) or None
|
||||||
|
|
||||||
|
def find_program_dict(self, event_id):
|
||||||
|
epgdict = self.get_epg()
|
||||||
|
event_list = []
|
||||||
|
for channel in list(epgdict.keys()):
|
||||||
|
event_list.extend(epgdict[channel]["listing"])
|
||||||
|
return next(item for item in event_list if item["id"] == event_id) or None
|
||||||
|
|
||||||
|
def epg_method_selfadd(self):
|
||||||
|
self.fhdhr.logger.info("Checking for Alternative EPG methods.")
|
||||||
|
new_epgtype_list = []
|
||||||
|
for entry in os.scandir(self.fhdhr.config.internal["paths"]["alternative_epg"]):
|
||||||
|
if entry.is_file():
|
||||||
|
if entry.name[0] != '_' and entry.name.endswith(".py"):
|
||||||
|
new_epgtype_list.append(str(entry.name[:-3]))
|
||||||
|
elif entry.is_dir():
|
||||||
|
if entry.name[0] != '_':
|
||||||
|
new_epgtype_list.append(str(entry.name))
|
||||||
|
for method in new_epgtype_list:
|
||||||
|
self.fhdhr.logger.info("Found %s EPG method." % method)
|
||||||
|
self.epg_handling[method] = eval("self.alternative_epg.%s.%sEPG(self.fhdhr, self.channels)" % (method, method))
|
||||||
|
|
||||||
|
def update(self, method=None):
|
||||||
|
|
||||||
|
if (not method or
|
||||||
|
method not in self.fhdhr.config.dict["epg"]["valid_epg_methods"]):
|
||||||
|
method = self.def_method
|
||||||
|
|
||||||
|
if method == self.fhdhr.config.dict["main"]["dictpopname"]:
|
||||||
|
method = "origin"
|
||||||
|
|
||||||
|
epgtypename = method
|
||||||
|
if method in [self.fhdhr.config.dict["main"]["dictpopname"], "origin"]:
|
||||||
|
epgtypename = self.fhdhr.config.dict["main"]["dictpopname"]
|
||||||
|
|
||||||
|
self.fhdhr.logger.info("Updating %s EPG cache." % epgtypename)
|
||||||
|
if method == 'origin':
|
||||||
|
programguide = self.epg_handling['origin'].update_epg(self.channels)
|
||||||
|
else:
|
||||||
|
programguide = self.epg_handling[method].update_epg()
|
||||||
|
|
||||||
|
# sort the channel listings by time stamp
|
||||||
|
for cnum in list(programguide.keys()):
|
||||||
|
programguide[cnum]["listing"] = sorted(programguide[cnum]["listing"], key=lambda i: i['time_start'])
|
||||||
|
|
||||||
|
# Gernate Block periods for between EPG data, if missing
|
||||||
|
clean_prog_guide = {}
|
||||||
|
desired_start_time = (datetime.datetime.today() + datetime.timedelta(days=self.fhdhr.config.dict["epg"]["reverse_days"])).timestamp()
|
||||||
|
desired_end_time = (datetime.datetime.today() + datetime.timedelta(days=self.fhdhr.config.dict["epg"]["forward_days"])).timestamp()
|
||||||
|
for cnum in list(programguide.keys()):
|
||||||
|
|
||||||
|
if cnum not in list(clean_prog_guide.keys()):
|
||||||
|
clean_prog_guide[cnum] = programguide[cnum].copy()
|
||||||
|
clean_prog_guide[cnum]["listing"] = []
|
||||||
|
|
||||||
|
if method in ["blocks", "origin", self.fhdhr.config.dict["main"]["dictpopname"]]:
|
||||||
|
chan_obj = self.channels.get_channel_obj("origin_id", programguide[cnum]["id"])
|
||||||
|
else:
|
||||||
|
chan_obj = None
|
||||||
|
|
||||||
|
# Generate Blocks for Channels containing No Lisiings
|
||||||
|
if not len(programguide[cnum]["listing"]):
|
||||||
|
timestamps = self.blocks.timestamps_between(desired_start_time, desired_end_time)
|
||||||
|
clean_prog_dicts = self.blocks.empty_channel_epg(timestamps, chan_dict=programguide[cnum], chan_obj=chan_obj)
|
||||||
|
clean_prog_guide[cnum]["listing"].extend(clean_prog_dicts)
|
||||||
|
|
||||||
|
else:
|
||||||
|
|
||||||
|
# Clean Timetamps from old xmltv method to timestamps
|
||||||
|
progindex = 0
|
||||||
|
for program_item in programguide[cnum]["listing"]:
|
||||||
|
for time_item in ["time_start", "time_end"]:
|
||||||
|
time_value = programguide[cnum]["listing"][progindex][time_item]
|
||||||
|
if str(time_value).endswith("+00:00"):
|
||||||
|
programguide[cnum]["listing"][progindex][time_item] = datetime.datetime.strptime(time_value, '%Y%m%d%H%M%S +00:00').timestamp()
|
||||||
|
elif str(time_value).endswith("+0000"):
|
||||||
|
programguide[cnum]["listing"][progindex][time_item] = datetime.datetime.strptime(time_value, '%Y%m%d%H%M%S +0000').timestamp()
|
||||||
|
else:
|
||||||
|
programguide[cnum]["listing"][progindex][time_item] = int(time_value)
|
||||||
|
progindex += 1
|
||||||
|
|
||||||
|
# Generate time before the listing actually starts
|
||||||
|
first_prog_time = programguide[cnum]["listing"][0]['time_start']
|
||||||
|
if desired_start_time < first_prog_time:
|
||||||
|
timestamps = self.blocks.timestamps_between(desired_start_time, first_prog_time)
|
||||||
|
clean_prog_dicts = self.blocks.empty_channel_epg(timestamps, chan_dict=programguide[cnum], chan_obj=chan_obj)
|
||||||
|
clean_prog_guide[cnum]["listing"].extend(clean_prog_dicts)
|
||||||
|
|
||||||
|
# Generate time blocks between events if chunks of time are missing
|
||||||
|
progindex = 0
|
||||||
|
for program_item in programguide[cnum]["listing"]:
|
||||||
|
try:
|
||||||
|
nextprog_dict = programguide[cnum]["listing"][progindex + 1]
|
||||||
|
except IndexError:
|
||||||
|
nextprog_dict = None
|
||||||
|
if not nextprog_dict:
|
||||||
|
clean_prog_guide[cnum]["listing"].append(program_item)
|
||||||
|
else:
|
||||||
|
if nextprog_dict['time_start'] > program_item['time_end']:
|
||||||
|
timestamps = self.blocks.timestamps_between(program_item['time_end'], nextprog_dict['time_start'])
|
||||||
|
clean_prog_dicts = self.blocks.empty_channel_epg(timestamps, chan_dict=programguide[cnum], chan_obj=chan_obj)
|
||||||
|
clean_prog_guide[cnum]["listing"].extend(clean_prog_dicts)
|
||||||
|
else:
|
||||||
|
clean_prog_guide[cnum]["listing"].append(program_item)
|
||||||
|
progindex += 1
|
||||||
|
|
||||||
|
# Generate time after the listing actually ends
|
||||||
|
end_prog_time = programguide[cnum]["listing"][progindex]['time_end']
|
||||||
|
if desired_end_time > end_prog_time:
|
||||||
|
timestamps = self.blocks.timestamps_between(end_prog_time, desired_end_time)
|
||||||
|
clean_prog_dicts = self.blocks.empty_channel_epg(timestamps, chan_dict=programguide[cnum], chan_obj=chan_obj)
|
||||||
|
clean_prog_guide[cnum]["listing"].extend(clean_prog_dicts)
|
||||||
|
|
||||||
|
programguide = clean_prog_guide.copy()
|
||||||
|
|
||||||
|
# if a stock method, generate Blocks EPG for missing channels
|
||||||
|
if method in ["blocks", "origin", self.fhdhr.config.dict["main"]["dictpopname"]]:
|
||||||
|
timestamps = self.blocks.timestamps
|
||||||
|
for fhdhr_id in [x["id"] for x in self.channels.get_channels()]:
|
||||||
|
chan_obj = self.channels.list[fhdhr_id]
|
||||||
|
if str(chan_obj.number) not in list(programguide.keys()):
|
||||||
|
programguide[str(chan_obj.number)] = chan_obj.epgdict
|
||||||
|
clean_prog_dicts = self.blocks.empty_channel_epg(timestamps, chan_obj=chan_obj)
|
||||||
|
programguide[str(chan_obj.number)]["listing"].extend(clean_prog_dicts)
|
||||||
|
|
||||||
|
# Make Thumbnails for missing thumbnails
|
||||||
|
for cnum in list(programguide.keys()):
|
||||||
|
if not programguide[cnum]["thumbnail"]:
|
||||||
|
programguide[cnum]["thumbnail"] = "/api/images?method=generate&type=channel&message=%s" % programguide[cnum]["number"]
|
||||||
|
programguide[cnum]["listing"] = sorted(programguide[cnum]["listing"], key=lambda i: i['time_start'])
|
||||||
|
prog_index = 0
|
||||||
|
for program_item in programguide[cnum]["listing"]:
|
||||||
|
if not programguide[cnum]["listing"][prog_index]["thumbnail"]:
|
||||||
|
programguide[cnum]["listing"][prog_index]["thumbnail"] = programguide[cnum]["thumbnail"]
|
||||||
|
prog_index += 1
|
||||||
|
|
||||||
|
# Get Totals
|
||||||
|
total_channels = len(list(programguide.keys()))
|
||||||
|
total_programs = 0
|
||||||
|
|
||||||
|
# Sort the channels
|
||||||
|
sorted_channel_list = channel_sort(list(programguide.keys()))
|
||||||
|
sorted_chan_guide = {}
|
||||||
|
for channel in sorted_channel_list:
|
||||||
|
total_programs += len(programguide[cnum]["listing"])
|
||||||
|
sorted_chan_guide[channel] = programguide[channel]
|
||||||
|
|
||||||
|
self.epgdict[method] = sorted_chan_guide
|
||||||
|
self.fhdhr.db.set_fhdhr_value("epg_dict", method, programguide)
|
||||||
|
self.fhdhr.db.set_fhdhr_value("update_time", method, time.time())
|
||||||
|
self.fhdhr.logger.info("Wrote %s EPG cache. %s Programs for %s Channels" % (epgtypename, total_programs, total_channels))
|
||||||
|
|
||||||
|
def start(self):
|
||||||
|
self.fhdhr.logger.info("EPG Update Thread Starting")
|
||||||
|
self.fhdhr.threads["epg"].start()
|
||||||
|
|
||||||
|
def stop(self):
|
||||||
|
self.fhdhr.logger.info("EPG Update Thread Stopping")
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
time.sleep(1800)
|
||||||
|
while True:
|
||||||
|
for epg_method in self.epg_methods:
|
||||||
|
last_update_time = self.fhdhr.db.get_fhdhr_value("update_time", epg_method)
|
||||||
|
updatetheepg = False
|
||||||
|
if not last_update_time:
|
||||||
|
updatetheepg = True
|
||||||
|
elif time.time() >= (last_update_time + self.sleeptime[epg_method]):
|
||||||
|
updatetheepg = True
|
||||||
|
if updatetheepg:
|
||||||
|
self.fhdhr.api.get("%s&source=%s" % (self.epg_update_url, epg_method))
|
||||||
|
time.sleep(1800)
|
||||||
|
|
||||||
|
self.stop()
|
||||||
119
fHDHR/device/epg/blocks.py
Normal file
@ -0,0 +1,119 @@
|
|||||||
|
import datetime
|
||||||
|
|
||||||
|
|
||||||
|
class blocksEPG():
    """Fallback EPG source that fabricates placeholder "Unavailable" listings
    in fixed-size time blocks for channels with no real guide data."""

    def __init__(self, fhdhr, channels):
        self.fhdhr = fhdhr

        self.channels = channels

    def update_epg(self):
        """Build a complete placeholder guide covering every known channel."""
        programguide = {}

        timestamps = self.timestamps

        for fhdhr_id in [x["id"] for x in self.channels.get_channels()]:
            chan_obj = self.channels.list[fhdhr_id]

            if str(chan_obj.number) not in list(programguide.keys()):
                programguide[str(chan_obj.number)] = chan_obj.epgdict

            clean_prog_dicts = self.empty_channel_epg(timestamps, chan_obj=chan_obj)
            for clean_prog_dict in clean_prog_dicts:
                programguide[str(chan_obj.number)]["listing"].append(clean_prog_dict)

        return programguide

    @property
    def timestamps(self):
        """Block timestamps spanning the configured reverse/forward-days window."""
        desired_start_time = (datetime.datetime.today() + datetime.timedelta(days=self.fhdhr.config.dict["epg"]["reverse_days"])).timestamp()
        desired_end_time = (datetime.datetime.today() + datetime.timedelta(days=self.fhdhr.config.dict["epg"]["forward_days"])).timestamp()
        return self.timestamps_between(desired_start_time, desired_end_time)

    def timestamps_between(self, starttime, endtime):
        """Split [starttime, endtime] into [epg]block_size chunks.

        The final chunk is truncated at *endtime* when the span is not an
        exact multiple of the block size.
        """
        timestamps = []
        desired_blocksize = self.fhdhr.config.dict["epg"]["block_size"]
        current_time = starttime
        while (current_time + desired_blocksize) <= endtime:
            timestampdict = {
                "time_start": current_time,
                "time_end": current_time + desired_blocksize,
                }
            timestamps.append(timestampdict)
            current_time += desired_blocksize
        if current_time < endtime:
            timestampdict = {
                "time_start": current_time,
                "time_end": endtime
                }
            timestamps.append(timestampdict)
        return timestamps

    def single_channel_epg(self, timestampdict, chan_obj=None, chan_dict=None):
        """Build one placeholder listing for the given time block.

        Raises:
            ValueError: if neither chan_obj nor chan_dict is provided.
                (FIX: previously this fell through to an UnboundLocalError
                on content_id.)
        """
        if chan_obj:
            content_id = "%s_%s" % (chan_obj.dict["origin_id"], timestampdict['time_start'])
        elif chan_dict:
            content_id = "%s_%s" % (chan_dict["id"], timestampdict['time_start'])
        else:
            raise ValueError("single_channel_epg requires chan_obj or chan_dict")

        clean_prog_dict = {
            "time_start": timestampdict['time_start'],
            "time_end": timestampdict['time_end'],
            "duration_minutes": (timestampdict['time_end'] - timestampdict['time_start']) / 60,
            "title": "Unavailable",
            "sub-title": "Unavailable",
            "description": "Unavailable",
            "rating": "N/A",
            "episodetitle": None,
            "releaseyear": None,
            "genres": [],
            "seasonnumber": None,
            "episodenumber": None,
            "isnew": False,
            "id": content_id,
            }
        if chan_obj:
            clean_prog_dict["thumbnail"] = chan_obj.thumbnail
        elif chan_dict:
            clean_prog_dict["thumbnail"] = chan_dict["thumbnail"]
        if not clean_prog_dict["thumbnail"]:
            # Fall back to a generated placeholder image for this block.
            clean_prog_dict["thumbnail"] = "/api/images?method=generate&type=content&message=%s" % content_id

        return clean_prog_dict

    def empty_channel_epg(self, timestamps, chan_obj=None, chan_dict=None):
        """Build a placeholder listing for every timestamp block."""
        return [self.single_channel_epg(timestampdict, chan_obj=chan_obj, chan_dict=chan_dict)
                for timestampdict in timestamps]

    def empty_listing(self, chan_obj=None, chan_dict=None):
        """Build a single untimed "Unavailable" listing (used as a whats-on fallback)."""
        clean_prog_dict = {
            "time_start": None,
            "time_end": None,
            "duration_minutes": None,
            "title": "Unavailable",
            "sub-title": "Unavailable",
            "description": "Unavailable",
            "rating": "N/A",
            "episodetitle": None,
            "releaseyear": None,
            "genres": [],
            "seasonnumber": None,
            "episodenumber": None,
            "isnew": False,
            "id": "Unavailable",
            }

        if chan_obj:
            clean_prog_dict["thumbnail"] = chan_obj.thumbnail
        elif chan_dict:
            clean_prog_dict["thumbnail"] = chan_dict["thumbnail"]
        else:
            clean_prog_dict["thumbnail"] = None
        if not clean_prog_dict["thumbnail"]:
            clean_prog_dict["thumbnail"] = "/api/images?method=generate&type=content&message=Unavailable"

        return clean_prog_dict
|
||||||
60
fHDHR/device/images.py
Normal file
@ -0,0 +1,60 @@
|
|||||||
|
from io import BytesIO
|
||||||
|
import PIL.Image
|
||||||
|
import PIL.ImageDraw
|
||||||
|
import PIL.ImageFont
|
||||||
|
|
||||||
|
|
||||||
|
class imageHandler():
    """Fetches EPG artwork and generates placeholder images via PIL."""

    def __init__(self, fhdhr, epg):
        self.fhdhr = fhdhr
        # FIX: epg was previously accepted but never stored, so
        # get_epg_image() crashed with AttributeError on self.epg.
        self.epg = epg

    def get_epg_image(self, image_type, content_id):
        """Return raw image bytes for a channel/content item.

        Generates a placeholder image when no thumbnail URL is known.
        """
        imageUri = self.epg.get_thumbnail(image_type, str(content_id))
        if not imageUri:
            return self.generate_image(image_type, str(content_id))

        req = self.fhdhr.web.session.get(imageUri)
        return req.content

    def getSize(self, txt, font):
        """Measure rendered text extents using a throwaway 1x1 image.

        NOTE(review): ImageDraw.textsize was removed in Pillow 10 — confirm
        the pinned Pillow version, or migrate to textbbox.
        """
        testImg = PIL.Image.new('RGB', (1, 1))
        testDraw = PIL.ImageDraw.Draw(testImg)
        return testDraw.textsize(txt, font)

    def generate_image(self, messagetype, message):
        """Render *message* centered on a bordered placeholder image; return PNG bytes.

        messagetype "channel" and "content" select the canvas size; anything
        else falls back to the channel size (FIX: unknown types previously
        raised NameError on undefined width/height).
        """
        if messagetype == "channel":
            width, height, fontsize = 360, 270, 72
        elif messagetype == "content":
            width, height, fontsize = 1080, 1440, 100
        else:
            width, height, fontsize = 360, 270, 72

        colorBackground = "#228822"
        colorText = "#717D7E"
        colorOutline = "#717D7E"
        fontname = str(self.fhdhr.config.internal["paths"]["font"])

        font = PIL.ImageFont.truetype(fontname, fontsize)
        text_width, text_height = self.getSize(message, font)
        img = PIL.Image.new('RGBA', (width+4, height+4), colorBackground)
        d = PIL.ImageDraw.Draw(img)
        # Center the text; draw a border rectangle around the canvas.
        d.text(((width-text_width)/2, (height-text_height)/2), message, fill=colorText, font=font)
        d.rectangle((0, 0, width+3, height+3), outline=colorOutline)

        s = BytesIO()
        img.save(s, 'png')
        return s.getvalue()

    def get_image_type(self, image_data):
        """Best-effort MIME sniffing from the image's magic number (defaults to JPEG)."""
        signatures = {
            '474946': "image/gif",
            '89504e': "image/png",
            'ffd8ff': "image/jpeg",
            }
        return signatures.get(image_data[0:3].hex().lower(), "image/jpeg")
|
||||||
217
fHDHR/device/ssdp/__init__.py
Normal file
@ -0,0 +1,217 @@
|
|||||||
|
# Adapted from https://github.com/MoshiBin/ssdpy and https://github.com/ZeWaren/python-upnp-ssdp-example
|
||||||
|
import socket
|
||||||
|
import struct
|
||||||
|
import time
|
||||||
|
import threading
|
||||||
|
|
||||||
|
from .ssdp_detect import fHDHR_Detect
|
||||||
|
from .rmg_ssdp import RMG_SSDP
|
||||||
|
from .hdhr_ssdp import HDHR_SSDP
|
||||||
|
|
||||||
|
|
||||||
|
class SSDPServer():
    """SSDP multicast listener and announcer.

    Answers M-SEARCH discovery requests with NOTIFY payloads, periodically
    re-announces this server, and records other fHDHR instances seen on the
    network. Adapted from https://github.com/MoshiBin/ssdpy and
    https://github.com/ZeWaren/python-upnp-ssdp-example
    """

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

        self.detect_method = fHDHR_Detect(fhdhr)

        self.fhdhr.threads["ssdp"] = threading.Thread(target=self.run)

        if (self.fhdhr.config.dict["fhdhr"]["discovery_address"] and
           self.fhdhr.config.dict["ssdp"]["enabled"]):
            self.setup_ssdp()

            self.sock.bind((self.bind_address, 1900))

            self.msearch_payload = self.create_msearch_payload()

            self.max_age = int(fhdhr.config.dict["ssdp"]["max_age"])
            # Timestamp of the last alive announcement; None forces one immediately.
            self.age_time = None

            self.rmg_ssdp = RMG_SSDP(fhdhr, self.broadcast_ip, self.max_age)
            self.hdhr_ssdp = HDHR_SSDP(fhdhr, self.broadcast_ip, self.max_age)

            self.do_alive()
            self.m_search()

    def start(self):
        """Launch the SSDP listener thread."""
        self.fhdhr.logger.info("SSDP Server Starting")
        self.fhdhr.threads["ssdp"].start()

    def stop(self):
        """Close the multicast socket, ending the listener loop."""
        self.fhdhr.logger.info("SSDP Server Stopping")
        self.sock.close()

    def run(self):
        """Listener loop: handle inbound SSDP packets and re-announce as needed."""
        while True:
            data, address = self.sock.recvfrom(1024)
            self.on_recv(data, address)
            self.do_alive()

    def do_alive(self, forcealive=False):
        """Multicast an alive NOTIFY if forced, never sent, or max-age has expired."""
        send_alive = False
        if not self.age_time:
            send_alive = True
        elif forcealive:
            send_alive = True
        elif time.time() >= (self.age_time + self.max_age):
            send_alive = True

        if send_alive:
            self.fhdhr.logger.info("Sending Alive message to network.")
            self.do_notify(self.broadcast_address_tuple)
            self.age_time = time.time()

    def do_notify(self, address):
        """Send our NOTIFY payload(s) (HDHR, plus RMG when enabled) to *address*."""
        notify_list = []

        hdhr_notify = self.hdhr_ssdp.get()
        notify_list.append(hdhr_notify)

        if self.fhdhr.config.dict["rmg"]["enabled"]:
            rmg_notify = self.rmg_ssdp.get()
            notify_list.append(rmg_notify)

        for notifydata in notify_list:

            self.fhdhr.logger.debug("Created {}".format(notifydata))
            try:
                self.sock.sendto(notifydata, address)
            except OSError as e:
                # Most commonly: We received a multicast from an IP not in our subnet
                self.fhdhr.logger.debug("Unable to send NOTIFY: %s" % e)

    def on_recv(self, data, address):
        """Parse one inbound SSDP datagram and dispatch M-SEARCH / NOTIFY handling."""
        self.fhdhr.logger.debug("Received packet from {}: {}".format(address, data))

        try:
            header, payload = data.decode().split('\r\n\r\n')[:2]
        except ValueError:
            self.fhdhr.logger.error("Error with Received packet from {}: {}".format(address, data))
            return

        lines = header.split('\r\n')
        cmd = lines[0].split(' ')
        # Normalize "Key: Value" to "Key:Value", drop empty lines, and build a
        # lower-cased header dict.
        lines = map(lambda x: x.replace(': ', ':', 1), lines[1:])
        lines = filter(lambda x: len(x) > 0, lines)

        headers = [x.split(':', 1) for x in lines]
        headers = dict(map(lambda x: (x[0].lower(), x[1]), headers))

        if cmd[0] == 'M-SEARCH' and cmd[1] == '*':
            # SSDP discovery
            self.fhdhr.logger.debug("Received qualifying M-SEARCH from {}".format(address))
            self.fhdhr.logger.debug("M-SEARCH data: {}".format(headers))

            self.do_notify(address)

        elif cmd[0] == 'NOTIFY' and cmd[1] == '*':
            # SSDP presence: track other fHDHR servers announcing themselves.
            self.fhdhr.logger.debug("NOTIFY data: {}".format(headers))
            try:
                if headers["server"].startswith("fHDHR"):
                    savelocation = headers["location"].split("/device.xml")[0]
                    if savelocation.endswith("/hdhr"):
                        savelocation = savelocation.replace("/hdhr", '')
                    elif savelocation.endswith("/rmg"):
                        savelocation = savelocation.replace("/rmg", '')
                    # Ignore our own announcements.
                    if savelocation != self.fhdhr.api.base:
                        self.detect_method.set(savelocation)
            except KeyError:
                return
        else:
            self.fhdhr.logger.debug('Unknown SSDP command %s %s' % (cmd[0], cmd[1]))

    def m_search(self):
        """Multicast an M-SEARCH asking other devices to announce themselves."""
        data = self.msearch_payload
        self.sock.sendto(data, self.broadcast_address_tuple)

    def create_msearch_payload(self):
        """Render the ssdp:all M-SEARCH request as bytes."""
        data = ''
        data_command = "M-SEARCH * HTTP/1.1"

        data_dict = {
            "HOST": "%s:%s" % (self.broadcast_ip, 1900),
            "MAN": "ssdp:discover",
            "ST": "ssdp:all",
            "MX": 1,
            }

        data += "%s\r\n" % data_command
        for data_key in list(data_dict.keys()):
            data += "%s:%s\r\n" % (data_key, data_dict[data_key])
        data += "\r\n"

        return data.encode("utf-8")

    def setup_ssdp(self):
        """Create and configure the multicast socket from config."""
        self.sock = None

        self.proto = self.setup_proto()
        self.iface = self.fhdhr.config.dict["ssdp"]["iface"]
        self.address = self.fhdhr.config.dict["ssdp"]["multicast_address"]
        self.setup_addressing()

        self.sock = socket.socket(self.af_type, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
        self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

        self.setup_interface()

        self.setup_multicasting()

    def setup_proto(self):
        """Validate and return the configured protocol ("ipv4" or "ipv6")."""
        proto = self.fhdhr.config.dict["ssdp"]["proto"]
        allowed_protos = ("ipv4", "ipv6")
        if proto not in allowed_protos:
            raise ValueError("Invalid proto - expected one of {}".format(allowed_protos))
        return proto

    def setup_addressing(self):
        """Derive address family, multicast group, and bind address from proto.

        FIX: the ipv4 branch previously set a misspelled attribute
        ("broadcase_address_tuple") while ipv6 set "broadcast_address_tuple";
        senders read only the misspelled name, so ipv6 crashed with
        AttributeError. Both branches now set broadcast_address_tuple.
        """
        if self.proto == "ipv4":
            self.af_type = socket.AF_INET
            self.broadcast_ip = "239.255.255.250"
            self.broadcast_address_tuple = (self.broadcast_ip, 1900)
            self.bind_address = "0.0.0.0"
        elif self.proto == "ipv6":
            self.af_type = socket.AF_INET6
            self.broadcast_ip = "ff02::c"
            self.broadcast_address_tuple = (self.broadcast_ip, 1900, 0, 0)
            self.bind_address = "::"

    def setup_interface(self):
        # Bind to specific interface
        if self.iface is not None:
            self.sock.setsockopt(socket.SOL_SOCKET, getattr(socket, "SO_BINDTODEVICE", 25), self.iface)

    def setup_multicasting(self):
        # Subscribe to multicast address
        if self.proto == "ipv4":
            mreq = socket.inet_aton(self.broadcast_ip)
            if self.address is not None:
                mreq += socket.inet_aton(self.address)
            else:
                mreq += struct.pack(b"@I", socket.INADDR_ANY)
            self.sock.setsockopt(
                socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
            # Allow multicasts on loopback devices (necessary for testing)
            self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, 1)
        elif self.proto == "ipv6":
            # In IPv6 we use the interface index, not the address when subscribing to the group
            mreq = socket.inet_pton(socket.AF_INET6, self.broadcast_ip)
            if self.iface is not None:
                iface_index = socket.if_nametoindex(self.iface)
                # Send outgoing packets from the same interface
                self.sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_IF, iface_index)
                mreq += struct.pack(b"@I", iface_index)
            else:
                mreq += socket.inet_pton(socket.AF_INET6, "::")
            self.sock.setsockopt(
                socket.IPPROTO_IPV6, socket.IPV6_JOIN_GROUP, mreq,
            )
            self.sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_LOOP, 1)
|
||||||
49
fHDHR/device/ssdp/hdhr_ssdp.py
Normal file
@ -0,0 +1,49 @@
|
|||||||
|
|
||||||
|
|
||||||
|
class HDHR_SSDP():
    """Builds (and caches) the HDHomeRun-flavored SSDP NOTIFY payload."""

    def __init__(self, fhdhr, broadcast_ip, max_age):
        self.fhdhr = fhdhr

        # Cached rendered payload; built lazily on first get().
        self.ssdp_content = None

        self.broadcast_ip = broadcast_ip
        self.device_xml_path = '/hdhr/device.xml'

        self.cable_schema = "urn:schemas-opencable-com:service:Security:1"
        self.ota_schema = "urn:schemas-upnp-org:device:MediaServer:1"

        # "cable" advertises the opencable schema; "antenna" and anything
        # unrecognized fall back to the OTA schema.
        if self.fhdhr.config.dict["fhdhr"]["reporting_tuner_type"].lower() == "cable":
            self.schema = self.cable_schema
        else:
            self.schema = self.ota_schema

        self.max_age = max_age

    def get(self):
        """Return the NOTIFY payload as bytes, rendering it on first use."""
        if self.ssdp_content:
            return self.ssdp_content.encode("utf-8")

        data = ''
        data_command = "NOTIFY * HTTP/1.1"

        data_dict = {
            # FIX: previously hard-coded 239.255.255.250; use the group we
            # were constructed with.
            "HOST": "%s:%s" % (self.broadcast_ip, 1900),
            "NT": self.schema,
            "NTS": "ssdp:alive",
            "USN": 'uuid:%s::%s' % (self.fhdhr.config.dict["main"]["uuid"], self.schema),
            "SERVER": 'fHDHR/%s UPnP/1.0' % self.fhdhr.version,
            "LOCATION": "%s%s" % (self.fhdhr.api.base, self.device_xml_path),
            "AL": "%s%s" % (self.fhdhr.api.base, self.device_xml_path),
            # FIX: was emitted as the malformed header "Cache-Control:max-age=:N".
            "CACHE-CONTROL": "max-age=%s" % self.max_age,
            }

        data += "%s\r\n" % data_command
        for data_key in list(data_dict.keys()):
            data += "%s:%s\r\n" % (data_key, data_dict[data_key])
        data += "\r\n"

        self.ssdp_content = data
        return data.encode("utf-8")
|
||||||
49
fHDHR/device/ssdp/rmg_ssdp.py
Normal file
@ -0,0 +1,49 @@
|
|||||||
|
|
||||||
|
|
||||||
|
class RMG_SSDP():
    """Builds (and caches) the Plex RMG-flavored SSDP NOTIFY payload."""

    def __init__(self, fhdhr, broadcast_ip, max_age):
        self.fhdhr = fhdhr

        # Cached rendered payload; built lazily on first get().
        self.ssdp_content = None

        self.broadcast_ip = broadcast_ip
        self.device_xml_path = '/rmg/device.xml'

        self.cable_schema = "urn:schemas-opencable-com:service:Security:1"
        self.ota_schema = "urn:schemas-upnp-org:device-1-0"

        # "cable" advertises the opencable schema; "antenna" and anything
        # unrecognized fall back to the OTA schema.
        if self.fhdhr.config.dict["fhdhr"]["reporting_tuner_type"].lower() == "cable":
            self.schema = self.cable_schema
        else:
            self.schema = self.ota_schema

        self.max_age = max_age

    def get(self):
        """Return the NOTIFY payload as bytes, rendering it on first use."""
        if self.ssdp_content:
            return self.ssdp_content.encode("utf-8")

        data = ''
        data_command = "NOTIFY * HTTP/1.1"

        data_dict = {
            # FIX: previously hard-coded 239.255.255.250; use the group we
            # were constructed with.
            "HOST": "%s:%s" % (self.broadcast_ip, 1900),
            "NT": self.schema,
            "NTS": "ssdp:alive",
            "USN": 'uuid:%s::%s' % (self.fhdhr.config.dict["main"]["uuid"], self.schema),
            "SERVER": 'fHDHR/%s UPnP/1.0' % self.fhdhr.version,
            "LOCATION": "%s%s" % (self.fhdhr.api.base, self.device_xml_path),
            "AL": "%s%s" % (self.fhdhr.api.base, self.device_xml_path),
            # FIX: was emitted as the malformed header "Cache-Control:max-age=:N".
            "CACHE-CONTROL": "max-age=%s" % self.max_age,
            }

        data += "%s\r\n" % data_command
        for data_key in list(data_dict.keys()):
            data += "%s:%s\r\n" % (data_key, data_dict[data_key])
        data += "\r\n"

        self.ssdp_content = data
        return data.encode("utf-8")
|
||||||
16
fHDHR/device/ssdp/ssdp_detect.py
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
|
||||||
|
|
||||||
|
class fHDHR_Detect():
    """Tracks SSDP locations of other fHDHR instances seen on the network.

    Locations are persisted (deduplicated) in the fHDHR database under the
    ("ssdp_detect", "list") key, which is cleared on startup.
    """

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr
        # Start each run with a clean slate.
        self.fhdhr.db.delete_fhdhr_value("ssdp_detect", "list")

    def set(self, location):
        """Record a newly seen location, ignoring duplicates."""
        known = self.fhdhr.db.get_fhdhr_value("ssdp_detect", "list") or []
        if location in known:
            return
        known.append(location)
        self.fhdhr.db.set_fhdhr_value("ssdp_detect", "list", known)

    def get(self):
        """Return the list of detected locations (possibly empty)."""
        return self.fhdhr.db.get_fhdhr_value("ssdp_detect", "list") or []
|
||||||
118
fHDHR/device/tuners/__init__.py
Normal file
@ -0,0 +1,118 @@
|
|||||||
|
|
||||||
|
from fHDHR.exceptions import TunerError
|
||||||
|
|
||||||
|
from .tuner import Tuner
|
||||||
|
|
||||||
|
|
||||||
|
class Tuners():
    """Manages a fixed pool of Tuner objects and routes stream requests.

    Tuner numbers are stored as string keys ("0".."N-1"). A tuner is
    considered in use while its tuner_lock is held.
    """

    def __init__(self, fhdhr, epg, channels):
        self.fhdhr = fhdhr
        self.channels = channels

        self.epg = epg
        self.max_tuners = int(self.fhdhr.config.dict["fhdhr"]["tuner_count"])

        self.tuners = {}

        self.fhdhr.logger.info("Creating %s tuners." % str(self.max_tuners))

        for i in range(0, self.max_tuners):
            self.tuners[str(i)] = Tuner(fhdhr, i, epg)

    def get_available_tuner(self):
        """Return the number of the first unlocked tuner, or None.

        Fixed: the original used next() without a default, which raised
        StopIteration (instead of returning None) when every tuner was
        locked; the trailing "or None" never had a chance to apply.
        """
        return next((tunernum for tunernum in list(self.tuners.keys())
                     if not self.tuners[tunernum].tuner_lock.locked()), None)

    def get_scanning_tuner(self):
        """Return the number of the tuner currently scanning, or None.

        Same StopIteration fix as get_available_tuner().
        """
        return next((tunernum for tunernum in list(self.tuners.keys())
                     if self.tuners[tunernum].status["status"] == "Scanning"), None)

    def stop_tuner_scan(self):
        """Close the scanning tuner, if any."""
        tunernum = self.get_scanning_tuner()
        if tunernum:
            self.tuners[str(tunernum)].close()

    def tuner_scan(self):
        """Temporarily use a tuner for a scan"""
        if not self.available_tuner_count():
            raise TunerError("805 - All Tuners In Use")

        tunernumber = self.get_available_tuner()
        # Fixed ordering: validate the tuner number before using it; the
        # original called channel_scan() first and checked afterwards.
        if tunernumber is None:
            raise TunerError("805 - All Tuners In Use")
        self.tuners[str(tunernumber)].channel_scan()

    def tuner_grab(self, tuner_number, channel_number):
        """Acquire a specific tuner for a channel; raises TunerError on failure."""
        if str(tuner_number) not in list(self.tuners.keys()):
            self.fhdhr.logger.error("Tuner %s does not exist." % str(tuner_number))
            raise TunerError("806 - Tune Failed")

        # TunerError will raise if unavailable
        self.tuners[str(tuner_number)].grab(channel_number)

        return tuner_number

    def first_available(self, channel_number, dograb=True):
        """Grab the first free tuner for channel_number and return its number."""
        if not self.available_tuner_count():
            raise TunerError("805 - All Tuners In Use")

        tunernumber = self.get_available_tuner()

        if tunernumber is None:
            raise TunerError("805 - All Tuners In Use")
        else:
            self.tuners[str(tunernumber)].grab(channel_number)
            return tunernumber

    def tuner_close(self, tunernum):
        """Release a tuner by number."""
        self.tuners[str(tunernum)].close()

    def status(self):
        """Return a dict of per-tuner status dicts keyed by tuner number."""
        all_status = {}
        for tunernum in list(self.tuners.keys()):
            all_status[tunernum] = self.tuners[str(tunernum)].get_status()
        return all_status

    def available_tuner_count(self):
        """Count tuners whose lock is currently free."""
        available_tuners = 0
        for tunernum in list(self.tuners.keys()):
            if not self.tuners[str(tunernum)].tuner_lock.locked():
                available_tuners += 1
        return available_tuners

    def inuse_tuner_count(self):
        """Count tuners whose lock is currently held."""
        inuse_tuners = 0
        for tunernum in list(self.tuners.keys()):
            if self.tuners[str(tunernum)].tuner_lock.locked():
                inuse_tuners += 1
        return inuse_tuners

    def get_stream_info(self, stream_args):
        """Resolve the channel's stream URL and content types into stream_args.

        Raises TunerError("806 - Tune Failed") when no stream can be resolved.
        For non-UDP URLs a HEAD request determines the real Content-Type;
        application/* and text/* (playlists) are reported to clients as
        video/mpeg since the proxied output is a transport stream.
        """
        stream_info = self.channels.get_channel_stream(stream_args)
        if not stream_info:
            raise TunerError("806 - Tune Failed")

        if isinstance(stream_info, str):
            stream_info = {"url": stream_info}
        stream_args["stream_info"] = stream_info

        if not stream_args["stream_info"]["url"]:
            raise TunerError("806 - Tune Failed")

        if stream_args["stream_info"]["url"].startswith("udp://"):
            stream_args["true_content_type"] = "video/mpeg"
            stream_args["content_type"] = "video/mpeg"
        else:

            channel_stream_url_headers = self.fhdhr.web.session.head(stream_args["stream_info"]["url"]).headers
            stream_args["true_content_type"] = channel_stream_url_headers['Content-Type']

            if stream_args["true_content_type"].startswith(tuple(["application/", "text/"])):
                stream_args["content_type"] = "video/mpeg"
            else:
                stream_args["content_type"] = stream_args["true_content_type"]

        return stream_args
|
||||||
27
fHDHR/device/tuners/stream/__init__.py
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
|
||||||
|
|
||||||
|
from .direct_stream import Direct_Stream
|
||||||
|
from .direct_m3u8_stream import Direct_M3U8_Stream
|
||||||
|
from .ffmpeg_stream import FFMPEG_Stream
|
||||||
|
from .vlc_stream import VLC_Stream
|
||||||
|
|
||||||
|
|
||||||
|
class Stream():
    """Selects the streaming backend for stream_args["method"].

    "ffmpeg" and "vlc" pipe through the respective external process;
    "direct" passes the upstream through, using the M3U8 backend when the
    true content type is an application/* or text/* playlist.
    """

    def __init__(self, fhdhr, stream_args, tuner):
        self.fhdhr = fhdhr
        self.stream_args = stream_args

        # Fixed: the "vlc" test was a bare `if`, which detached the two
        # "direct" branches from the "ffmpeg" case. Behavior was identical
        # because the conditions are mutually exclusive, but the chain is
        # now a single if/elif as intended.
        if stream_args["method"] == "ffmpeg":
            self.method = FFMPEG_Stream(fhdhr, stream_args, tuner)
        elif stream_args["method"] == "vlc":
            self.method = VLC_Stream(fhdhr, stream_args, tuner)
        elif (stream_args["method"] == "direct" and
              not self.stream_args["true_content_type"].startswith(tuple(["application/", "text/"]))):
            self.method = Direct_Stream(fhdhr, stream_args, tuner)
        elif (stream_args["method"] == "direct" and
              self.stream_args["true_content_type"].startswith(tuple(["application/", "text/"]))):
            self.method = Direct_M3U8_Stream(fhdhr, stream_args, tuner)
        # NOTE(review): an unrecognized method leaves self.method unset and
        # get() will raise AttributeError — consider raising TunerError here.

    def get(self):
        """Return the chosen backend's stream generator."""
        return self.method.get()
|
||||||
100
fHDHR/device/tuners/stream/direct_m3u8_stream.py
Normal file
@ -0,0 +1,100 @@
|
|||||||
|
import sys
|
||||||
|
import time
|
||||||
|
import m3u8
|
||||||
|
|
||||||
|
from Crypto.Cipher import AES
|
||||||
|
|
||||||
|
# from fHDHR.exceptions import TunerError
|
||||||
|
|
||||||
|
|
||||||
|
# Streams an HLS (m3u8) channel by polling the playlist and passing each
# media segment through to the client, decrypting AES-CBC segments when
# the playlist carries keys.
class Direct_M3U8_Stream():

    def __init__(self, fhdhr, stream_args, tuner):
        self.fhdhr = fhdhr
        self.stream_args = stream_args
        self.tuner = tuner

        # Configured read size; not used for HLS (segments are fetched
        # whole), but kept for parity with the other stream backends.
        self.bytes_per_read = int(self.fhdhr.config.dict["streaming"]["bytes_per_read"])

    def get(self):
        """Resolve the media playlist and return a generator of segment bytes.

        The generator runs until the tuner lock is released or a segment
        fetch returns no data.
        """

        # duration == 0 means "no limit"; otherwise compute an absolute deadline.
        if not self.stream_args["duration"] == 0:
            self.stream_args["time_end"] = self.stream_args["duration"] + time.time()

        self.fhdhr.logger.info("Detected stream URL is m3u8: %s" % self.stream_args["true_content_type"])

        # Follow master playlists down to a media playlist: while the loaded
        # m3u8 contains variant playlists, descend into the first variant.
        channel_stream_url = self.stream_args["stream_info"]["url"]
        while True:

            self.fhdhr.logger.info("Opening m3u8 for reading %s" % channel_stream_url)
            videoUrlM3u = m3u8.load(channel_stream_url)
            if len(videoUrlM3u.playlists):
                self.fhdhr.logger.info("%s m3u8 varients found" % len(videoUrlM3u.playlists))
                channel_stream_url = videoUrlM3u.playlists[0].absolute_uri
            else:
                break

        def generate():

            try:

                # URLs of segments already yielded, to avoid replays when the
                # playlist is re-polled. NOTE(review): this list grows without
                # bound for long-running live streams.
                played_chunk_urls = []

                # The tuner lock is held while a client is being served;
                # releasing it (tuner.close()) ends the loop.
                while self.tuner.tuner_lock.locked():

                    # Re-poll the media playlist for fresh segments.
                    playlist = m3u8.load(channel_stream_url)
                    segments = playlist.segments

                    if len(played_chunk_urls):
                        newsegments = 0
                        for segment in segments:
                            if segment.absolute_uri not in played_chunk_urls:
                                newsegments += 1
                        self.fhdhr.logger.info("Refreshing m3u8, Loaded %s new segments." % str(newsegments))
                    else:
                        self.fhdhr.logger.info("Loaded %s segments." % str(len(segments)))

                    # Build a per-segment key list. NOTE(review): when keys are
                    # present this assumes one key entry per segment; if the
                    # counts differ, zip() below silently truncates.
                    if playlist.keys != [None]:
                        keys = [{"url": key.absolute_uri, "method": key.method, "iv": key.iv} for key in playlist.keys if key]
                    else:
                        keys = [None for i in range(0, len(segments))]

                    for segment, key in zip(segments, keys):
                        chunkurl = segment.absolute_uri

                        if chunkurl and chunkurl not in played_chunk_urls:
                            played_chunk_urls.append(chunkurl)

                            # Duration deadline reached: close the tuner, which
                            # releases the lock. NOTE(review): this does not
                            # break immediately; the current segment is still
                            # fetched and yielded before the while exits.
                            if (not self.stream_args["duration"] == 0 and
                               not time.time() < self.stream_args["time_end"]):
                                self.fhdhr.logger.info("Requested Duration Expired.")
                                self.tuner.close()

                            chunk = self.fhdhr.web.session.get(chunkurl).content
                            if not chunk:
                                break
                                # raise TunerError("807 - No Video Data")

                            # Decrypt AES-encrypted segments. NOTE(review): the
                            # key bytes are also used as the IV here, ignoring
                            # key["iv"] and key["method"] — confirm intended.
                            if key:
                                if key["url"]:
                                    keyfile = self.fhdhr.web.session.get(key["url"]).content
                                    cryptor = AES.new(keyfile, AES.MODE_CBC, keyfile)
                                    self.fhdhr.logger.info("Decrypting Chunk #%s with key: %s" % (len(played_chunk_urls), key["url"]))
                                    chunk = cryptor.decrypt(chunk)

                            # sys.getsizeof measures the bytes object (header
                            # included); used only for usage accounting.
                            chunk_size = int(sys.getsizeof(chunk))
                            self.fhdhr.logger.info("Passing Through Chunk #%s with size %s: %s" % (len(played_chunk_urls), chunk_size, chunkurl))
                            yield chunk
                            self.tuner.add_downloaded_size(chunk_size)

                self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")

            except GeneratorExit:
                self.fhdhr.logger.info("Connection Closed.")
            except Exception as e:
                self.fhdhr.logger.info("Connection Closed: %s" % e)
            finally:
                # Always release the tuner, whatever ended the stream.
                self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")
                self.tuner.close()
                # raise TunerError("806 - Tune Failed")

        return generate()
|
||||||
64
fHDHR/device/tuners/stream/direct_stream.py
Normal file
@ -0,0 +1,64 @@
|
|||||||
|
import sys
|
||||||
|
import time
|
||||||
|
|
||||||
|
# from fHDHR.exceptions import TunerError
|
||||||
|
|
||||||
|
|
||||||
|
# Passes a remote HTTP(S) stream through to the client unmodified.
class Direct_Stream():

    def __init__(self, fhdhr, stream_args, tuner):
        self.fhdhr = fhdhr
        self.stream_args = stream_args
        self.tuner = tuner

        # Chunk size for each read from the upstream connection.
        self.bytes_per_read = int(self.fhdhr.config.dict["streaming"]["bytes_per_read"])

    def get(self):
        """Open the upstream URL and return a generator yielding raw chunks.

        The generator runs until the tuner lock is released, the requested
        duration elapses, or the upstream stops producing data.
        """

        # duration == 0 means "no limit"; otherwise compute an absolute deadline.
        if not self.stream_args["duration"] == 0:
            self.stream_args["time_end"] = self.stream_args["duration"] + time.time()

        self.fhdhr.logger.info("Direct Stream of %s URL: %s" % (self.stream_args["true_content_type"], self.stream_args["stream_info"]["url"]))

        # Streaming GET: body is consumed incrementally via iter_content below.
        req = self.fhdhr.web.session.get(self.stream_args["stream_info"]["url"], stream=True)

        def generate():

            try:

                chunk_counter = 1

                # The tuner lock is held while a client is being served;
                # releasing it (tuner.close()) ends the loop.
                while self.tuner.tuner_lock.locked():

                    for chunk in req.iter_content(chunk_size=self.bytes_per_read):

                        # Duration deadline reached: close the upstream request
                        # and the tuner (releases the lock). NOTE(review): the
                        # loop is not broken directly; it ends via the closed
                        # connection / the while condition.
                        if (not self.stream_args["duration"] == 0 and
                           not time.time() < self.stream_args["time_end"]):
                            req.close()
                            self.fhdhr.logger.info("Requested Duration Expired.")
                            self.tuner.close()

                        if not chunk:
                            break
                            # raise TunerError("807 - No Video Data")

                        # sys.getsizeof measures the bytes object (header
                        # included); used only for usage accounting.
                        chunk_size = int(sys.getsizeof(chunk))
                        self.fhdhr.logger.info("Passing Through Chunk #%s with size %s" % (chunk_counter, chunk_size))
                        yield chunk
                        self.tuner.add_downloaded_size(chunk_size)

                        chunk_counter += 1

                self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")

            except GeneratorExit:
                self.fhdhr.logger.info("Connection Closed.")
            except Exception as e:
                self.fhdhr.logger.info("Connection Closed: %s" % e)
            finally:
                # Always release the upstream connection and the tuner.
                req.close()
                self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")
                self.tuner.close()
                # raise TunerError("806 - Tune Failed")

        return generate()
|
||||||
132
fHDHR/device/tuners/stream/ffmpeg_stream.py
Normal file
@ -0,0 +1,132 @@
|
|||||||
|
import sys
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
# from fHDHR.exceptions import TunerError
|
||||||
|
|
||||||
|
|
||||||
|
# Streams a channel by piping it through an external ffmpeg process.
class FFMPEG_Stream():

    def __init__(self, fhdhr, stream_args, tuner):
        self.fhdhr = fhdhr
        self.stream_args = stream_args
        self.tuner = tuner

        # Number of bytes pulled from ffmpeg's stdout per read.
        self.bytes_per_read = int(self.fhdhr.config.dict["streaming"]["bytes_per_read"])
        # Full argv for the ffmpeg subprocess, built once up front.
        self.ffmpeg_command = self.ffmpeg_command_assemble(stream_args)

    def get(self):
        """Spawn ffmpeg and return a generator yielding its stdout in chunks."""

        ffmpeg_proc = subprocess.Popen(self.ffmpeg_command, stdout=subprocess.PIPE)

        def generate():
            try:
                # The tuner lock is held while a client is being served;
                # releasing it (tuner.close()) ends the loop.
                while self.tuner.tuner_lock.locked():

                    chunk = ffmpeg_proc.stdout.read(self.bytes_per_read)
                    if not chunk:
                        break
                        # raise TunerError("807 - No Video Data")
                    yield chunk
                    # sys.getsizeof measures the bytes object (header
                    # included); used only for usage accounting.
                    chunk_size = int(sys.getsizeof(chunk))
                    self.tuner.add_downloaded_size(chunk_size)
                self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")

            except GeneratorExit:
                self.fhdhr.logger.info("Connection Closed.")
            except Exception as e:
                self.fhdhr.logger.info("Connection Closed: %s" % e)
            finally:
                # Ensure the ffmpeg process is fully stopped and the tuner
                # released, whatever ended the stream.
                ffmpeg_proc.terminate()
                ffmpeg_proc.communicate()
                ffmpeg_proc.kill()
                self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")
                self.tuner.close()
                # raise TunerError("806 - Tune Failed")

        return generate()

    def ffmpeg_command_assemble(self, stream_args):
        """Build the ffmpeg argv: input, duration/reconnect flags, profile,
        loglevel, and stdout output."""
        ffmpeg_command = [
            self.fhdhr.config.dict["ffmpeg"]["path"],
            "-i", stream_args["stream_info"]["url"],
            ]
        ffmpeg_command.extend(self.ffmpeg_duration(stream_args))
        ffmpeg_command.extend(self.transcode_profiles(stream_args))
        ffmpeg_command.extend(self.ffmpeg_loglevel())
        # NOTE(review): ffmpeg's pipe protocol is normally spelled "pipe:1"
        # for stdout — confirm "pipe:stdout" works with the targeted builds.
        ffmpeg_command.extend(["pipe:stdout"])
        return ffmpeg_command

    def ffmpeg_duration(self, stream_args):
        """Return -t for a bounded recording, else aggressive reconnect flags
        for an unbounded live stream."""
        ffmpeg_command = []
        if stream_args["duration"]:
            ffmpeg_command.extend(["-t", str(stream_args["duration"])])
        else:
            ffmpeg_command.extend(
                [
                    "-reconnect", "1",
                    "-reconnect_at_eof", "1",
                    "-reconnect_streamed", "1",
                    "-reconnect_delay_max", "2",
                ]
                )

        return ffmpeg_command

    def ffmpeg_loglevel(self):
        """Map fHDHR's logging level onto ffmpeg's -loglevel flag."""
        ffmpeg_command = []
        log_level = self.fhdhr.config.dict["logging"]["level"].lower()

        loglevel_dict = {
            "debug": "debug",
            "info": "info",
            "error": "error",
            "warning": "warning",
            "critical": "fatal",
            }
        if log_level not in ["info", "debug"]:
            ffmpeg_command.extend(["-nostats", "-hide_banner"])
        # NOTE(review): a configured level outside this mapping would raise
        # KeyError here — confirm upstream validation guarantees membership.
        ffmpeg_command.extend(["-loglevel", loglevel_dict[log_level]])
        return ffmpeg_command

    def transcode_profiles(self, stream_args):
        # TODO implement actual profiles here
        """Return output-encoding arguments for the requested transcode profile.

        Profiles are not implemented yet: any requested profile is logged and
        discarded, and the stream is copied as-is into an MPEG-TS container.
        Intended profiles:
        • heavy: transcode to AVC with the same resolution, frame-rate, and
          interlacing as the original stream.
        • mobile: transcode to AVC progressive not exceeding 1280x720 30fps.
        • internet720: low bitrate AVC progressive not exceeding 1280x720 30fps.
        • internet480: low bitrate AVC progressive not exceeding 848x480 30fps
          for 16:9 content, not exceeding 640x480 30fps for 4:3 content.
        • internet360: low bitrate AVC progressive not exceeding 640x360 30fps
          for 16:9 content, not exceeding 480x360 30fps for 4:3 content.
        • internet240: low bitrate AVC progressive not exceeding 432x240 30fps
          for 16:9 content, not exceeding 320x240 30fps for 4:3 content.
        """

        # Drop any requested profile until profiles are implemented; this
        # forces the "copy" branch below.
        if stream_args["transcode"]:
            self.fhdhr.logger.info("Client requested a %s transcode for stream." % stream_args["transcode"])
            stream_args["transcode"] = None

        ffmpeg_command = []

        if not stream_args["transcode"]:
            ffmpeg_command.extend(
                [
                    "-c", "copy",
                    "-f", "mpegts",
                ]
                )
        # The profile branches below are currently unreachable (transcode is
        # nulled above) and are placeholders for future implementations.
        elif stream_args["transcode"] == "heavy":
            ffmpeg_command.extend([])
        elif stream_args["transcode"] == "mobile":
            ffmpeg_command.extend([])
        elif stream_args["transcode"] == "internet720":
            ffmpeg_command.extend([])
        elif stream_args["transcode"] == "internet480":
            ffmpeg_command.extend([])
        elif stream_args["transcode"] == "internet360":
            ffmpeg_command.extend([])
        elif stream_args["transcode"] == "internet240":
            ffmpeg_command.extend([])

        return ffmpeg_command
|
||||||
122
fHDHR/device/tuners/stream/vlc_stream.py
Normal file
@ -0,0 +1,122 @@
|
|||||||
|
import sys
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
# from fHDHR.exceptions import TunerError
|
||||||
|
|
||||||
|
|
||||||
|
# Streams a channel by piping it through an external VLC process.
class VLC_Stream():

    def __init__(self, fhdhr, stream_args, tuner):
        self.fhdhr = fhdhr
        self.stream_args = stream_args
        self.tuner = tuner

        # Number of bytes pulled from VLC's stdout per read.
        self.bytes_per_read = int(self.fhdhr.config.dict["streaming"]["bytes_per_read"])
        # Full argv for the VLC subprocess, built once up front.
        self.vlc_command = self.vlc_command_assemble(stream_args)

    def get(self):
        """Spawn VLC and return a generator yielding its stdout in chunks."""

        vlc_proc = subprocess.Popen(self.vlc_command, stdout=subprocess.PIPE)

        def generate():
            try:

                # The tuner lock is held while a client is being served;
                # releasing it (tuner.close()) ends the loop.
                while self.tuner.tuner_lock.locked():

                    chunk = vlc_proc.stdout.read(self.bytes_per_read)
                    if not chunk:
                        break
                        # raise TunerError("807 - No Video Data")
                    yield chunk
                    # sys.getsizeof measures the bytes object (header
                    # included); used only for usage accounting.
                    chunk_size = int(sys.getsizeof(chunk))
                    self.tuner.add_downloaded_size(chunk_size)
                self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")

            except GeneratorExit:
                self.fhdhr.logger.info("Connection Closed.")
            except Exception as e:
                self.fhdhr.logger.info("Connection Closed: %s" % e)
            finally:
                # Ensure the VLC process is fully stopped and the tuner
                # released, whatever ended the stream.
                vlc_proc.terminate()
                vlc_proc.communicate()
                vlc_proc.kill()
                self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")
                self.tuner.close()
                # raise TunerError("806 - Tune Failed")

        return generate()

    def vlc_command_assemble(self, stream_args):
        """Build the VLC argv: dummy interface, input URL, duration, loglevel,
        and an --sout chain writing MPEG-TS to stdout."""
        vlc_command = [
            self.fhdhr.config.dict["vlc"]["path"],
            "-I", "dummy", stream_args["stream_info"]["url"],
            ]
        vlc_command.extend(self.vlc_duration(stream_args))
        vlc_command.extend(self.vlc_loglevel())
        vlc_command.extend(["--sout"])
        vlc_command.extend(self.transcode_profiles(stream_args))
        return vlc_command

    def vlc_duration(self, stream_args):
        """Return --run-time for a bounded recording, else no flags."""
        vlc_command = []
        if stream_args["duration"]:
            vlc_command.extend(["--run-time=%s" % str(stream_args["duration"])])
        return vlc_command

    def vlc_loglevel(self):
        """Map fHDHR's logging level onto VLC's --log-verbose flag."""
        vlc_command = []
        log_level = self.fhdhr.config.dict["logging"]["level"].lower()

        loglevel_dict = {
            "debug": "3",
            "info": "0",
            "error": "1",
            "warning": "2",
            "critical": "1",
            }
        # Fixed: the flag and its value were previously appended as two
        # separate argv entries ("--log-verbose=", "3"), which VLC does not
        # parse; long options must be a single "--opt=value" token.
        vlc_command.extend(["--log-verbose=%s" % loglevel_dict[log_level]])
        if log_level not in ["info", "debug"]:
            vlc_command.extend(["--quiet"])
        return vlc_command

    def transcode_profiles(self, stream_args):
        # TODO implement actual profiles here
        """Return the --sout chain for the requested transcode profile.

        Profiles are not implemented yet: any requested profile is logged and
        discarded, and the stream is muxed as-is into MPEG-TS on stdout.
        Intended profiles:
        • heavy: transcode to AVC with the same resolution, frame-rate, and
          interlacing as the original stream.
        • mobile: transcode to AVC progressive not exceeding 1280x720 30fps.
        • internet720: low bitrate AVC progressive not exceeding 1280x720 30fps.
        • internet480: low bitrate AVC progressive not exceeding 848x480 30fps
          for 16:9 content, not exceeding 640x480 30fps for 4:3 content.
        • internet360: low bitrate AVC progressive not exceeding 640x360 30fps
          for 16:9 content, not exceeding 480x360 30fps for 4:3 content.
        • internet240: low bitrate AVC progressive not exceeding 432x240 30fps
          for 16:9 content, not exceeding 320x240 30fps for 4:3 content.
        """

        # Drop any requested profile until profiles are implemented.
        if stream_args["transcode"]:
            self.fhdhr.logger.info("Client requested a %s transcode for stream." % stream_args["transcode"])
            stream_args["transcode"] = None

        # Pass-through: mux to TS and write to stdout. A future transcode
        # implementation would prepend a #transcode{...} stage to this chain.
        # (Removed: a large block of unreachable code that followed this
        # return in the original, including a stray example sout string.)
        vlc_transcode_string = "#std{mux=ts,access=file,dst=-}"
        return [vlc_transcode_string]
|
||||||
98
fHDHR/device/tuners/tuner.py
Normal file
@ -0,0 +1,98 @@
|
|||||||
|
import threading
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
from fHDHR.exceptions import TunerError
|
||||||
|
from fHDHR.tools import humanized_time
|
||||||
|
|
||||||
|
from .stream import Stream
|
||||||
|
|
||||||
|
|
||||||
|
class Tuner():
|
||||||
|
def __init__(self, fhdhr, inum, epg):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
self.number = inum
|
||||||
|
self.epg = epg
|
||||||
|
|
||||||
|
self.tuner_lock = threading.Lock()
|
||||||
|
self.set_off_status()
|
||||||
|
|
||||||
|
self.chanscan_url = "/api/channels?method=scan"
|
||||||
|
self.close_url = "/api/tuners?method=close&tuner=%s" % str(self.number)
|
||||||
|
|
||||||
|
def channel_scan(self, grabbed=False):
|
||||||
|
if self.tuner_lock.locked() and not grabbed:
|
||||||
|
self.fhdhr.logger.error("Tuner #%s is not available." % str(self.number))
|
||||||
|
raise TunerError("804 - Tuner In Use")
|
||||||
|
|
||||||
|
if self.status["status"] == "Scanning":
|
||||||
|
self.fhdhr.logger.info("Channel Scan Already In Progress!")
|
||||||
|
else:
|
||||||
|
|
||||||
|
if not grabbed:
|
||||||
|
self.tuner_lock.acquire()
|
||||||
|
self.status["status"] = "Scanning"
|
||||||
|
self.fhdhr.logger.info("Tuner #%s Performing Channel Scan." % str(self.number))
|
||||||
|
|
||||||
|
chanscan = threading.Thread(target=self.runscan)
|
||||||
|
chanscan.start()
|
||||||
|
|
||||||
|
def runscan(self):
|
||||||
|
self.fhdhr.api.get(self.chanscan_url)
|
||||||
|
self.fhdhr.logger.info("Requested Channel Scan Complete.")
|
||||||
|
self.close()
|
||||||
|
self.fhdhr.api.get(self.close_url)
|
||||||
|
|
||||||
|
def add_downloaded_size(self, bytes_count):
|
||||||
|
if "downloaded" in list(self.status.keys()):
|
||||||
|
self.status["downloaded"] += bytes_count
|
||||||
|
|
||||||
|
def grab(self, channel_number):
|
||||||
|
if self.tuner_lock.locked():
|
||||||
|
self.fhdhr.logger.error("Tuner #%s is not available." % self.number)
|
||||||
|
raise TunerError("804 - Tuner In Use")
|
||||||
|
self.tuner_lock.acquire()
|
||||||
|
self.status["status"] = "Acquired"
|
||||||
|
self.status["channel"] = channel_number
|
||||||
|
self.fhdhr.logger.info("Tuner #%s Acquired." % str(self.number))
|
||||||
|
|
||||||
|
def close(self):
|
||||||
|
self.set_off_status()
|
||||||
|
if self.tuner_lock.locked():
|
||||||
|
self.tuner_lock.release()
|
||||||
|
self.fhdhr.logger.info("Tuner #%s Released." % self.number)
|
||||||
|
|
||||||
|
def get_status(self):
|
||||||
|
current_status = self.status.copy()
|
||||||
|
if current_status["status"] == "Active":
|
||||||
|
current_status["Play Time"] = str(
|
||||||
|
humanized_time(
|
||||||
|
int((datetime.datetime.utcnow() - current_status["time_start"]).total_seconds())))
|
||||||
|
current_status["time_start"] = str(current_status["time_start"])
|
||||||
|
current_status["epg"] = self.epg.whats_on_now(current_status["channel"])
|
||||||
|
return current_status
|
||||||
|
|
||||||
|
def set_off_status(self):
|
||||||
|
self.status = {"status": "Inactive"}
|
||||||
|
|
||||||
|
def get_stream(self, stream_args, tuner):
|
||||||
|
stream = Stream(self.fhdhr, stream_args, tuner)
|
||||||
|
return stream.get()
|
||||||
|
|
||||||
|
def set_status(self, stream_args):
|
||||||
|
if self.status["status"] != "Active":
|
||||||
|
self.status = {
|
||||||
|
"status": "Active",
|
||||||
|
"clients": [],
|
||||||
|
"clients_id": [],
|
||||||
|
"method": stream_args["method"],
|
||||||
|
"accessed": [stream_args["accessed"]],
|
||||||
|
"channel": stream_args["channel"],
|
||||||
|
"proxied_url": stream_args["stream_info"]["url"],
|
||||||
|
"time_start": datetime.datetime.utcnow(),
|
||||||
|
"downloaded": 0
|
||||||
|
}
|
||||||
|
if stream_args["client"] not in self.status["clients"]:
|
||||||
|
self.status["clients"].append(stream_args["client"])
|
||||||
|
if stream_args["client_id"] not in self.status["clients_id"]:
|
||||||
|
self.status["clients_id"].append(stream_args["client_id"])
|
||||||
40
fHDHR/exceptions/__init__.py
Normal file
@ -0,0 +1,40 @@
|
|||||||
|
|
||||||
|
class TunerError(Exception):
    """Raised for tuner acquisition/streaming failures (e.g. "804 - Tuner In Use")."""

    def __init__(self, value):
        super().__init__(value)
        # Keep the raw value for callers that inspect it directly.
        self.value = value

    def __str__(self):
        return f"TunerError: {self.value}"
|
||||||
|
|
||||||
|
|
||||||
|
class OriginSetupError(Exception):
    """Raised when the origin service fails to initialize."""

    def __init__(self, value):
        super().__init__(value)
        # Keep the raw value for callers that inspect it directly.
        self.value = value

    def __str__(self):
        return f"OriginSetupError: {self.value}"
|
||||||
|
|
||||||
|
|
||||||
|
class EPGSetupError(Exception):
    """Raised when an EPG source fails to initialize."""

    def __init__(self, value):
        super().__init__(value)
        # Keep the raw value for callers that inspect it directly.
        self.value = value

    def __str__(self):
        return f"EPGSetupError: {self.value}"
|
||||||
|
|
||||||
|
|
||||||
|
class ConfigurationError(Exception):
    """Base class for configuration-related failures."""

    def __init__(self, value):
        # Keep the raw value for callers that inspect it directly.
        self.value = value

    def __str__(self):
        return f"ConfigurationError: {self.value}"


class ConfigurationNotFound(ConfigurationError):
    """Raised when the configuration file cannot be located on disk."""

    def __init__(self, filename):
        # The base-class "value" carries no detail for this error; the
        # filename is the useful context.
        super().__init__(None)
        self.filename = filename

    def __str__(self):
        return f"Unable to find the configuration file {self.filename}"
|
||||||
56
fHDHR/originwrapper/__init__.py
Normal file
@ -0,0 +1,56 @@
|
|||||||
|
from .origin_channels_standin import OriginChannels_StandIN
|
||||||
|
from .origin_epg_standin import OriginEPG_StandIN
|
||||||
|
|
||||||
|
import fHDHR.exceptions
|
||||||
|
|
||||||
|
|
||||||
|
class OriginServiceWrapper():
    """Wraps an origin plugin, falling back to no-op stand-ins on failure.

    On setup failure (OriginSetupError) the wrapper substitutes
    OriginChannels_StandIN / OriginEPG_StandIN so callers keep working
    with empty results. Unknown attribute access is delegated to the
    wrapped fhdhr, origin service, channels, and epg objects, in that order.
    """

    def __init__(self, fhdhr, origin):
        self.fhdhr = fhdhr
        self.origin = origin

        self.servicename = fhdhr.config.dict["main"]["servicename"]

        # True/False once setup() has run; None only transiently.
        self.setup_success = None
        self.setup()

    def setup(self):
        """Instantiate the origin service and its channels/epg providers."""
        try:
            self.originservice = self.origin.OriginService(self.fhdhr)
            self.setup_success = True
            self.fhdhr.logger.info("%s Setup Success" % self.servicename)
        except fHDHR.exceptions.OriginSetupError as e:
            self.originservice = None
            self.fhdhr.logger.error(e)
            self.setup_success = False

        if self.setup_success:
            self.channels = self.origin.OriginChannels(self.fhdhr, self.originservice)
            self.epg = self.origin.OriginEPG(self.fhdhr)
        else:
            # Stand-ins keep the wrapper usable with empty data.
            self.channels = OriginChannels_StandIN()
            self.epg = OriginEPG_StandIN()

    def get_channels(self):
        """Return the origin's channel list ([] when setup failed)."""
        return self.channels.get_channels()

    def get_channel_stream(self, chandict, stream_args):
        """Return the origin's stream info for a channel (None when setup failed)."""
        return self.channels.get_channel_stream(chandict, stream_args)

    def update_epg(self, channels):
        """Return the origin's EPG data ({} when setup failed)."""
        return self.epg.update_epg(channels)

    def __getattr__(self, name):
        ''' will only get called for undefined attributes '''
        # Fixed: use getattr() instead of eval("self.x.%s" % name) — eval on
        # an attribute name is slow and unsafe if a name ever originates
        # outside this module. Delegation order matches the original.
        for delegate in (self.fhdhr, self.originservice, self.channels, self.epg):
            if hasattr(delegate, name):
                return getattr(delegate, name)
        raise AttributeError(name)
|
||||||
11
fHDHR/originwrapper/origin_channels_standin.py
Normal file
@ -0,0 +1,11 @@
|
|||||||
|
|
||||||
|
|
||||||
|
class OriginChannels_StandIN():
    """Placeholder channel provider used when origin setup fails."""

    def __init__(self):
        pass

    def get_channels(self):
        """No origin available, so there are no channels to report."""
        return []

    def get_channel_stream(self, chandict, stream_args):
        """No origin available, so no stream can be produced."""
        return None
|
||||||
8
fHDHR/originwrapper/origin_epg_standin.py
Normal file
@ -0,0 +1,8 @@
|
|||||||
|
|
||||||
|
|
||||||
|
class OriginEPG_StandIN():
    """Placeholder EPG provider used when origin setup fails."""

    def __init__(self):
        pass

    def update_epg(self, channels):
        """No origin available, so the guide update is always empty."""
        return {}
|
||||||
154
fHDHR/tools/__init__.py
Normal file
@ -0,0 +1,154 @@
|
|||||||
|
import os
|
||||||
|
import re
|
||||||
|
import ast
|
||||||
|
import requests
|
||||||
|
import xml.etree.ElementTree
|
||||||
|
|
||||||
|
UNARY_OPS = (ast.UAdd, ast.USub)
|
||||||
|
BINARY_OPS = (ast.Add, ast.Sub, ast.Mult, ast.Div, ast.Mod)
|
||||||
|
|
||||||
|
|
||||||
|
def channel_sort(channel_list):
    """Take a list of channel number strings and sort the Numbers and SubNumbers"""
    sort_keys = {}
    for number in channel_list:
        pieces = number.split(".")
        # "2.1" -> (2, 1); "2" -> (2, 0); a trailing dot ("2.") also maps to 0.
        primary = int(pieces[0])
        secondary = int(pieces[1] or 0) if len(pieces) > 1 else 0
        sort_keys[number] = (primary, secondary)
    return sorted(sort_keys, key=lambda number: sort_keys[number])
|
||||||
|
|
||||||
|
|
||||||
|
def is_docker():
    """Return True when the process appears to run inside a Docker container.

    Detection reads /proc/self/cgroup and looks for a docker (or docker-ce/ee)
    cgroup path. Linux-only: on platforms without that file it returns False.
    """
    path = "/proc/self/cgroup"
    if not os.path.isfile(path):
        return False
    with open(path) as f:
        for line in f:
            # Raw string for the regex: the original non-raw "\d+..." relies
            # on invalid escape sequences (DeprecationWarning / W605).
            if re.match(r"\d+:[\w=]+:/docker(-[ce]e)?/\w+", line):
                return True
    return False
|
||||||
|
|
||||||
|
|
||||||
|
def sub_el(parent, sub_el_item_name, text=None, **kwargs):
    """Append a child element to *parent* and optionally set its text.

    Extra keyword arguments become XML attributes; returns the new element.
    """
    child = xml.etree.ElementTree.SubElement(parent, sub_el_item_name, **kwargs)
    if text:
        child.text = text
    return child
|
||||||
|
|
||||||
|
|
||||||
|
def xmldictmaker(inputdict, req_items, list_items=(), str_items=()):
    """Copy *inputdict* and guarantee every key in *req_items* exists.

    Missing required keys are added with value None; any required key whose
    value is falsy is then normalized to [] (keys in *list_items*) or ""
    (keys in *str_items*).

    The container defaults are immutable tuples rather than the original
    mutable lists, avoiding the shared-mutable-default pitfall; callers may
    still pass lists.
    """
    xml_dict = dict(inputdict)

    for req_item in req_items:
        if req_item not in xml_dict:
            xml_dict[req_item] = None
        if not xml_dict[req_item]:
            if req_item in list_items:
                xml_dict[req_item] = []
            elif req_item in str_items:
                xml_dict[req_item] = ""

    return xml_dict
|
||||||
|
|
||||||
|
|
||||||
|
def is_arithmetic(s):
    """Return True when *s* parses as a pure numeric arithmetic expression.

    Only numeric literals combined with unary +/- and binary + - * / % are
    accepted; names, strings, calls, and any other node cause False.
    """
    # Local tuples instead of the deprecated ast.Num-based module constants;
    # ast.Constant replaced ast.Num in Python 3.8+.
    valid_unary = (ast.UAdd, ast.USub)
    valid_binary = (ast.Add, ast.Sub, ast.Mult, ast.Div, ast.Mod)

    def _is_arithmetic(node):
        if isinstance(node, ast.Constant):
            # Numeric constants only: strings/bools-as-values are rejected.
            return isinstance(node.value, (int, float, complex))
        elif isinstance(node, ast.Expression):
            return _is_arithmetic(node.body)
        elif isinstance(node, ast.UnaryOp):
            return isinstance(node.op, valid_unary) and _is_arithmetic(node.operand)
        elif isinstance(node, ast.BinOp):
            return (isinstance(node.op, valid_binary)
                    and _is_arithmetic(node.left)
                    and _is_arithmetic(node.right))
        else:
            raise ValueError('Unsupported type {}'.format(node))

    try:
        return _is_arithmetic(ast.parse(s, mode='eval'))
    except (SyntaxError, ValueError):
        return False
|
||||||
|
|
||||||
|
|
||||||
|
def isint(x):
    """Return True when *x* converts to a float with no fractional part."""
    try:
        as_float = float(x)
        as_int = int(as_float)
    except ValueError:
        # Not numeric at all (e.g. "abc", "nan" -> int() raises ValueError).
        return False
    return as_float == as_int
|
||||||
|
|
||||||
|
|
||||||
|
def isfloat(x):
    """Return True when *x* can be converted to a float."""
    try:
        float(x)
    except ValueError:
        return False
    return True
|
||||||
|
|
||||||
|
|
||||||
|
def hours_between_datetime(first_time, later_time):
    """Return the signed number of hours from *later_time* to *first_time*.

    Positive when first_time is after later_time; fractional hours possible.
    """
    delta = first_time - later_time
    return (delta.total_seconds() / 60 / 60)
|
||||||
|
|
||||||
|
|
||||||
|
def humanized_filesize(size, decimal_places=2):
    """Render a byte count as a human-readable binary-unit string."""
    units = ('B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB')
    index = 0
    # Scale down by 1024 until the value fits, capping at the largest unit.
    while size >= 1024.0 and units[index] != 'YiB':
        size /= 1024.0
        index += 1
    return f"{size:.{decimal_places}f} {units[index]}"
|
||||||
|
|
||||||
|
|
||||||
|
def humanized_time(countdownseconds):
    """Render a duration in seconds as e.g. "1 hour 2 minutes 3 seconds".

    Returns "just now" for zero or sub-second durations. Matches the original
    pluralization: a value of exactly 1 is singular, anything greater plural
    (fractional seconds like 1.5 therefore render as "1 seconds").

    Rewritten to drop the eval(x)-based local-variable lookup, a duplicated
    ``time %= (24 * 3600)`` line, and the unreachable statement that existed
    only to silence a lint warning.
    """
    remaining = float(countdownseconds)
    if remaining == 0:
        return "just now"

    # Largest-to-smallest breakdown; seconds keep any fractional remainder,
    # matching the original (which never floored the final value).
    amounts = {}
    for unit, size in (("year", 365 * 24 * 3600),
                       ("day", 24 * 3600),
                       ("hour", 3600),
                       ("minute", 60)):
        amounts[unit] = remaining // size
        remaining %= size
    amounts["second"] = remaining

    parts = []
    for unit in ("year", "day", "hour", "minute", "second"):
        value = amounts[unit]
        if value >= 1:
            label = unit if value <= 1 else unit + "s"
            parts.append("%s %s" % (int(value), label))

    if not parts:
        return "just now"
    return " ".join(parts)
|
||||||
|
|
||||||
|
|
||||||
|
class WebReq():
    """Thin wrapper around a shared requests.Session.

    Exposes the session's attributes directly and keeps requests.exceptions
    handy for callers.
    """

    def __init__(self):
        self.session = requests.Session()
        self.exceptions = requests.exceptions

    def __getattr__(self, name):
        ''' will only get called for undefined attributes '''
        # Delegate to the session via getattr (not eval). Previously a miss
        # fell off the end and silently returned None, hiding typos; raising
        # AttributeError restores the standard __getattr__ contract.
        if hasattr(self.session, name):
            return getattr(self.session, name)
        raise AttributeError(name)
|
||||||
227
fHDHR_web/__init__.py
Normal file
@ -0,0 +1,227 @@
|
|||||||
|
from gevent.pywsgi import WSGIServer
|
||||||
|
from flask import Flask, request, session
|
||||||
|
import threading
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
from .pages import fHDHR_Pages
|
||||||
|
from .files import fHDHR_Files
|
||||||
|
from .brython import fHDHR_Brython
|
||||||
|
from .hdhr import fHDHR_HDHR
|
||||||
|
from .rmg import fHDHR_RMG
|
||||||
|
from .api import fHDHR_API
|
||||||
|
|
||||||
|
|
||||||
|
fHDHR_web_VERSION = "v0.8.0-beta"
|
||||||
|
|
||||||
|
|
||||||
|
class fHDHR_HTTP_Server():
    """Flask/gevent HTTP front-end for fHDHR.

    Builds the Flask app, registers every endpoint group, installs the
    request hooks, and serves via a gevent WSGIServer on its own thread.
    """
    app = None

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

        self.template_folder = fhdhr.config.internal["paths"]["www_templates_dir"]

        self.fhdhr.logger.info("Loading Flask.")

        self.fhdhr.app = Flask("fHDHR", template_folder=self.template_folder)
        self.instance_id = str(uuid.uuid4())

        # Allow Internal API Usage
        self.fhdhr.app.testing = True
        self.fhdhr.api.client = self.fhdhr.app.test_client()

        # Set Secret Key For Sessions
        self.fhdhr.app.secret_key = self.fhdhr.config.dict["fhdhr"]["friendlyname"]

        # index_name -> {endpoint_name -> metadata}; exposed to templates
        # via the session in before_request().
        self.route_list = {}

        self.fhdhr.logger.info("Loading HTTP Pages Endpoints.")
        self.pages = fHDHR_Pages(fhdhr)
        self.add_endpoints(self.pages, "pages")

        self.fhdhr.logger.info("Loading HTTP Files Endpoints.")
        self.files = fHDHR_Files(fhdhr)
        self.add_endpoints(self.files, "files")

        self.fhdhr.logger.info("Loading HTTP Brython Endpoints.")
        self.brython = fHDHR_Brython(fhdhr)
        self.add_endpoints(self.brython, "brython")

        self.fhdhr.logger.info("Loading HTTP HDHR Endpoints.")
        self.hdhr = fHDHR_HDHR(fhdhr)
        self.add_endpoints(self.hdhr, "hdhr")

        self.fhdhr.logger.info("Loading HTTP RMG Endpoints.")
        self.rmg = fHDHR_RMG(fhdhr)
        self.add_endpoints(self.rmg, "rmg")

        self.fhdhr.logger.info("Loading HTTP API Endpoints.")
        self.api = fHDHR_API(fhdhr)
        self.add_endpoints(self.api, "api")

        self.fhdhr.logger.info("Loading HTTP Origin Endpoints.")
        self.origin_endpoints = self.fhdhr.originwrapper.origin.origin_web.fHDHR_Origin_Web(fhdhr)
        self.add_endpoints(self.origin_endpoints, "origin_endpoints")

        self.fhdhr.app.before_request(self.before_request)
        self.fhdhr.app.after_request(self.after_request)
        self.fhdhr.app.before_first_request(self.before_first_request)

        self.fhdhr.threads["flask"] = threading.Thread(target=self.run)

    def start(self):
        """Start the WSGI server on its dedicated thread."""
        self.fhdhr.logger.info("Flask HTTP Thread Starting")
        self.fhdhr.threads["flask"].start()

    def stop(self):
        """Stop the running WSGI server."""
        self.fhdhr.logger.info("Flask HTTP Thread Stopping")
        self.http.stop()

    def before_first_request(self):
        self.fhdhr.logger.info("HTTP Server Online.")

    def before_request(self):
        """Populate per-request session metadata before each request."""

        session["session_id"] = str(uuid.uuid4())
        session["instance_id"] = self.instance_id
        session["route_list"] = self.route_list

        session["is_internal_api"] = self.detect_internal_api(request)
        if session["is_internal_api"]:
            self.fhdhr.logger.debug("Client is using internal API call.")

        session["is_mobile"] = self.detect_mobile(request)
        if session["is_mobile"]:
            self.fhdhr.logger.debug("Client is a mobile device.")

        session["is_plexmediaserver"] = self.detect_plexmediaserver(request)
        if session["is_plexmediaserver"]:
            self.fhdhr.logger.debug("Client is a Plex Media Server.")

        # BUGFIX: this previously called detect_plexmediaserver() again, so
        # the DeviceAuth query parameter was never captured.
        session["deviceauth"] = self.detect_deviceauth(request)

        session["tuner_used"] = None

        session["restart"] = False

        self.fhdhr.logger.debug("Client %s requested %s Opening" % (request.method, request.path))

    def after_request(self, response):
        """Log request completion; trigger a restart/stop when requested."""

        # Close Tuner if it was in use, and did not close already
        # if session["tuner_used"] is not None:
        #    tuner = self.fhdhr.device.tuners.tuners[str(session["tuner_used"])]
        #    if tuner.tuner_lock.locked():
        #        self.fhdhr.logger.info("Shutting down Tuner #%s after Request." % session["tuner_used"])
        #        tuner.close()

        self.fhdhr.logger.debug("Client %s requested %s Closing" % (request.method, request.path))
        if not session["restart"]:
            return response
        else:
            return self.stop()

    def detect_internal_api(self, request):
        """True when the User-Agent marks the request as an internal fHDHR call."""
        user_agent = request.headers.get('User-Agent')
        if not user_agent:
            return False
        elif str(user_agent).lower().startswith("fhdhr"):
            return True
        else:
            return False

    def detect_deviceauth(self, request):
        """Return the DeviceAuth query parameter, or None when absent."""
        return request.args.get('DeviceAuth', default=None, type=str)

    def detect_mobile(self, request):
        """True when the User-Agent looks like a mobile device."""
        user_agent = request.headers.get('User-Agent')
        phones = ["iphone", "android", "blackberry"]
        if not user_agent:
            return False
        elif any(phone in user_agent.lower() for phone in phones):
            return True
        else:
            return False

    def detect_plexmediaserver(self, request):
        """True when the User-Agent identifies a Plex Media Server."""
        user_agent = request.headers.get('User-Agent')
        if not user_agent:
            return False
        elif str(user_agent).lower().startswith("plexmediaserver"):
            return True
        else:
            return False

    def add_endpoints(self, index_list, index_name):
        """Register every endpoint object found on *index_list* with Flask.

        Endpoint objects expose ``endpoints``/``endpoint_name`` plus optional
        metadata attributes; defaults are filled in with getattr() rather
        than the original eval()-based lookups.
        """

        if index_name not in list(self.route_list.keys()):
            self.route_list[index_name] = {}

        item_list = [x for x in dir(index_list) if self.isapath(x)]
        for item in item_list:
            endpoint_obj = getattr(index_list, item)

            endpoints = endpoint_obj.endpoints
            if isinstance(endpoints, str):
                endpoints = [endpoints]
            handler = endpoint_obj
            endpoint_name = endpoint_obj.endpoint_name

            endpoint_methods = getattr(endpoint_obj, "endpoint_methods", ['GET'])
            endpoint_access_level = getattr(endpoint_obj, "endpoint_access_level", 0)
            pretty_name = getattr(endpoint_obj, "pretty_name", endpoint_name)
            endpoint_default_parameters = getattr(endpoint_obj, "endpoint_default_parameters", {})

            self.fhdhr.logger.debug("Adding endpoint %s available at %s with %s methods." % (endpoint_name, ",".join(endpoints), ",".join(endpoint_methods)))

            if endpoint_name not in list(self.route_list[index_name].keys()):
                self.route_list[index_name][endpoint_name] = {}
            self.route_list[index_name][endpoint_name]["name"] = endpoint_name
            self.route_list[index_name][endpoint_name]["endpoints"] = endpoints
            self.route_list[index_name][endpoint_name]["endpoint_methods"] = endpoint_methods
            self.route_list[index_name][endpoint_name]["endpoint_access_level"] = endpoint_access_level
            self.route_list[index_name][endpoint_name]["endpoint_default_parameters"] = endpoint_default_parameters
            self.route_list[index_name][endpoint_name]["pretty_name"] = pretty_name

            for endpoint in endpoints:
                self.add_endpoint(endpoint=endpoint,
                                  endpoint_name=endpoint_name,
                                  handler=handler,
                                  methods=endpoint_methods)

    def isapath(self, item):
        """Filter out dunder attributes and known non-endpoint names."""
        not_a_page_list = ["fhdhr"]
        if item in not_a_page_list:
            return False
        elif item.startswith("__") and item.endswith("__"):
            return False
        else:
            return True

    def add_endpoint(self, endpoint=None, endpoint_name=None, handler=None, methods=['GET']):
        """Register one URL rule with the Flask app."""
        self.fhdhr.app.add_url_rule(endpoint, endpoint_name, handler, methods=methods)

    def run(self):
        """Blocking gevent WSGI serve loop; target of the flask thread."""

        self.http = WSGIServer(self.fhdhr.api.address_tuple,
                               self.fhdhr.app.wsgi_app,
                               log=self.fhdhr.logger)
        try:
            self.http.serve_forever()
            self.stop()
        except AttributeError:
            self.fhdhr.logger.info("HTTP Server Offline")
|
||||||
42
fHDHR_web/api/__init__.py
Normal file
@ -0,0 +1,42 @@
|
|||||||
|
|
||||||
|
from .root_url import Root_URL
|
||||||
|
from .startup_tasks import Startup_Tasks
|
||||||
|
|
||||||
|
from .cluster import Cluster
|
||||||
|
from .settings import Settings
|
||||||
|
from .channels import Channels
|
||||||
|
from .xmltv import xmlTV
|
||||||
|
from .m3u import M3U
|
||||||
|
from .w3u import W3U
|
||||||
|
from .epg import EPG
|
||||||
|
from .tuners import Tuners
|
||||||
|
from .debug import Debug_JSON
|
||||||
|
from .tools import API_Tools
|
||||||
|
|
||||||
|
from .route_list import Route_List
|
||||||
|
|
||||||
|
from .images import Images
|
||||||
|
|
||||||
|
|
||||||
|
class fHDHR_API():
    """Container that instantiates every /api/* endpoint handler.

    Each attribute is a callable endpoint object which fHDHR_HTTP_Server
    registers with Flask via add_endpoints().
    """

    def __init__(self, fhdhr):
        # Shared core object (config, logger, device access) passed to
        # every endpoint handler.
        self.fhdhr = fhdhr

        self.root_url = Root_URL(fhdhr)
        self.startup_tasks = Startup_Tasks(fhdhr)

        self.cluster = Cluster(fhdhr)
        self.settings = Settings(fhdhr)
        self.channels = Channels(fhdhr)
        self.xmltv = xmlTV(fhdhr)
        self.m3u = M3U(fhdhr)
        self.w3u = W3U(fhdhr)
        self.epg = EPG(fhdhr)
        self.tuners = Tuners(fhdhr)
        self.debug = Debug_JSON(fhdhr)
        self.tools = API_Tools(fhdhr)

        self.route_list = Route_List(fhdhr)

        self.images = Images(fhdhr)
|
||||||
156
fHDHR_web/api/channels.py
Normal file
@ -0,0 +1,156 @@
|
|||||||
|
from flask import request, redirect, Response, abort
|
||||||
|
import urllib.parse
|
||||||
|
import json
|
||||||
|
|
||||||
|
from fHDHR.tools import channel_sort
|
||||||
|
|
||||||
|
|
||||||
|
class Channels():
    """Channel management API: list, favorite, enable/disable, edit, rescan.

    Fixes applied versus the original:
    - ``channel.startstwith`` typo (raised AttributeError on every
      "favorite" request) -> ``startswith``.
    - The unknown-favorite warning read ``request.args['favorite']``, a
      parameter that never exists (KeyError) -> uses the channel value.
    - "Falied" typo in failure messages -> "Failed".
    - The identical field-conversion logic of "update" and "modify" is
      extracted into one helper.
    """
    endpoints = ["/api/channels"]
    endpoint_name = "api_channels"
    endpoint_methods = ["GET", "POST"]
    endpoint_default_parameters = {
        "method": "get"
        }

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def _parse_channel_update(self, fields):
        """Convert raw form/JSON channel fields into a typed update dict.

        Shared by the "update" (form) and "modify" (JSON list) methods.
        Recognized keys: name/callsign/thumbnail (str), number (split into
        number/subnumber on "."), enabled (bool-ish), favorite/HD (int).
        """
        updatedict = {}
        for key in list(fields.keys()):
            if key == "id":
                continue
            value = fields[key]
            if key in ["name", "callsign", "thumbnail"]:
                updatedict[key] = str(value)
            elif key in ["number"]:
                number = str(value)
                if "." in number:
                    updatedict["subnumber"] = number.split(".")[1]
                    updatedict["number"] = number.split(".")[0]
                else:
                    updatedict["number"] = number
            elif key in ["enabled"]:
                confvalue = value
                if str(confvalue).lower() in ["false"]:
                    confvalue = False
                elif str(confvalue).lower() in ["true"]:
                    confvalue = True
                updatedict[key] = confvalue
            elif key in ["favorite", "HD"]:
                updatedict[key] = int(value)
        return updatedict

    def get(self, *args):
        """Dispatch on the ``method`` query parameter and run the action."""

        method = request.args.get('method', default=None, type=str)
        redirect_url = request.args.get('redirect', default=None, type=str)

        if method == "get":
            # Build a per-number dict of channel info with stream/m3u URLs.
            channels_info = {}
            for fhdhr_id in [x["id"] for x in self.fhdhr.device.channels.get_channels()]:
                channel_obj = self.fhdhr.device.channels.list[fhdhr_id]
                channel_dict = channel_obj.dict.copy()
                channel_dict["m3u_url"] = channel_obj.m3u_url
                channel_dict["stream_url"] = channel_obj.api_stream_url
                channels_info[channel_obj.number] = channel_dict

            # Sort the channels
            sorted_channel_list = channel_sort(list(channels_info.keys()))
            sorted_chan_guide = []
            for channel in sorted_channel_list:
                sorted_chan_guide.append(channels_info[channel])

            channels_info_json = json.dumps(sorted_chan_guide, indent=4)

            return Response(status=200,
                            response=channels_info_json,
                            mimetype='application/json')

        elif method == "favorite":

            channel = request.args.get('channel', default=None, type=str)
            if not channel:
                if redirect_url:
                    return redirect("%s?retmessage=%s" % (redirect_url, urllib.parse.quote("%s Failed" % method)))
                else:
                    return "%s Failed" % method

            # "+2.1" favorite, "-2.1" unfavorite, "x2.1" toggle.
            if channel.startswith(("+", "-", "x")):

                channel_method = channel[0]
                channel_number = channel[1:]

                if str(channel_number) not in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number")]:
                    response = Response("Not Found", status=404)
                    response.headers["X-fHDHR-Error"] = "801 - Unknown Channel"
                    self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
                    abort(response)

                if channel_method == "+":
                    self.fhdhr.device.channels.set_channel_enablement("number", channel_number, channel_method)
                elif channel_method == "-":
                    self.fhdhr.device.channels.set_channel_enablement("number", channel_number, channel_method)
                elif channel_method == "x":
                    self.fhdhr.device.channels.set_channel_enablement("number", channel_number, "toggle")

            else:
                self.fhdhr.logger.warning("Unknown favorite command %s" % channel)
                return abort(200, "Not a valid favorite command")

        elif method in ["enable", "disable"]:
            channel = request.args.get('channel', default=None, type=str)
            if channel == "all":
                self.fhdhr.device.channels.set_channel_enablement_all(method)
            elif not channel or str(channel) not in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number")]:
                if redirect_url:
                    return redirect("%s?retmessage=%s" % (redirect_url, urllib.parse.quote("%s Failed" % method)))
                else:
                    return "%s Failed" % method
            else:
                self.fhdhr.device.channels.set_channel_enablement("number", channel, method)

        elif method == "update":
            # Single-channel edit submitted as a form.
            channel_id = request.form.get('id', None)
            updatedict = self._parse_channel_update(request.form)
            self.fhdhr.device.channels.set_channel_status("id", channel_id, updatedict)

        elif method == "modify":
            # Bulk edit: a JSON list of channel dicts, each carrying its id.
            channels_list = json.loads(request.form.get('channels', []))
            for channel in channels_list:
                updatedict = self._parse_channel_update(channel)
                # Each entry must carry its own id; previously a missing id
                # could silently reuse the previous iteration's value.
                channel_id = str(channel["id"])
                self.fhdhr.device.channels.set_channel_status("id", channel_id, updatedict)

        elif method == "scan":
            self.fhdhr.device.channels.get_channels(forceupdate=True)

        else:
            return "Invalid Method"

        if redirect_url:
            return redirect("%s?retmessage=%s" % (redirect_url, urllib.parse.quote("%s Success" % method)))
        else:
            if method == "scan":
                return redirect('/lineup_status.json')
            else:
                return "%s Success" % method
|
||||||
59
fHDHR_web/api/cluster.py
Normal file
@ -0,0 +1,59 @@
|
|||||||
|
from flask import request, redirect, Response
|
||||||
|
import urllib.parse
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
|
class Cluster():
    """Cluster management API: inspect, join, sync, and leave fHDHR clusters."""
    endpoints = ["/api/cluster"]
    endpoint_name = "api_cluster"
    endpoint_methods = ["GET", "POST"]
    endpoint_default_parameters = {
        "method": "get"
        }

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Dispatch on the ``method`` query parameter and run the action."""

        method = request.args.get('method', default="get", type=str)
        location = request.args.get("location", default=None, type=str)
        redirect_url = request.args.get('redirect', default=None, type=str)

        cluster_device = self.fhdhr.device.cluster

        if method == "get":
            # Serialize current cluster membership as pretty-printed JSON.
            return Response(status=200,
                            response=json.dumps(cluster_device.cluster(), indent=4),
                            mimetype='application/json')

        if method == "scan":
            self.fhdhr.device.ssdp.m_search()
        elif method == 'add':
            cluster_device.add(location)
        elif method == 'del':
            cluster_device.remove(location)
        elif method == 'sync':
            cluster_device.sync(location)
        elif method == 'leave':
            cluster_device.leave()
        elif method == 'disconnect':
            cluster_device.disconnect()
        elif method == 'alive':
            self.fhdhr.device.ssdp.do_alive(forcealive=True)
        else:
            return "Invalid Method"

        if redirect_url:
            return redirect("%s?retmessage=%s" % (redirect_url, urllib.parse.quote("%s Success" % method)))
        return "%s Success" % method
|
||||||
29
fHDHR_web/api/debug.py
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
from flask import request, Response
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
|
class Debug_JSON():
    """Expose basic internal state as JSON at /api/debug."""
    endpoints = ["/api/debug"]
    endpoint_name = "api_debug"
    endpoint_methods = ["GET", "POST"]

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Return a JSON snapshot of server URL, channel count, tuner status."""

        debug_info = {
            "base_url": request.url_root[:-1],
            "total channels": len(self.fhdhr.device.channels.list),
            "tuner status": self.fhdhr.device.tuners.status(),
            }

        return Response(status=200,
                        response=json.dumps(debug_info, indent=4),
                        mimetype='application/json')
|
||||||
60
fHDHR_web/api/epg.py
Normal file
@ -0,0 +1,60 @@
|
|||||||
|
from flask import Response, request, redirect
|
||||||
|
import urllib.parse
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
|
class EPG():
    """Methods to create xmltv.xml"""
    endpoints = ["/api/epg"]
    endpoint_name = "api_epg"
    endpoint_methods = ["GET", "POST"]

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Serve or refresh EPG data; action selected by ``method``."""

        # Supported methods: get (default), update, clearcache.
        method = request.args.get('method', default="get", type=str)

        # source picks the EPG backend; must be one of the configured
        # valid methods, otherwise reject early.
        source = request.args.get('source', default=self.fhdhr.config.dict["epg"]["def_method"], type=str)
        if source not in self.fhdhr.config.dict["epg"]["valid_epg_methods"]:
            return "%s Invalid xmltv method" % source

        redirect_url = request.args.get('redirect', default=None, type=str)

        if method == "get":

            epgdict = self.fhdhr.device.epg.get_epg(source)
            if source in ["blocks", "origin", self.fhdhr.config.dict["main"]["dictpopname"]]:
                # Origin-style EPG dicts are keyed by origin channel id;
                # re-key by channel number and overlay channel metadata.
                epgdict = epgdict.copy()
                for c in list(epgdict.keys()):
                    chan_obj = self.fhdhr.device.channels.get_channel_obj("origin_id", epgdict[c]["id"])
                    # pop/re-insert renames the key in place; the list()
                    # above keeps iteration safe while keys change.
                    epgdict[chan_obj.number] = epgdict.pop(c)
                    epgdict[chan_obj.number]["name"] = chan_obj.dict["name"]
                    epgdict[chan_obj.number]["callsign"] = chan_obj.dict["callsign"]
                    epgdict[chan_obj.number]["number"] = chan_obj.number
                    epgdict[chan_obj.number]["id"] = chan_obj.dict["origin_id"]
                    epgdict[chan_obj.number]["thumbnail"] = chan_obj.thumbnail

            epg_json = json.dumps(epgdict, indent=4)

            return Response(status=200,
                            response=epg_json,
                            mimetype='application/json')

        elif method == "update":
            self.fhdhr.device.epg.update(source)

        elif method == "clearcache":
            self.fhdhr.device.epg.clear_epg_cache(source)

        else:
            return "%s Invalid Method" % method

        # update/clearcache fall through to a success response.
        if redirect_url:
            return redirect("%s?retmessage=%s" % (redirect_url, urllib.parse.quote("%s Success" % method)))
        else:
            return "%s Success" % method
|
||||||
49
fHDHR_web/api/images.py
Normal file
@ -0,0 +1,49 @@
|
|||||||
|
from flask import request, Response, abort
|
||||||
|
|
||||||
|
|
||||||
|
class Images():
    """Serve generated or EPG-sourced images via /api/images."""
    endpoints = ["/api/images"]
    endpoint_name = "api_images"
    endpoint_methods = ["GET", "POST"]
    endpoint_default_parameters = {
        "method": "generate",
        "type": "content",
        "message": "Internal Image Handling"
        }

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Resolve the requested image and stream it, or abort with 501."""

        requested_method = request.args.get('method', default="get", type=str)

        image = None
        if requested_method == "generate":
            # Generate a placeholder image carrying a text message.
            requested_type = request.args.get('type', default="content", type=str)
            if requested_type in ["content", "channel"]:
                message = request.args.get('message', default="Unknown Request", type=str)
                image = self.fhdhr.device.images.generate_image(requested_type, message)

        elif requested_method == "get":
            # Fetch an image from the configured EPG source by id.
            source = request.args.get('source', default=self.fhdhr.config.dict["epg"]["method"], type=str)
            if source in self.fhdhr.config.dict["epg"]["valid_epg_methods"]:
                requested_type = request.args.get('type', default="content", type=str)
                if requested_type in ["content", "channel"]:
                    image_id = request.args.get('id', default=None, type=str)
                    if image_id:
                        image = self.fhdhr.device.images.get_epg_image(requested_type, image_id)

        else:
            image = self.fhdhr.device.images.generate_image("content", "Unknown Request")

        if not image:
            return abort(501, "Not a valid image request")

        imagemimetype = self.fhdhr.device.images.get_image_type(image)
        return Response(image, content_type=imagemimetype, direct_passthrough=True)
|
||||||
100
fHDHR_web/api/m3u.py
Normal file
@ -0,0 +1,100 @@
|
|||||||
|
from flask import Response, request, redirect
|
||||||
|
import urllib.parse
|
||||||
|
from io import StringIO
|
||||||
|
|
||||||
|
from fHDHR.tools import channel_sort
|
||||||
|
|
||||||
|
|
||||||
|
class M3U():
    """Expose the fHDHR channel lineup as an extended M3U playlist.

    ``GET /api/m3u?method=get`` (also mounted at ``/api/channels.m3u``)
    returns either the full enabled lineup (``channel=all``, the default)
    or a single channel's playlist (``channel=<number>``), served as an
    attachment download with the `audio/x-mpegurl` mimetype.
    """

    endpoints = ["/api/m3u", "/api/channels.m3u"]
    endpoint_name = "api_m3u"
    endpoint_methods = ["GET", "POST"]

    def __init__(self, fhdhr):
        # fhdhr: shared application object (config, device, logger, ...).
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        # request.url_root ends with "/"; strip it so URL joins below are clean.
        base_url = request.url_root[:-1]

        method = request.args.get('method', default="get", type=str)
        channel = request.args.get('channel', default="all", type=str)
        redirect_url = request.args.get('redirect', default=None, type=str)

        if method == "get":

            FORMAT_DESCRIPTOR = "#EXTM3U"
            RECORD_MARKER = "#EXTINF"

            # Build the playlist in memory; it is small enough to buffer whole.
            fakefile = StringIO()

            # Advertise the companion XMLTV guide via both common header keys.
            xmltvurl = ('%s/api/xmltv' % base_url)

            fakefile.write("%s url-tvg=\"%s\" x-tvg-url=\"%s\"\n" % (FORMAT_DESCRIPTOR, xmltvurl, xmltvurl))

            channel_items = []

            # Select the channel objects to emit; disabled channels are
            # silently skipped for "all" but reported for a single channel.
            if channel == "all":
                fileName = "channels.m3u"
                for fhdhr_id in [x["id"] for x in self.fhdhr.device.channels.get_channels()]:
                    channel_obj = self.fhdhr.device.channels.list[fhdhr_id]
                    if channel_obj.enabled:
                        channel_items.append(channel_obj)
            elif str(channel) in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number")]:
                channel_obj = self.fhdhr.device.channels.get_channel_obj("number", channel)
                fileName = "%s.m3u" % channel_obj.number
                if channel_obj.enabled:
                    channel_items.append(channel_obj)
                else:
                    return "Channel Disabled"
            else:
                return "Invalid Channel"

            channels_info = {}
            for channel_obj in channel_items:

                # Proxy logos through fHDHR when configured (or when the
                # channel has no thumbnail of its own).
                if self.fhdhr.config.dict["epg"]["images"] == "proxy" or not channel_obj.thumbnail:
                    logourl = ('%s/api/images?method=get&type=channel&id=%s' %
                               (base_url, str(channel_obj.dict['origin_id'])))
                else:
                    logourl = channel_obj.thumbnail

                channels_info[channel_obj.number] = {
                    "channelID": str(channel_obj.dict['origin_id']),
                    "tvg-chno": str(channel_obj.number),
                    "tvg-name": str(channel_obj.dict['name']),
                    "tvg-id": str(channel_obj.number),
                    "tvg-logo": logourl,
                    "group-title": self.fhdhr.config.dict["fhdhr"]["friendlyname"],
                    # NOTE: "group-titleb" is internal only; the startswith()
                    # filter below excludes it from attribute emission, and it
                    # is consumed as the display name after the comma instead.
                    "group-titleb": str(channel_obj.dict['name']),
                    "stream_url": "%s%s" % (base_url, channel_obj.api_stream_url)
                    }

            # Sort the channels
            sorted_channel_list = channel_sort(list(channels_info.keys()))
            sorted_chan_guide = []
            for channel in sorted_channel_list:
                sorted_chan_guide.append(channels_info[channel])

            for channel_item_dict in sorted_chan_guide:
                # One #EXTINF record per channel: attributes first, then
                # "group-title=...,<display name>" and the stream URL line.
                m3ustring = "%s:0 " % (RECORD_MARKER)
                for chan_key in list(channel_item_dict.keys()):
                    # group-title*/stream_url get special handling after the loop.
                    if not chan_key.startswith(tuple(["group-title", "stream_url"])):
                        m3ustring += "%s=\"%s\" " % (chan_key, channel_item_dict[chan_key])
                m3ustring += "group-title=\"%s\",%s\n" % (channel_item_dict["group-title"], channel_item_dict["group-titleb"])
                m3ustring += "%s\n" % channel_item_dict["stream_url"]
                fakefile.write(m3ustring)

            channels_m3u = fakefile.getvalue()

            resp = Response(status=200, response=channels_m3u, mimetype='audio/x-mpegurl')
            resp.headers["content-disposition"] = "attachment; filename=%s" % fileName
            return resp

        # Non-"get" methods fall through to a generic acknowledgement.
        if redirect_url:
            return redirect("%s?retmessage=%s" % (redirect_url, urllib.parse.quote("%s Success" % method)))
        else:
            return "%s Success" % method
|
||||||
32
fHDHR_web/api/root_url.py
Normal file
@ -0,0 +1,32 @@
|
|||||||
|
from flask import redirect, request, session
|
||||||
|
|
||||||
|
|
||||||
|
class Root_URL():
    """Dispatch requests for "/" to the interface appropriate for the client."""

    endpoints = ["/"]
    endpoint_name = "page_root_html"
    endpoint_methods = ["GET", "POST"]

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Route by User-Agent: devices get discovery XML, browsers the web UI."""
        user_agent = request.headers.get('User-Agent')

        # A missing User-Agent (or a Plex server) indicates a client device.
        # Note: session is only consulted when a User-Agent is present,
        # preserving the original short-circuit order.
        if not user_agent or session["is_plexmediaserver"]:
            # Plex Remote Media Grabber clients get the RMG interface.
            if self.fhdhr.config.dict["rmg"]["enabled"] and session["is_plexmediaserver"]:
                return redirect("/rmg")
            # Other client devices are assumed to want HDHomeRun discovery XML.
            return redirect("/hdhr/device.xml")

        # Anything else is likely a web browser.
        return redirect("/index")
|
||||||
37
fHDHR_web/api/route_list.py
Normal file
@ -0,0 +1,37 @@
|
|||||||
|
from flask import Response, request, redirect, session
|
||||||
|
import urllib.parse
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
|
class Route_List():
    """JSON listing of the web routes registered for this session.

    ``GET /api/routes?method=get`` returns the session's precomputed
    route list as pretty-printed JSON.
    """

    endpoints = ["/api/routes"]
    endpoint_name = "api_routes"
    endpoint_methods = ["GET", "POST"]

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        method = request.args.get('method', default="get", type=str)

        if method == "get":
            # The route list is pre-computed elsewhere and stashed on the session.
            return_json = json.dumps(session["route_list"], indent=4)
            return Response(status=200,
                            response=return_json,
                            mimetype='application/json')

        # FIX: the original had a redirect/"Success" tail after this point,
        # but both branches return, making it unreachable; it was removed.
        return "%s Invalid Method" % method
|
||||||
79
fHDHR_web/api/settings.py
Normal file
@ -0,0 +1,79 @@
|
|||||||
|
from flask import request, redirect, Response, session
|
||||||
|
import urllib.parse
|
||||||
|
import threading
|
||||||
|
import time
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
|
class Settings():
    """Read and modify fHDHR configuration via ``/api/settings``.

    Methods:
      * ``get`` - JSON dump of the config skeleton with current values
        (web-hidden items are masked).
      * ``update`` - write one config value from form data.
      * ``restart`` - schedule a delayed restart via a background thread.
      * ``restart_actual`` - set the session flag the main loop watches.
    """

    endpoints = ["/api/settings"]
    endpoint_name = "api_settings"
    endpoint_methods = ["GET", "POST"]

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

        # URL hit (after restart_sleep seconds) to perform the actual restart.
        self.restart_url = "/api/settings?method=restart_actual"
        self.restart_sleep = 5

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        method = request.args.get('method', default="get", type=str)
        redirect_url = request.args.get('redirect', default=None, type=str)

        if method == "get":
            # Walk the default-config skeleton so only known settings are
            # exposed, pulling the live value for each item.
            web_settings_dict = {}
            for config_section in list(self.fhdhr.config.conf_default.keys()):
                web_settings_dict[config_section] = {}

                for config_item in list(self.fhdhr.config.conf_default[config_section].keys()):
                    real_config_section = config_section
                    # The origin plugin's section is stored internally as "origin".
                    if config_section == self.fhdhr.config.dict["main"]["dictpopname"]:
                        real_config_section = "origin"
                    web_settings_dict[config_section][config_item] = {
                        "value": self.fhdhr.config.dict[real_config_section][config_item],
                        }
                    # Mask secrets flagged as hidden from the web UI.
                    if self.fhdhr.config.conf_default[config_section][config_item]["config_web_hidden"]:
                        web_settings_dict[config_section][config_item]["value"] = "***********"

            return_json = json.dumps(web_settings_dict, indent=4)

            return Response(status=200,
                            response=return_json,
                            mimetype='application/json')

        elif method == "update":
            config_section = request.form.get('config_section', None)
            config_name = request.form.get('config_name', None)
            config_value = request.form.get('config_value', None)

            if not config_section or not config_name or not config_value:
                if redirect_url:
                    return redirect("%s?retmessage=%s" % (redirect_url, urllib.parse.quote("%s Failed" % method)))
                else:
                    # BUGFIX: message previously read "Falied".
                    return "%s Failed" % method

            if config_section == "origin":
                config_section = self.fhdhr.config.dict["main"]["dictpopname"]

            self.fhdhr.config.write(config_section, config_name, config_value)

        elif method == "restart":
            restart_thread = threading.Thread(target=self.restart_thread)
            restart_thread.start()
            message = "Restarting in %s seconds" % self.restart_sleep
            # BUGFIX: previously redirected unconditionally, producing a
            # literal "None?retmessage=..." URL when no redirect was given.
            if redirect_url:
                return redirect("%s?retmessage=%s" % (redirect_url, urllib.parse.quote(message)))
            else:
                return message

        elif method == "restart_actual":
            # Flag picked up by the main loop to perform the actual restart.
            session["restart"] = True

        # "update", "restart_actual", and unknown methods fall through here.
        if redirect_url:
            return redirect("%s?retmessage=%s" % (redirect_url, urllib.parse.quote("%s Success" % method)))
        else:
            return "%s Success" % method

    def restart_thread(self):
        """Wait long enough for the HTTP response to flush, then trigger restart."""
        time.sleep(self.restart_sleep)
        self.fhdhr.api.get(self.restart_url)
|
||||||
34
fHDHR_web/api/startup_tasks.py
Normal file
@ -0,0 +1,34 @@
|
|||||||
|
|
||||||
|
|
||||||
|
class Startup_Tasks():
    """Kick off channel-scan and EPG-update work when the webserver starts."""

    endpoints = ["/api/startup_tasks"]
    endpoint_name = "api_startup_tasks"
    endpoint_methods = ["GET", "POST"]

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

        # Internal API URLs used to trigger the work.
        self.epg_update_url = "/api/epg?method=update"
        self.channel_update_url = "/api/channels?method=scan"

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        # Hit the Channel Update API when no scan has ever run, or when
        # configured to scan on every start. (Lazy `or` preserves the
        # original's evaluation order.)
        haseverscanned = self.fhdhr.db.get_fhdhr_value("channels", "scanned_time")
        if (not haseverscanned) or self.fhdhr.config.dict["fhdhr"]["chanscan_on_start"]:
            self.fhdhr.api.get(self.channel_update_url)

        # Hit the EPG Update API once per configured EPG source.
        for epg_method in self.fhdhr.device.epg.epg_methods:
            self.fhdhr.api.get("%s&source=%s" % (self.epg_update_url, epg_method))

        return "Success"
|
||||||
58
fHDHR_web/api/tools.py
Normal file
@ -0,0 +1,58 @@
|
|||||||
|
from flask import Response, request, redirect
|
||||||
|
import urllib.parse
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
|
class API_Tools():
    """Helper endpoints backing the web UI "tools" page.

    Methods:
      * ``get`` - placeholder JSON payload for the tools page.
      * ``prettyjson`` - fetch a remote JSON document (form field ``url``)
        and return it re-indented.
    """

    endpoints = ["/api/tools"]
    endpoint_name = "api_tools"
    endpoint_methods = ["GET", "POST"]
    endpoint_default_parameters = {
        "method": "get"
        }

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        method = request.args.get('method', default="get", type=str)

        if method == "get":

            return_json = json.dumps({"tools": "api for tools page"}, indent=4)

            return Response(status=200,
                            response=return_json,
                            mimetype='application/json')

        elif method == "prettyjson":

            dirty_json_url = request.form.get('url', None)

            try:
                json_url_req = self.fhdhr.web.session.get(dirty_json_url)
                json_url_req.raise_for_status()
                json_resp = json_url_req.json()
            except self.fhdhr.web.exceptions.HTTPError as err:
                # Best-effort: report the fetch error as the JSON payload.
                self.fhdhr.logger.error('Error while getting stations: %s' % err)
                json_resp = {"error": 'Error while getting stations: %s' % err}

            return_json = json.dumps(json_resp, indent=4)

            return Response(status=200,
                            response=return_json,
                            mimetype='application/json')

        # FIX: the original had a redirect/"Success" tail after this point,
        # but every branch above returns, making it unreachable; removed
        # (along with the then-unused `redirect` query parameter read).
        return "%s Invalid Method" % method
|
||||||
152
fHDHR_web/api/tuners.py
Normal file
@ -0,0 +1,152 @@
|
|||||||
|
from flask import Response, request, redirect, abort, stream_with_context, session
|
||||||
|
import urllib.parse
|
||||||
|
import json
|
||||||
|
|
||||||
|
from fHDHR.exceptions import TunerError
|
||||||
|
|
||||||
|
|
||||||
|
class Tuners():
    """Tuner control and streaming endpoint (``/api/tuners``).

    Methods:
      * ``direct``/``ffmpeg``/``vlc`` - grab a tuner and stream a channel.
      * ``close`` - release a specific tuner.
      * ``scan`` - grab a tuner and run a channel scan.
      * ``status`` - JSON status for one tuner or all tuners.
    """

    endpoints = ["/api/tuners"]
    endpoint_name = "api_tuners"
    endpoint_methods = ["GET", "POST"]
    endpoint_default_parameters = {
        "method": "status"
        }

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

        # Default transcode quality from config, normalised to lowercase.
        self.quality = self.fhdhr.config.dict["streaming"]["quality"]
        if self.quality:
            self.quality = str(self.quality).lower()

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        client_address = request.remote_addr

        accessed_url = request.args.get('accessed', default=request.url, type=str)

        method = request.args.get('method', default=self.fhdhr.config.dict["fhdhr"]["stream_type"], type=str)

        tuner_number = request.args.get('tuner', default=None, type=str)

        redirect_url = request.args.get('redirect', default=None, type=str)

        if method in ["direct", "ffmpeg", "vlc"]:

            channel_number = request.args.get('channel', None, type=str)
            if not channel_number:
                return "Missing Channel"

            # Reject unknown channel numbers with an HDHR-style error header.
            if str(channel_number) not in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number")]:
                response = Response("Not Found", status=404)
                response.headers["X-fHDHR-Error"] = "801 - Unknown Channel"
                self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
                abort(response)

            channel_dict = self.fhdhr.device.channels.get_channel_dict("number", channel_number)
            if not channel_dict["enabled"]:
                response = Response("Service Unavailable", status=503)
                response.headers["X-fHDHR-Error"] = str("806 - Tune Failed")
                self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
                abort(response)

            duration = request.args.get('duration', default=0, type=int)

            transcode = request.args.get('transcode', default=self.quality, type=str)
            # BUGFIX: a missing comma after "low" previously concatenated the
            # adjacent string literals into "lowheavy", so both "low" and
            # "heavy" were rejected as transcode profiles.
            valid_transcode_types = [
                None, "high", "medium", "low",
                "heavy", "mobile", "internet720", "internet480", "internet360", "internet240"
                ]
            if transcode not in valid_transcode_types:
                response = Response("Service Unavailable", status=503)
                response.headers["X-fHDHR-Error"] = "802 - Unknown Transcode Profile"
                self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
                abort(response)

            stream_args = {
                "channel": channel_number,
                "method": method,
                "duration": duration,
                "transcode": transcode,
                "accessed": accessed_url,
                "client": client_address,
                "client_id": session["session_id"]
                }

            # Grab a specific tuner if requested, otherwise the first free one.
            try:
                if not tuner_number:
                    tunernum = self.fhdhr.device.tuners.first_available(channel_number)
                else:
                    tunernum = self.fhdhr.device.tuners.tuner_grab(tuner_number, channel_number)
            except TunerError as e:
                self.fhdhr.logger.info("A %s stream request for channel %s was rejected due to %s"
                                       % (stream_args["method"], str(stream_args["channel"]), str(e)))
                response = Response("Service Unavailable", status=503)
                response.headers["X-fHDHR-Error"] = str(e)
                self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
                abort(response)

            tuner = self.fhdhr.device.tuners.tuners[str(tunernum)]

            # Resolve the upstream stream details; release the tuner on failure.
            try:
                stream_args = self.fhdhr.device.tuners.get_stream_info(stream_args)
            except TunerError as e:
                self.fhdhr.logger.info("A %s stream request for channel %s was rejected due to %s"
                                       % (stream_args["method"], str(stream_args["channel"]), str(e)))
                response = Response("Service Unavailable", status=503)
                response.headers["X-fHDHR-Error"] = str(e)
                self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
                tuner.close()
                abort(response)

            self.fhdhr.logger.info("Tuner #%s to be used for stream." % tunernum)
            tuner.set_status(stream_args)
            session["tuner_used"] = tunernum

            return Response(stream_with_context(tuner.get_stream(stream_args, tuner)), mimetype=stream_args["content_type"])

        elif method == "close":

            if not tuner_number or str(tuner_number) not in list(self.fhdhr.device.tuners.tuners.keys()):
                return "%s Invalid tuner" % str(tuner_number)

            session["tuner_used"] = tuner_number

            tuner = self.fhdhr.device.tuners.tuners[str(tuner_number)]
            tuner.close()

        elif method == "scan":

            if not tuner_number:
                tunernum = self.fhdhr.device.tuners.first_available(None)
            else:
                tunernum = self.fhdhr.device.tuners.tuner_grab(tuner_number, None)
            tuner = self.fhdhr.device.tuners.tuners[str(tunernum)]
            tuner.channel_scan(grabbed=True)

        elif method == "status":

            if not tuner_number:
                tuner_status = self.fhdhr.device.tuners.status()
            elif str(tuner_number) in list(self.fhdhr.device.tuners.tuners.keys()):
                tuner_status = self.fhdhr.device.tuners.tuners[str(tuner_number)].get_status()
            else:
                tuner_status = ["Invalid Tuner %s" % tuner_number]

            tuner_status_json = json.dumps(tuner_status, indent=4)

            return Response(status=200,
                            response=tuner_status_json,
                            mimetype='application/json')

        else:
            return "%s Invalid Method" % method

        # "close" and "scan" acknowledge via redirect or plain text.
        if redirect_url:
            return redirect("%s?retmessage=%s" % (redirect_url, urllib.parse.quote("%s Success" % method)))
        else:
            return "%s Success" % method
|
||||||
89
fHDHR_web/api/w3u.py
Normal file
@ -0,0 +1,89 @@
|
|||||||
|
from flask import Response, request, redirect
|
||||||
|
import urllib.parse
|
||||||
|
import json
|
||||||
|
|
||||||
|
from fHDHR.tools import channel_sort
|
||||||
|
|
||||||
|
|
||||||
|
class W3U():
    """Expose the fHDHR channel lineup in W3U (JSON playlist) format.

    ``GET /api/w3u?method=get`` returns either the full enabled lineup
    (``channel=all``, the default) or a single channel (``channel=<number>``)
    as a JSON attachment download.
    """

    endpoints = ["/api/w3u"]
    endpoint_name = "api_w3u"
    endpoint_methods = ["GET", "POST"]

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        # request.url_root ends with "/"; strip it so URL joins are clean.
        base_url = request.url_root[:-1]

        method = request.args.get('method', default="get", type=str)
        channel = request.args.get('channel', default="all", type=str)
        redirect_url = request.args.get('redirect', default=None, type=str)

        if method == "get":

            channel_info_m3u = {
                "name": self.fhdhr.config.dict["fhdhr"]["friendlyname"],
                "image": '%s/favicon.ico' % base_url,
                "epg": '%s/api/xmltv' % base_url,
                "stations": []
                }

            channel_items = []

            # Select channel objects; disabled channels are skipped for "all"
            # but reported when a specific channel is requested.
            if channel == "all":
                fileName = "channels.w3u"
                for fhdhr_id in [x["id"] for x in self.fhdhr.device.channels.get_channels()]:
                    channel_obj = self.fhdhr.device.channels.list[fhdhr_id]
                    if channel_obj.enabled:
                        channel_items.append(channel_obj)
            elif str(channel) in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number")]:
                channel_obj = self.fhdhr.device.channels.get_channel_obj("number", channel)
                fileName = "%s.w3u" % channel_obj.number
                if channel_obj.enabled:
                    channel_items.append(channel_obj)
                else:
                    return "Channel Disabled"
            else:
                return "Invalid Channel"

            channels_info = {}

            for channel_obj in channel_items:

                # Proxy logos through fHDHR when configured (or when the
                # channel has no thumbnail of its own).
                if self.fhdhr.config.dict["epg"]["images"] == "proxy" or not channel_obj.thumbnail:
                    logourl = ('%s/api/images?method=get&type=channel&id=%s' %
                               (base_url, str(channel_obj.dict['origin_id'])))
                else:
                    logourl = channel_obj.thumbnail

                channels_info[channel_obj.number] = {
                    "name": str(channel_obj.dict['name']),
                    "url": "%s%s" % (base_url, channel_obj.api_stream_url),
                    "epgId": str(channel_obj.dict['origin_id']),
                    "image": logourl,
                    }

            # Sort the channels
            sorted_channel_list = channel_sort(list(channels_info.keys()))
            for channel in sorted_channel_list:
                channel_info_m3u["stations"].append(channels_info[channel])

            channels_info_json = json.dumps(channel_info_m3u, indent=4)

            resp = Response(status=200, response=channels_info_json, mimetype='application/json')
            resp.headers["content-disposition"] = "attachment; filename=%s" % fileName
            return resp

            # FIX: the original had a second, unreachable `return Response(...)`
            # here after `return resp`; it was removed.

        # Non-"get" methods fall through to a generic acknowledgement.
        if redirect_url:
            return redirect("%s?retmessage=%s" % (redirect_url, urllib.parse.quote("%s Success" % method)))
        else:
            return "%s Success" % method
|
||||||
194
fHDHR_web/api/xmltv.py
Normal file
@ -0,0 +1,194 @@
|
|||||||
|
from flask import Response, request, redirect, session
|
||||||
|
import xml.etree.ElementTree
|
||||||
|
from io import BytesIO
|
||||||
|
import urllib.parse
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
from fHDHR.tools import sub_el
|
||||||
|
|
||||||
|
|
||||||
|
class xmlTV():
    """Methods to create xmltv.xml

    Serves the EPG as an XMLTV document at ``/api/xmltv`` (and
    ``/xmltv.xml``) and exposes ``update``/``clearcache`` maintenance
    methods for a given EPG source.
    """
    endpoints = ["/api/xmltv", "/xmltv.xml"]
    endpoint_name = "api_xmltv"
    endpoint_methods = ["GET", "POST"]

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

        # Per-EPG-source timezone offset string appended to XMLTV timestamps;
        # a source-specific config value overrides the global [epg] default.
        self.xmltv_offset = {}
        for epg_method in list(self.fhdhr.device.epg.epg_handling.keys()):
            if epg_method in list(self.fhdhr.config.dict.keys()):
                if "xmltv_offset" in list(self.fhdhr.config.dict[epg_method].keys()):
                    self.xmltv_offset[epg_method] = self.fhdhr.config.dict[epg_method]["xmltv_offset"]
            if epg_method not in list(self.xmltv_offset.keys()):
                self.xmltv_offset[epg_method] = self.fhdhr.config.dict["epg"]["xmltv_offset"]

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        # Optional device-auth gate mirroring HDHomeRun behaviour.
        if self.fhdhr.config.dict["fhdhr"]["require_auth"]:
            if session["deviceauth"] != self.fhdhr.config.dict["fhdhr"]["device_auth"]:
                return "not subscribed"

        # request.url_root ends with "/"; strip it for clean URL joins.
        base_url = request.url_root[:-1]

        method = request.args.get('method', default="get", type=str)

        source = request.args.get('source', default=self.fhdhr.config.dict["epg"]["def_method"], type=str)
        if source not in self.fhdhr.config.dict["epg"]["valid_epg_methods"]:
            return "%s Invalid xmltv method" % source

        redirect_url = request.args.get('redirect', default=None, type=str)

        if method == "get":

            epgdict = self.fhdhr.device.epg.get_epg(source)

            # For origin-keyed sources, rekey the guide by channel number and
            # backfill channel metadata from the channel objects.
            # NOTE(review): create_xmltv() repeats this same transformation;
            # it appears idempotent (second pass pops and reinserts the same
            # keys), but the duplication looks unintentional - confirm.
            if source in ["blocks", "origin", self.fhdhr.config.dict["main"]["dictpopname"]]:
                epgdict = epgdict.copy()
                for c in list(epgdict.keys()):
                    chan_obj = self.fhdhr.device.channels.get_channel_obj("origin_id", epgdict[c]["id"])
                    epgdict[chan_obj.number] = epgdict.pop(c)
                    epgdict[chan_obj.number]["name"] = chan_obj.dict["name"]
                    epgdict[chan_obj.number]["callsign"] = chan_obj.dict["callsign"]
                    epgdict[chan_obj.number]["number"] = chan_obj.number
                    epgdict[chan_obj.number]["id"] = chan_obj.dict["origin_id"]
                    epgdict[chan_obj.number]["thumbnail"] = chan_obj.thumbnail

            xmltv_xml = self.create_xmltv(base_url, epgdict, source)

            return Response(status=200,
                            response=xmltv_xml,
                            mimetype='application/xml')

        elif method == "update":
            self.fhdhr.device.epg.update(source)

        elif method == "clearcache":
            self.fhdhr.device.epg.clear_epg_cache(source)

        else:
            return "%s Invalid Method" % method

        # "update" and "clearcache" acknowledge via redirect or plain text.
        if redirect_url:
            return redirect("%s?retmessage=%s" % (redirect_url, urllib.parse.quote("%s Success" % method)))
        else:
            return "%s Success" % method

    def xmltv_headers(self):
        """This method creates the XML headers for our xmltv"""
        xmltvgen = xml.etree.ElementTree.Element('tv')
        xmltvgen.set('source-info-url', self.fhdhr.config.dict["fhdhr"]["friendlyname"])
        xmltvgen.set('source-info-name', self.fhdhr.config.dict["main"]["servicename"])
        xmltvgen.set('generator-info-name', 'fHDHR')
        xmltvgen.set('generator-info-url', 'fHDHR/%s' % self.fhdhr.config.dict["main"]["reponame"])
        return xmltvgen

    def xmltv_file(self, xmltvgen):
        """This method is used to close out the xml file"""
        xmltvfile = BytesIO()
        xmltvfile.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
        xmltvfile.write(xml.etree.ElementTree.tostring(xmltvgen, encoding='UTF-8'))
        return xmltvfile.getvalue()

    def xmltv_empty(self):
        """This method is called when creation of a full xmltv is not possible"""
        return self.xmltv_file(self.xmltv_headers())

    def timestamp_to_datetime(self, time_start, time_end, source):
        """Convert epoch start/end timestamps to XMLTV 'YYYYmmddHHMMSS <offset>' strings."""
        xmltvtimetamps = {}
        source_offset = self.xmltv_offset[source]
        for time_item, time_value in zip(["time_start", "time_end"], [time_start, time_end]):
            timestampval = datetime.datetime.fromtimestamp(time_value).strftime('%Y%m%d%H%M%S')
            xmltvtimetamps[time_item] = "%s %s" % (timestampval, source_offset)
        return xmltvtimetamps

    def create_xmltv(self, base_url, epgdict, source):
        """Build the full XMLTV document (channels then programmes) as bytes."""
        if not epgdict:
            return self.xmltv_empty()
        epgdict = epgdict.copy()

        out = self.xmltv_headers()

        # Rekey origin-keyed guides by channel number (see note in get()).
        if source in ["origin", "blocks", self.fhdhr.config.dict["main"]["dictpopname"]]:
            for c in list(epgdict.keys()):
                chan_obj = self.fhdhr.device.channels.get_channel_obj("origin_id", epgdict[c]["id"])
                epgdict[chan_obj.number] = epgdict.pop(c)
                epgdict[chan_obj.number]["name"] = chan_obj.dict["name"]
                epgdict[chan_obj.number]["callsign"] = chan_obj.dict["callsign"]
                epgdict[chan_obj.number]["number"] = chan_obj.number
                epgdict[chan_obj.number]["id"] = chan_obj.dict["origin_id"]
                epgdict[chan_obj.number]["thumbnail"] = chan_obj.thumbnail

        # First pass: one <channel> element per channel with several
        # display-name variants for broad client compatibility.
        for c in list(epgdict.keys()):

            c_out = sub_el(out, 'channel', id=str(epgdict[c]['number']))
            sub_el(c_out, 'display-name',
                   text='%s %s' % (epgdict[c]['number'], epgdict[c]['callsign']))
            sub_el(c_out, 'display-name',
                   text='%s %s %s' % (epgdict[c]['number'], epgdict[c]['callsign'], str(epgdict[c]['id'])))
            sub_el(c_out, 'display-name', text=epgdict[c]['number'])
            sub_el(c_out, 'display-name', text=epgdict[c]['callsign'])
            sub_el(c_out, 'display-name', text=epgdict[c]['name'])

            if self.fhdhr.config.dict["epg"]["images"] == "proxy":
                sub_el(c_out, 'icon', src=("%s/api/images?method=get&type=channel&id=%s" % (base_url, epgdict[c]['id'])))
            else:
                sub_el(c_out, 'icon', src=(epgdict[c]["thumbnail"]))

        # Second pass: one <programme> element per listing entry.
        for channelnum in list(epgdict.keys()):

            channel_listing = epgdict[channelnum]['listing']

            for program in channel_listing:

                xmltvtimetamps = self.timestamp_to_datetime(program['time_start'], program['time_end'], source)

                prog_out = sub_el(out, 'programme',
                                  start=xmltvtimetamps['time_start'],
                                  stop=xmltvtimetamps['time_end'],
                                  channel=str(channelnum))

                sub_el(prog_out, 'title', lang='en', text=program['title'])

                sub_el(prog_out, 'desc', lang='en', text=program['description'])

                # NOTE(review): every sub-title is prefixed "Movie: " even for
                # non-movie programmes - confirm this is intentional.
                sub_el(prog_out, 'sub-title', lang='en', text='Movie: %s' % program['sub-title'])

                sub_el(prog_out, 'length', units='minutes', text=str(int(program['duration_minutes'])))

                for f in program['genres']:
                    sub_el(prog_out, 'category', lang='en', text=f)
                    sub_el(prog_out, 'genre', lang='en', text=f)

                if program['seasonnumber'] and program['episodenumber']:
                    s_ = int(str(program['seasonnumber']), 10)
                    e_ = int(str(program['episodenumber']), 10)
                    sub_el(prog_out, 'episode-num', system='dd_progid',
                           text=str(program['id']))
                    sub_el(prog_out, 'episode-num', system='common',
                           text='S%02dE%02d' % (s_, e_))
                    # xmltv_ns numbering is zero-based.
                    sub_el(prog_out, 'episode-num', system='xmltv_ns',
                           text='%d.%d.' % (int(s_)-1, int(e_)-1))
                    # NOTE(review): the system value 'SxxExx">S' looks like a
                    # markup-injection workaround for some client - confirm.
                    sub_el(prog_out, 'episode-num', system='SxxExx">S',
                           text='S%02dE%02d' % (s_, e_))

                # Thumbnail: real/proxied image when available, otherwise a
                # generated placeholder carrying the programme title.
                if program["thumbnail"]:
                    if self.fhdhr.config.dict["epg"]["images"] == "proxy":
                        sub_el(prog_out, 'icon', src=("%s/api/images?method=get&type=content&id=%s" % (base_url, program['id'])))
                    else:
                        sub_el(prog_out, 'icon', src=(program["thumbnail"]))
                else:
                    sub_el(prog_out, 'icon', src=("%s/api/images?method=generate&type=content&message=%s" % (base_url, urllib.parse.quote(program['title']))))

                if program['rating']:
                    rating_out = sub_el(prog_out, 'rating', system="MPAA")
                    sub_el(rating_out, 'value', text=program['rating'])

                if program['isnew']:
                    sub_el(prog_out, 'new')

        return self.xmltv_file(out)
|
||||||
17
fHDHR_web/brython/__init__.py
Normal file
@ -0,0 +1,17 @@
|
|||||||
|
|
||||||
|
|
||||||
|
from .brython import Brython
|
||||||
|
from .brython_stdlib import Brython_stdlib
|
||||||
|
|
||||||
|
from .brython_bry import Brython_bry
|
||||||
|
|
||||||
|
|
||||||
|
class fHDHR_Brython():
    """Container wiring up the brython file-serving endpoints."""

    def __init__(self, fhdhr):
        """Instantiate one handler per served brython asset."""
        self.fhdhr = fhdhr
        # Runtime script, app script, and stdlib bundle handlers.
        self.brython = Brython(fhdhr)
        self.brython_bry = Brython_bry(fhdhr)
        self.brython_stdlib = Brython_stdlib(fhdhr)
|
||||||
14160
fHDHR_web/brython/brython.js
Normal file
21
fHDHR_web/brython/brython.py
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
from flask import send_from_directory
|
||||||
|
|
||||||
|
import pathlib
|
||||||
|
|
||||||
|
|
||||||
|
class Brython():
    """Serves the brython.js runtime from the web directory."""

    endpoints = ["/brython.js"]
    endpoint_name = "file_brython_js"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr
        # Brython assets live under <fHDHR_web_dir>/brython; resolve once.
        web_dir = self.fhdhr.config.internal["paths"]["fHDHR_web_dir"]
        self.brython_path = pathlib.Path(web_dir).joinpath('brython')

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Return brython.js with a JavaScript mimetype."""
        return send_from_directory(
            self.brython_path, 'brython.js', mimetype='text/javascript')
|
||||||
19
fHDHR_web/brython/brython_bry.py
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
from flask import send_from_directory
|
||||||
|
|
||||||
|
import pathlib
|
||||||
|
|
||||||
|
|
||||||
|
class Brython_bry():
    """Serves the application's brython source as /brython.bry."""

    endpoints = ["/brython.bry"]
    endpoint_name = "file_brython_bry"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr
        # Brython assets live under <fHDHR_web_dir>/brython; resolve once.
        web_dir = self.fhdhr.config.internal["paths"]["fHDHR_web_dir"]
        self.brython_path = pathlib.Path(web_dir).joinpath('brython')

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Return the brython_code.py source for in-browser execution."""
        return send_from_directory(self.brython_path, 'brython_code.py')
|
||||||
103
fHDHR_web/brython/brython_code.py
Normal file
@ -0,0 +1,103 @@
|
|||||||
|
from browser import document, bind # alert, window
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
|
def chan_edit_data(items, channel_id):
    """Collect changed channel-editor form values into a list of dicts.

    items: iterable of DOM input elements. A hidden "id" input begins each
    channel's group of fields, so encountering one flushes the previous group.
    channel_id: restrict the result to one channel id, or "all" for every one.

    Returns a list of {"id": ..., <field>: <new value>, ...} dicts containing
    only the fields whose submitted value differs from the element's
    placeholder (which holds the channel's current value).
    """
    chanlist = []
    chandict = {}

    for element in items:
        if element.name == "id":
            # A new "id" input starts the next group; keep the previous one
            # only if it has an id plus at least one changed field.
            if len(chandict.keys()) >= 2 and "id" in list(chandict.keys()):
                chanlist.append(chandict)
            chandict = {"id": element.value}

        # Determine the submitted value: "enabled" checkboxes keep the raw
        # bool, other checkboxes are stored as 0/1, text inputs as strings.
        if element.type == "checkbox":
            if element.name in ["enabled"]:
                save_val = element.checked
            else:
                save_val = int(element.checked)
        else:
            save_val = element.value

        if element.name != "id":
            # Compare against the current value held in the placeholder and
            # record only genuine changes.
            cur_value = element.placeholder
            if element.type == "checkbox":
                if element.name in ["enabled"]:
                    cur_value = element.placeholder
                else:
                    cur_value = int(element.placeholder)
            if str(save_val) != str(cur_value):
                chandict[element.name] = save_val

    # BUGFIX: flush the final group; the loop above only appends a group when
    # the NEXT "id" element arrives, so the last channel's edits were lost.
    if len(chandict.keys()) >= 2 and "id" in list(chandict.keys()):
        chanlist.append(chandict)

    if channel_id != "all":
        # BUGFIX: was `chanlist == [...]` (a no-op comparison), so the
        # single-channel filter never applied.
        chanlist = [x for x in chanlist if x["id"] == channel_id]

    return chanlist
|
||||||
|
|
||||||
|
|
||||||
|
def chan_edit_postform(chanlist):
    """Build and attach a hidden POST form carrying *chanlist* as JSON.

    The caller submits the returned form to apply channel modifications and
    be redirected back to the channel editor page.
    """
    postForm = document.createElement('form')
    postForm.method = "POST"
    postForm.action = "/api/channels?method=modify&redirect=/channels_editor"
    # NOTE(review): this only assigns a string property named
    # "setRequestHeader" on the form element; it does not set an HTTP
    # header -- confirm whether it can be removed.
    postForm.setRequestHeader = "('Content-Type', 'application/json')"

    # Single hidden field "channels" holds the JSON-encoded change list.
    postData = document.createElement('input')
    postData.type = 'hidden'
    postData.name = "channels"
    postData.value = json.dumps(chanlist)

    postForm.appendChild(postData)
    # The form must be attached to the DOM before it can be submitted.
    document.body.appendChild(postForm)
    return postForm
|
||||||
|
|
||||||
|
|
||||||
|
@bind("#Chan_Edit_Reset", "submit")
def chan_edit_reset(evt):
    """Handle a reset-form submit: post the changed ".reset" fields for the clicked channel."""
    target_id = str(evt.currentTarget.children[0].id).replace("reset_", "")
    changes = chan_edit_data(document.select(".reset"), target_id)
    chan_edit_postform(changes).submit()
    evt.preventDefault()
|
||||||
|
|
||||||
|
|
||||||
|
@bind("#Chan_Edit_Modify", "submit")
def chan_edit_modify(evt):
    """Handle a modify-form submit: post the changed ".channels" fields for the clicked channel."""
    target_id = str(evt.currentTarget.children[0].id).replace("update_", "")
    changes = chan_edit_data(document.select(".channels"), target_id)
    chan_edit_postform(changes).submit()
    evt.preventDefault()
|
||||||
|
|
||||||
|
|
||||||
|
@bind("#Chan_Edit_Enable_Toggle", "click")
def chan_edit_enable(event):
    """Toggle every "enabled" checkbox and flip the toggle button's label."""
    # The button's value stores the NEXT state to apply ("1" -> enable all).
    enable_bool = bool(int(document["enable_button"].value))
    for element in document.get(selector='input[type="checkbox"]'):
        if element.name == "enabled":
            element.checked = enable_bool
            # NOTE(review): the favorite toggle writes int(enable_bool) here;
            # this one writes the bool itself -- confirm which is intended.
            element.value = enable_bool

    # Swap stored value/label so the next click performs the opposite action.
    if not enable_bool:
        document["enable_button"].value = "1"
        document["enable_button"].text = "Enable All"
    else:
        document["enable_button"].value = "0"
        document["enable_button"].text = "Disable All"
|
||||||
|
|
||||||
|
|
||||||
|
@bind("#Chan_Edit_Favorite_Toggle", "click")
def chan_edit_favorite(event):
    """Toggle every "favorite" checkbox and flip the toggle button's label."""
    # The button's value stores the NEXT state to apply ("1" -> favorite all).
    enable_bool = bool(int(document["favorite_button"].value))
    for checkbox in document.get(selector='input[type="checkbox"]'):
        if checkbox.name == "favorite":
            checkbox.checked = enable_bool
            checkbox.value = int(enable_bool)

    # Swap stored value/label so the next click performs the opposite action.
    if enable_bool:
        document["favorite_button"].value = "0"
        document["favorite_button"].text = "Unfavorite All"
    else:
        document["favorite_button"].value = "1"
        document["favorite_button"].text = "Favorite All"
|
||||||
3
fHDHR_web/brython/brython_stdlib.js
Normal file
22
fHDHR_web/brython/brython_stdlib.py
Normal file
@ -0,0 +1,22 @@
|
|||||||
|
from flask import send_from_directory
|
||||||
|
|
||||||
|
import pathlib
|
||||||
|
|
||||||
|
|
||||||
|
class Brython_stdlib():
    """Serves the brython standard-library bundle (brython_stdlib.js)."""

    endpoints = ["/brython_stdlib.js"]
    endpoint_name = "file_brython_stdlib_js"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr
        # Brython assets live under <fHDHR_web_dir>/brython; resolve once.
        web_dir = self.fhdhr.config.internal["paths"]["fHDHR_web_dir"]
        self.brython_path = pathlib.Path(web_dir).joinpath('brython')

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Return brython_stdlib.js with a JavaScript mimetype."""
        return send_from_directory(
            self.brython_path, 'brython_stdlib.js', mimetype='text/javascript')
|
||||||
15
fHDHR_web/files/__init__.py
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
|
||||||
|
|
||||||
|
from .favicon_ico import Favicon_ICO
|
||||||
|
from .style_css import Style_CSS
|
||||||
|
from .device_xml import Device_XML
|
||||||
|
|
||||||
|
|
||||||
|
class fHDHR_Files():
    """Groups the static-file endpoints: favicon, stylesheet, device.xml."""

    def __init__(self, fhdhr):
        """Instantiate each static-file handler with the shared core object."""
        self.fhdhr = fhdhr
        self.favicon = Favicon_ICO(fhdhr)
        self.style = Style_CSS(fhdhr)
        self.device_xml = Device_XML(fhdhr)
|
||||||
19
fHDHR_web/files/device_xml.py
Normal file
@ -0,0 +1,19 @@
|
|||||||
|
from flask import redirect, session
|
||||||
|
|
||||||
|
|
||||||
|
class Device_XML():
    """Routes /device.xml to either the RMG or HDHR variant."""

    endpoints = ["/device.xml"]
    endpoint_name = "file_device_xml"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Send Plex Media Server clients to the RMG device.xml, others to HDHR."""
        use_rmg = (self.fhdhr.config.dict["rmg"]["enabled"]
                   and session["is_plexmediaserver"])
        if use_rmg:
            return redirect("/rmg/device.xml")
        return redirect("/hdhr/device.xml")
|
||||||
18
fHDHR_web/files/favicon_ico.py
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
from flask import send_from_directory
|
||||||
|
|
||||||
|
|
||||||
|
class Favicon_ICO():
    """Serves /favicon.ico from the configured www directory."""

    endpoints = ["/favicon.ico"]
    endpoint_name = "file_favicon_ico"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Return the favicon with the Microsoft icon mimetype."""
        icon_dir = self.fhdhr.config.internal["paths"]["www_dir"]
        return send_from_directory(
            icon_dir, 'favicon.ico', mimetype='image/vnd.microsoft.icon')
|
||||||
44
fHDHR_web/files/style_css.py
Normal file
@ -0,0 +1,44 @@
|
|||||||
|
from flask import Response
|
||||||
|
import pathlib
|
||||||
|
from io import StringIO
|
||||||
|
|
||||||
|
|
||||||
|
class Style_CSS():
    """Serves /style.css: the bundled stylesheet plus an optional external theme."""

    endpoints = ["/style.css"]
    endpoint_name = "file_style_css"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

        self.internal_style_file = pathlib.Path(
            self.fhdhr.config.internal["paths"]["www_dir"]).joinpath('style.css')

        # Cache the bundled stylesheet in memory once at startup.
        # BUGFIX: read via pathlib so the file handle is closed; the previous
        # bare open(...).read() leaked the handle.
        self.internal_style = StringIO()
        self.internal_style.write(self.internal_style_file.read_text())

        self.pull_external_theme()

    def pull_external_theme(self):
        """Fetch the configured theme URL (if any) and cache its CSS.

        Resets the cache first; only http(s) URLs are fetched here.
        """
        self.external_style = None
        self.external_style_address = None
        theme = self.fhdhr.config.dict["web_ui"]["theme"]
        if theme:
            if theme.startswith(tuple(["http://", "https://"])):
                css_req = self.fhdhr.web.session.get(theme)
                self.external_style = StringIO(css_req.text)
                self.external_style_address = theme

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Return the combined internal + external CSS as text/css."""
        main_output = StringIO()
        main_output.write(self.internal_style.getvalue())

        theme = self.fhdhr.config.dict["web_ui"]["theme"]
        if theme:
            # Re-pull if the configured theme changed since the last fetch.
            if theme != self.external_style_address:
                self.pull_external_theme()
            if self.external_style:
                main_output.write(self.external_style.getvalue())

        return Response(status=200, response=main_output.getvalue(), mimetype="text/css")
|
||||||
31
fHDHR_web/hdhr/__init__.py
Normal file
@ -0,0 +1,31 @@
|
|||||||
|
|
||||||
|
|
||||||
|
from .lineup_xml import Lineup_XML
|
||||||
|
from .discover_json import Discover_JSON
|
||||||
|
from .lineup_json import Lineup_JSON
|
||||||
|
from .lineup_status_json import Lineup_Status_JSON
|
||||||
|
|
||||||
|
from .lineup_post import Lineup_Post
|
||||||
|
from .device_xml import HDHR_Device_XML
|
||||||
|
|
||||||
|
from .auto import Auto
|
||||||
|
from .tuner import Tuner
|
||||||
|
|
||||||
|
|
||||||
|
class fHDHR_HDHR():
    """Groups every HDHomeRun-emulation endpoint handler."""

    def __init__(self, fhdhr):
        """Instantiate each HDHR endpoint with the shared core object."""
        self.fhdhr = fhdhr

        # Discovery and lineup (JSON/XML) endpoints.
        self.discover_json = Discover_JSON(fhdhr)
        self.lineup_json = Lineup_JSON(fhdhr)
        self.lineup_status_json = Lineup_Status_JSON(fhdhr)
        self.lineup_xml = Lineup_XML(fhdhr)

        # POST control endpoint and UPnP device description.
        self.lineup_post = Lineup_Post(fhdhr)
        self.device_xml = HDHR_Device_XML(fhdhr)

        # Streaming entry points.
        self.auto = Auto(fhdhr)
        self.tuner = Tuner(fhdhr)
|
||||||
45
fHDHR_web/hdhr/auto.py
Normal file
@ -0,0 +1,45 @@
|
|||||||
|
from flask import request, abort, redirect
|
||||||
|
import urllib.parse
|
||||||
|
|
||||||
|
|
||||||
|
class Auto():
    """Streams a channel with automatic tuner selection via /api/tuners."""

    endpoints = ['/auto/<channel>', '/hdhr/auto/<channel>']
    endpoint_name = "hdhr_auto"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, channel, *args):
        return self.get(channel, *args)

    def get(self, channel, *args):
        """Translate an HDHR-style /auto/<channel> URL into an /api/tuners redirect."""
        method = request.args.get(
            'method',
            default=self.fhdhr.config.dict["fhdhr"]["stream_type"],
            type=str)

        parts = ["/api/tuners?method=%s" % (method)]

        if channel.startswith("v"):
            # vXX form: virtual channel number.
            channel_number = channel.replace('v', '')
        elif channel.startswith("ch"):
            # chXX-YY (frequency/subchannel) addressing is not implemented.
            channel_freq = channel.replace('ch', '').split("-")[0]
            subchannel = 0
            if "-" in channel:
                subchannel = channel.replace('ch', '').split("-")[1]
            self.fhdhr.logger.error("Not Implemented %s-%s" % (str(channel_freq), str(subchannel)))
            abort(501, "Not Implemented %s-%s" % (str(channel_freq), str(subchannel)))
        else:
            channel_number = channel

        parts.append("&channel=%s" % str(channel_number))

        duration = request.args.get('duration', default=0, type=int)
        if duration:
            parts.append("&duration=%s" % str(duration))

        transcode = request.args.get('transcode', default=None, type=str)
        if transcode:
            parts.append("&transcode=%s" % str(transcode))

        # Record the originally-requested URL for the tuner API.
        parts.append("&accessed=%s" % urllib.parse.quote(request.url))

        return redirect("".join(parts))
|
||||||
53
fHDHR_web/hdhr/device_xml.py
Normal file
@ -0,0 +1,53 @@
|
|||||||
|
from flask import Response, request
|
||||||
|
from io import BytesIO
|
||||||
|
import xml.etree.ElementTree
|
||||||
|
|
||||||
|
from fHDHR.tools import sub_el
|
||||||
|
|
||||||
|
|
||||||
|
class HDHR_Device_XML():
    """Serves the UPnP device description XML for HDHomeRun emulation."""

    # Routing metadata consumed when registering endpoints with the web app.
    endpoints = ["/hdhr/device.xml"]
    endpoint_name = "hdhr_device_xml"

    def __init__(self, fhdhr):
        # Shared core object: supplies the config values placed in the XML.
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Device.xml referenced from SSDP"""

        # Request root minus the trailing slash, e.g. "http://host:port".
        base_url = request.url_root[:-1]

        out = xml.etree.ElementTree.Element('root')
        out.set('xmlns', "urn:schemas-upnp-org:device-1-0")

        sub_el(out, 'URLBase', "%s" % base_url)

        # UPnP spec version 1.0.
        specVersion_out = sub_el(out, 'specVersion')
        sub_el(specVersion_out, 'major', "1")
        sub_el(specVersion_out, 'minor', "0")

        device_out = sub_el(out, 'device')

        sub_el(device_out, 'deviceType', "urn:schemas-upnp-org:device:MediaServer:1")

        # Device identity fields are all driven by configuration.
        sub_el(device_out, 'friendlyName', self.fhdhr.config.dict["fhdhr"]["friendlyname"])
        sub_el(device_out, 'manufacturer', self.fhdhr.config.dict["fhdhr"]["reporting_manufacturer"])
        sub_el(device_out, 'manufacturerURL', "https://github.com/fHDHR/%s" % self.fhdhr.config.dict["main"]["reponame"])
        sub_el(device_out, 'modelName', self.fhdhr.config.dict["fhdhr"]["reporting_model"])
        sub_el(device_out, 'modelNumber', self.fhdhr.config.internal["versions"]["fHDHR"])

        # Deliberately empty element.
        sub_el(device_out, 'serialNumber')

        sub_el(device_out, 'UDN', "uuid:%s" % self.fhdhr.config.dict["main"]["uuid"])

        # NOTE(review): tostring(..., encoding='UTF-8') already emits an XML
        # declaration, so the manual write below produces a duplicate
        # declaration line -- confirm clients tolerate this.
        fakefile = BytesIO()
        fakefile.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
        fakefile.write(xml.etree.ElementTree.tostring(out, encoding='UTF-8'))
        device_xml = fakefile.getvalue()

        return Response(status=200,
                        response=device_xml,
                        mimetype='application/xml')
|
||||||
35
fHDHR_web/hdhr/discover_json.py
Normal file
@ -0,0 +1,35 @@
|
|||||||
|
from flask import Response, request
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
|
class Discover_JSON():
    """Serves the HDHomeRun discover.json device description."""

    endpoints = ["/discover.json", "/hdhr/discover.json"]
    endpoint_name = "hdhr_discover_json"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Return the device description JSON used by HDHR clients for discovery."""
        # Request root minus the trailing slash.
        base_url = request.url_root[:-1]

        fhdhr_conf = self.fhdhr.config.dict["fhdhr"]
        payload = {
            "FriendlyName": fhdhr_conf["friendlyname"],
            "Manufacturer": fhdhr_conf["reporting_manufacturer"],
            "ModelNumber": fhdhr_conf["reporting_model"],
            "FirmwareName": fhdhr_conf["reporting_firmware_name"],
            "TunerCount": fhdhr_conf["tuner_count"],
            "FirmwareVersion": fhdhr_conf["reporting_firmware_ver"],
            "DeviceID": self.fhdhr.config.dict["main"]["uuid"],
            "DeviceAuth": fhdhr_conf["device_auth"],
            "BaseURL": "%s" % base_url,
            "LineupURL": "%s/lineup.json" % base_url
        }

        return Response(status=200,
                        response=json.dumps(payload, indent=4),
                        mimetype='application/json')
|
||||||
46
fHDHR_web/hdhr/lineup_json.py
Normal file
@ -0,0 +1,46 @@
|
|||||||
|
from flask import Response, request
|
||||||
|
import json
|
||||||
|
|
||||||
|
from fHDHR.tools import channel_sort
|
||||||
|
|
||||||
|
|
||||||
|
class Lineup_JSON():
    """Serves the channel lineup as JSON for HDHomeRun clients."""

    endpoints = ["/lineup.json", "/hdhr/lineup.json"]
    endpoint_name = "hdhr_lineup_json"

    def __init__(self, fhdhr):
        # Shared core object: channels device and config access.
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Build lineup JSON; ?show=found also includes disabled channels
        with an explicit Enabled flag."""

        # Request root minus the trailing slash.
        base_url = request.url_root[:-1]

        show = request.args.get('show', default="all", type=str)

        channelslist = {}
        for fhdhr_id in [x["id"] for x in self.fhdhr.device.channels.get_channels()]:
            channel_obj = self.fhdhr.device.channels.list[fhdhr_id]
            if channel_obj.enabled or show == "found":
                lineup_dict = channel_obj.lineup_dict
                # Make the per-channel stream URL absolute.
                lineup_dict["URL"] = "%s%s" % (base_url, lineup_dict["URL"])
                if show == "found" and channel_obj.enabled:
                    lineup_dict["Enabled"] = 1
                elif show == "found" and not channel_obj.enabled:
                    lineup_dict["Enabled"] = 0

                # NOTE(review): keyed by channel number -- channels sharing a
                # number would overwrite each other; confirm numbers are unique.
                channelslist[channel_obj.number] = lineup_dict

        # Sort the channels
        sorted_channel_list = channel_sort(list(channelslist.keys()))
        sorted_chan_guide = []
        for channel in sorted_channel_list:
            sorted_chan_guide.append(channelslist[channel])

        lineup_json = json.dumps(sorted_chan_guide, indent=4)

        return Response(status=200,
                        response=lineup_json,
                        mimetype='application/json')
|
||||||
60
fHDHR_web/hdhr/lineup_post.py
Normal file
@ -0,0 +1,60 @@
|
|||||||
|
from flask import request, abort, Response
|
||||||
|
|
||||||
|
from fHDHR.exceptions import TunerError
|
||||||
|
|
||||||
|
|
||||||
|
class Lineup_Post():
    """Handles HDHomeRun lineup.post commands: channel scans and favorites."""

    endpoints = ["/lineup.post", "/hdhr/lineup.post"]
    endpoint_name = "hdhr_lineup_post"
    endpoint_methods = ["POST"]

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Dispatch a command from the POST query string.

        scan=start|abort controls a channel scan; favorite=+N/-N/xN
        enables/disables/toggles channel number N.
        """
        if 'scan' in list(request.args.keys()):

            if request.args['scan'] == 'start':
                try:
                    self.fhdhr.device.tuners.tuner_scan()
                except TunerError as e:
                    # Scan may be unavailable (e.g. no free tuner): log and
                    # still return 200 as HDHR clients expect.
                    self.fhdhr.logger.info(str(e))
                return Response(status=200, mimetype='text/html')

            elif request.args['scan'] == 'abort':
                self.fhdhr.device.tuners.stop_tuner_scan()
                return Response(status=200, mimetype='text/html')

            else:
                self.fhdhr.logger.warning("Unknown scan command %s" % request.args['scan'])
                # NOTE(review): abort(200) is unusual; confirm clients accept it.
                return abort(200, "Not a valid scan command")

        elif 'favorite' in list(request.args.keys()):
            # BUGFIX: was `startstwith`, which raised AttributeError on every
            # favorite request.
            if request.args['favorite'].startswith(tuple(["+", "-", "x"])):

                channel_method = request.args['favorite'][0]
                channel_number = request.args['favorite'][1:]

                if str(channel_number) not in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number")]:
                    response = Response("Not Found", status=404)
                    response.headers["X-fHDHR-Error"] = "801 - Unknown Channel"
                    self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
                    abort(response)

                # "+"/"-" are passed through as the enablement action;
                # "x" maps to an explicit toggle.
                if channel_method == "+":
                    self.fhdhr.device.channels.set_channel_enablement("number", channel_number, channel_method)
                elif channel_method == "-":
                    self.fhdhr.device.channels.set_channel_enablement("number", channel_number, channel_method)
                elif channel_method == "x":
                    self.fhdhr.device.channels.set_channel_enablement("number", channel_number, "toggle")

            else:
                self.fhdhr.logger.warning("Unknown favorite command %s" % request.args['favorite'])
                return abort(200, "Not a valid favorite command")

        else:
            return abort(501, "Not a valid command")
|
||||||
50
fHDHR_web/hdhr/lineup_status_json.py
Normal file
@ -0,0 +1,50 @@
|
|||||||
|
from flask import Response
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
|
class Lineup_Status_JSON():
    """Serves lineup_status.json, reporting scan state to HDHR clients."""

    endpoints = ["/lineup_status.json", "/hdhr/lineup_status.json"]
    endpoint_name = "hdhr_lineup_status_json"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Report "scanning" while any tuner scans or no channels exist yet."""
        tuner_status = self.fhdhr.device.tuners.status()
        scanning_count = sum(
            1 for tuner_number in list(tuner_status.keys())
            if tuner_status[tuner_number]["status"] == "Scanning")

        # Pretending to scan while the channel list is empty keeps clients
        # polling instead of showing "no channels".
        if scanning_count or not len(self.fhdhr.device.channels.list):
            jsonlineup = self.scan_in_progress()
        else:
            jsonlineup = self.not_scanning()

        return Response(status=200,
                        response=json.dumps(jsonlineup, indent=4),
                        mimetype='application/json')

    def scan_in_progress(self):
        """Status payload while scanning (or before any channels are known)."""
        return {
            "ScanInProgress": "true",
            "Progress": 99,
            "Found": len(self.fhdhr.device.channels.list)
        }

    def not_scanning(self):
        """Idle status payload advertising the reporting tuner source."""
        source = self.fhdhr.config.dict["fhdhr"]["reporting_tuner_type"]
        return {
            "ScanInProgress": "false",
            "ScanPossible": "true",
            "Source": source,
            "SourceList": [source],
        }
|
||||||
56
fHDHR_web/hdhr/lineup_xml.py
Normal file
@ -0,0 +1,56 @@
|
|||||||
|
from flask import Response, request
|
||||||
|
from io import BytesIO
|
||||||
|
import xml.etree.ElementTree
|
||||||
|
|
||||||
|
from fHDHR.tools import channel_sort, sub_el
|
||||||
|
|
||||||
|
|
||||||
|
class Lineup_XML():
    """Serves the channel lineup as XML for HDHomeRun clients."""

    endpoints = ["/lineup.xml", "/hdhr/lineup.xml"]
    endpoint_name = "hdhr_lineup_xml"

    def __init__(self, fhdhr):
        # Shared core object: channels device and config access.
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Build lineup XML; ?show=found also includes disabled channels
        with an explicit Enabled flag."""

        # Request root minus the trailing slash.
        base_url = request.url_root[:-1]

        show = request.args.get('show', default="all", type=str)

        channelslist = {}
        for fhdhr_id in [x["id"] for x in self.fhdhr.device.channels.get_channels()]:
            channel_obj = self.fhdhr.device.channels.list[fhdhr_id]
            if channel_obj.enabled or show == "found":
                lineup_dict = channel_obj.lineup_dict
                # Make the per-channel stream URL absolute.
                lineup_dict["URL"] = "%s%s" % (base_url, lineup_dict["URL"])
                if show == "found" and channel_obj.enabled:
                    lineup_dict["Enabled"] = 1
                elif show == "found" and not channel_obj.enabled:
                    lineup_dict["Enabled"] = 0

                # NOTE(review): keyed by channel number -- channels sharing a
                # number would overwrite each other; confirm numbers are unique.
                channelslist[channel_obj.number] = lineup_dict

        # Sort the channels
        sorted_channel_list = channel_sort(list(channelslist.keys()))
        sorted_chan_guide = []
        for channel in sorted_channel_list:
            sorted_chan_guide.append(channelslist[channel])

        # One <Program> element per channel, one child element per lineup key.
        out = xml.etree.ElementTree.Element('Lineup')
        for lineup_dict in sorted_chan_guide:
            program_out = sub_el(out, 'Program')
            for key in list(lineup_dict.keys()):
                sub_el(program_out, str(key), str(lineup_dict[key]))

        # NOTE(review): tostring(..., encoding='UTF-8') already emits an XML
        # declaration, so this produces a duplicate declaration line --
        # confirm clients tolerate this.
        fakefile = BytesIO()
        fakefile.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
        fakefile.write(xml.etree.ElementTree.tostring(out, encoding='UTF-8'))
        lineup_xml = fakefile.getvalue()

        return Response(status=200,
                        response=lineup_xml,
                        mimetype='application/xml')
|
||||||
47
fHDHR_web/hdhr/tuner.py
Normal file
@ -0,0 +1,47 @@
|
|||||||
|
from flask import request, abort, redirect
|
||||||
|
import urllib.parse
|
||||||
|
|
||||||
|
|
||||||
|
class Tuner():
    """Streams a channel on a specific tuner by redirecting to /api/tuners."""

    endpoints = ['/tuner<tuner_number>/<channel>', '/hdhr/tuner<tuner_number>/<channel>']
    endpoint_name = "hdhr_tuner"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, tuner_number, channel, *args):
        return self.get(tuner_number, channel, *args)

    def get(self, tuner_number, channel, *args):
        """Translate an HDHR /tunerN/<channel> URL into an /api/tuners redirect."""
        stream_default = self.fhdhr.config.dict["fhdhr"]["stream_type"]
        method = request.args.get('method', default=stream_default, type=str)

        url = "/api/tuners?method=%s" % (method)
        url += "&tuner=%s" % str(tuner_number)

        if channel.startswith("v"):
            # vXX form: virtual channel number.
            channel_number = channel.replace('v', '')
        elif channel.startswith("ch"):
            # chXX-YY (frequency/subchannel) addressing is not implemented.
            channel_freq = channel.replace('ch', '').split("-")[0]
            subchannel = 0
            if "-" in channel:
                subchannel = channel.replace('ch', '').split("-")[1]
            self.fhdhr.logger.error("Not Implemented %s-%s" % (str(channel_freq), str(subchannel)))
            abort(501, "Not Implemented %s-%s" % (str(channel_freq), str(subchannel)))
        else:
            channel_number = channel

        url += "&channel=%s" % str(channel_number)

        duration = request.args.get('duration', default=0, type=int)
        if duration:
            url += "&duration=%s" % str(duration)

        transcode = request.args.get('transcode', default=None, type=str)
        if transcode:
            url += "&transcode=%s" % str(transcode)

        # Record the originally-requested URL for the tuner API.
        url += "&accessed=%s" % urllib.parse.quote(request.url)

        return redirect(url)
|
||||||