test
17
.github/stale.yml
vendored
Normal file
@ -0,0 +1,17 @@
|
|||||||
|
# Number of days of inactivity before an issue becomes stale
|
||||||
|
daysUntilStale: 60
|
||||||
|
# Number of days of inactivity before a stale issue is closed
|
||||||
|
daysUntilClose: 7
|
||||||
|
# Issues with these labels will never be considered stale
|
||||||
|
exemptLabels:
|
||||||
|
- pinned
|
||||||
|
- security
|
||||||
|
# Label to use when marking an issue as stale
|
||||||
|
staleLabel: wontfix
|
||||||
|
# Comment to post when marking an issue as stale. Set to `false` to disable
|
||||||
|
markComment: >
|
||||||
|
This issue has been automatically marked as stale because it has not had
|
||||||
|
recent activity. It will be closed if no further activity occurs. Thank you
|
||||||
|
for your contributions.
|
||||||
|
# Comment to post when closing a stale issue. Set to `false` to disable
|
||||||
|
closeComment: false
|
||||||
12
Dockerfile
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
FROM python:3.8-slim
|
||||||
|
|
||||||
|
RUN apt-get -qq update && \
|
||||||
|
apt-get -qq -y install ffmpeg gcc && \
|
||||||
|
apt-get autoclean && \
|
||||||
|
rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
COPY ./ /app/
|
||||||
|
WORKDIR /app
|
||||||
|
RUN pip3 install -r requirements.txt
|
||||||
|
|
||||||
|
ENTRYPOINT ["python3", "/app/main.py", "--config", "/app/config/config.ini"]
|
||||||
13
LICENSE
Normal file
@ -0,0 +1,13 @@
|
|||||||
|
DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
|
||||||
|
Version 2, December 2004
|
||||||
|
|
||||||
|
Copyright (C) 2020 Sam Zick <Sam@deathbybandaid.net>
|
||||||
|
|
||||||
|
Everyone is permitted to copy and distribute verbatim or modified
|
||||||
|
copies of this license document, and changing it is allowed as long
|
||||||
|
as the name is changed.
|
||||||
|
|
||||||
|
DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
|
||||||
|
TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
|
||||||
|
|
||||||
|
0. You just DO WHAT THE FUCK YOU WANT TO.
|
||||||
20
README.md
@ -1,2 +1,20 @@
|
|||||||
# fHDHR_IPTVorg-US
|
<p align="center">fHDHR_IPTVorg-US <img src="docs/images/logo.ico" alt="Logo"/></p>
|
||||||
|
|
||||||
|
|
||||||
|
Welcome to the world of streaming content as a DVR device! We use some fancy python here to achieve a system of:
|
||||||
|
|
||||||
|
**f**un
|
||||||
|
**H**ome
|
||||||
|
**D**istribution
|
||||||
|
**H**iatus
|
||||||
|
**R**ecreation
|
||||||
|
|
||||||
|
|
||||||
|
Please Check the [Docs](docs/README.md) for Installation information.
|
||||||
|
|
||||||
|
fHDHR is labeled as beta until we reach v1.0.0
|
||||||
|
|
||||||
|
Join us in `#fHDHR <irc://irc.freenode.net/#fHDHR>`_ on Freenode.
|
||||||
|
|
||||||
|
|
||||||
|
Due to multiple issues, I'm dropping official support for Windows.
|
||||||
|
|||||||
2
alternative_epg/__init__.py
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
# pylama:ignore=W0401,W0611
|
||||||
|
# from .test import *
|
||||||
45
config.all.ini
Normal file
@ -0,0 +1,45 @@
|
|||||||
|
[main]
|
||||||
|
# uuid =
|
||||||
|
# cache_dir =
|
||||||
|
# servicename = IPTVorg-US
|
||||||
|
# reponame = fHDHR_IPTVorg-US
|
||||||
|
|
||||||
|
[fhdhr]
|
||||||
|
# address = 0.0.0.0
|
||||||
|
# discovery_address = 0.0.0.0
|
||||||
|
# port = 5004
|
||||||
|
# stream_type = direct
|
||||||
|
# tuner_count = 4
|
||||||
|
# friendlyname = fHDHR-IPTVorg-US
|
||||||
|
# reporting_firmware_name = fHDHR_IPTVorg-US
|
||||||
|
# reporting_manufacturer = BoronDust
|
||||||
|
# reporting_model = fHDHR
|
||||||
|
# reporting_firmware_ver = 20201001
|
||||||
|
# reporting_tuner_type = Antenna
|
||||||
|
# device_auth = fHDHR
|
||||||
|
|
||||||
|
[epg]
|
||||||
|
# images = pass
|
||||||
|
# method = origin
|
||||||
|
# update_frequency = 43200
|
||||||
|
|
||||||
|
[ffmpeg]
|
||||||
|
# path = ffmpeg
|
||||||
|
# bytes_per_read = 1152000
|
||||||
|
|
||||||
|
[vlc]
|
||||||
|
# path = cvlc
|
||||||
|
# bytes_per_read = 1152000
|
||||||
|
|
||||||
|
[direct_stream]
|
||||||
|
# chunksize = 1048576
|
||||||
|
|
||||||
|
[logging]
|
||||||
|
# level = WARNING
|
||||||
|
|
||||||
|
[database]
|
||||||
|
# type = sqlite
|
||||||
|
# driver = None
|
||||||
|
|
||||||
|
[iptvorg-us]
|
||||||
|
# force_best = False
|
||||||
4
config.example.ini
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
[fhdhr]
|
||||||
|
# address = 0.0.0.0
|
||||||
|
# port = 5004
|
||||||
|
# discovery_address = 0.0.0.0
|
||||||
0
data/cache/PLACEHOLDER
vendored
Normal file
BIN
data/garamond.ttf
Normal file
39
data/internal_config/database.json
Normal file
@ -0,0 +1,39 @@
|
|||||||
|
{
|
||||||
|
"database":{
|
||||||
|
"type":{
|
||||||
|
"value": "sqlite",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"driver":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"user":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"pass":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"host":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"port":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"name":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
24
data/internal_config/epg.json
Normal file
@ -0,0 +1,24 @@
|
|||||||
|
{
|
||||||
|
"epg":{
|
||||||
|
"images":{
|
||||||
|
"value": "pass",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"method":{
|
||||||
|
"value": "blocks",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"update_frequency":{
|
||||||
|
"value": 43200,
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"valid_epg_methods":{
|
||||||
|
"value": "None,blocks",
|
||||||
|
"config_file": false,
|
||||||
|
"config_web": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
74
data/internal_config/fhdhr.json
Normal file
@ -0,0 +1,74 @@
|
|||||||
|
{
|
||||||
|
"fhdhr":{
|
||||||
|
"address":{
|
||||||
|
"value": "0.0.0.0",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"discovery_address":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"port":{
|
||||||
|
"value": 5004,
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"reporting_manufacturer":{
|
||||||
|
"value": "BoronDust",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"reporting_model":{
|
||||||
|
"value": "fHDHR",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"reporting_firmware_ver":{
|
||||||
|
"value": "20201001",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"reporting_tuner_type":{
|
||||||
|
"value": "Antenna",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"device_auth":{
|
||||||
|
"value": "fHDHR",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"require_auth":{
|
||||||
|
"value": false,
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"chanscan_on_start":{
|
||||||
|
"value": true,
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"friendlyname":{
|
||||||
|
"value": "fHDHR",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"stream_type":{
|
||||||
|
"value": "direct",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"tuner_count":{
|
||||||
|
"value": 4,
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"reporting_firmware_name":{
|
||||||
|
"value": "fHDHR",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
9
data/internal_config/logging.json
Normal file
@ -0,0 +1,9 @@
|
|||||||
|
{
|
||||||
|
"logging":{
|
||||||
|
"level":{
|
||||||
|
"value": "WARNING",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
39
data/internal_config/main.json
Normal file
@ -0,0 +1,39 @@
|
|||||||
|
{
|
||||||
|
"main":{
|
||||||
|
"uuid":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"cache_dir":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"thread_method":{
|
||||||
|
"value": "multiprocessing",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"servicename":{
|
||||||
|
"value": "fHDHR",
|
||||||
|
"config_file": false,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"dictpopname":{
|
||||||
|
"value": "fHDHR",
|
||||||
|
"config_file": false,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"reponame":{
|
||||||
|
"value": "fHDHR",
|
||||||
|
"config_file": false,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"required":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": false,
|
||||||
|
"config_web": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
9
data/internal_config/rmg.json
Normal file
@ -0,0 +1,9 @@
|
|||||||
|
{
|
||||||
|
"rmg":{
|
||||||
|
"enabled":{
|
||||||
|
"value": true,
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
29
data/internal_config/ssdp.json
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
{
|
||||||
|
"ssdp":{
|
||||||
|
"enabled":{
|
||||||
|
"value": true,
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"max_age":{
|
||||||
|
"value": 1800,
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"proto":{
|
||||||
|
"value": "ipv4",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"iface":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
},
|
||||||
|
"multicast_address":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
33
data/internal_config/streaming.json
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
{
|
||||||
|
"ffmpeg":{
|
||||||
|
"path":{
|
||||||
|
"value": "ffmpeg",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"bytes_per_read":{
|
||||||
|
"value": 1152000,
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"vlc":{
|
||||||
|
"path":{
|
||||||
|
"value": "cvlc",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
},
|
||||||
|
"bytes_per_read":{
|
||||||
|
"value": 1152000,
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"direct_stream":{
|
||||||
|
"chunksize":{
|
||||||
|
"value": 1048576,
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
9
data/internal_config/web_ui.json
Normal file
@ -0,0 +1,9 @@
|
|||||||
|
{
|
||||||
|
"web_ui":{
|
||||||
|
"theme":{
|
||||||
|
"value": "none",
|
||||||
|
"config_file": true,
|
||||||
|
"config_web": true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
129
docs/ADV_Config.md
Normal file
@ -0,0 +1,129 @@
|
|||||||
|
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>
|
||||||
|
|
||||||
|
---
|
||||||
|
[Main](README.md) | [Setup and Usage](Usage.md) | [iptvorg-us](Origin.md) | [Credits/Related Projects](Related-Projects.md)
|
||||||
|
---
|
||||||
|
**f**un
|
||||||
|
**H**ome
|
||||||
|
**D**istribution
|
||||||
|
**H**iatus
|
||||||
|
**R**ecreation
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
[Basic Configuration](Config.md) | [Advanced Configuration](ADV_Config.md) | [WebUI](WebUI.md)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
Here, we'll break down all of the configuration options per section.
|
||||||
|
|
||||||
|
## Main
|
||||||
|
Here's the `main` section.
|
||||||
|
* `uuid` will be created automatically, you need not worry about this.
|
||||||
|
* `cache_dir` is handy for keeping cached files out of the script directory. This is helpful for reinstalls as well as development.
|
||||||
|
|
||||||
|
````
|
||||||
|
[main]
|
||||||
|
# uuid =
|
||||||
|
# cache_dir =
|
||||||
|
````
|
||||||
|
|
||||||
|
## fhdhr
|
||||||
|
|
||||||
|
The `fhdhr` contains all the configuration options for interfacing between this script and your media platform.
|
||||||
|
* `address` and `port` are what we will allow the script to listen on. `0.0.0.0` is the default, and will respond to all.
|
||||||
|
* `discovery_address` may be helpful for making SSDP work properly. If `address` is not `0.0.0.0`, we will use that. If this is not set to a real IP, we won't run SSDP. SSDP is only really helpful for discovering in Plex/Emby. It's a wasted resource since you can manually add the `ip:port` of the script to Plex.
|
||||||
|
* `tuner_count` is a limit of devices able to stream from the script.
|
||||||
|
* `friendlyname` is to set the name that Plex sees the script as.
|
||||||
|
* `stream_type` can be set to `ffmpeg`, `vlc` or `direct`.
|
||||||
|
|
||||||
|
|
||||||
|
````
|
||||||
|
[fhdhr]
|
||||||
|
# address = 0.0.0.0
|
||||||
|
# discovery_address = 0.0.0.0
|
||||||
|
# port = 5004
|
||||||
|
# stream_type = direct
|
||||||
|
# tuner_count = 4
|
||||||
|
# friendlyname = fHDHR-iptvorg-us
|
||||||
|
# reporting_firmware_name = fHDHR_IPTVorg-US
|
||||||
|
# reporting_manufacturer = BoronDust
|
||||||
|
# reporting_model = fHDHR
|
||||||
|
# reporting_firmware_ver = 20201001
|
||||||
|
# reporting_tuner_type = Antenna
|
||||||
|
# device_auth = fHDHR
|
||||||
|
````
|
||||||
|
|
||||||
|
# EPG
|
||||||
|
* `images` can be set to `proxy` or `pass`. If you choose `proxy`, images will be reverse proxied through fHDHR.
|
||||||
|
* `method` defaults to `origin` and will pull the xmltv data from iptvorg-us. Other Options include `blocks` which is an hourly schedule with minimal channel information. Another option is `zap2it`, which is another source of EPG information. Channel Numbers may need to be manually mapped.
|
||||||
|
* `update_frequency` * `epg_update_frequency` determines how often we check for new scheduling information. In Seconds.
|
||||||
|
|
||||||
|
````
|
||||||
|
[epg]
|
||||||
|
# images = pass
|
||||||
|
# method = origin
|
||||||
|
# update_frequency = 43200
|
||||||
|
````
|
||||||
|
|
||||||
|
## ffmpeg
|
||||||
|
|
||||||
|
The `ffmpeg` section includes:
|
||||||
|
* `path` is useful if ffmpeg is not in your systems PATH, or you want to manually specify.
|
||||||
|
* `bytes_per_read` determines how many bytes of the stream to read before sending the data to your client. Increasing this value may cause longer load times, and lowering it may effect `stuttering`.
|
||||||
|
|
||||||
|
````
|
||||||
|
[ffmpeg]
|
||||||
|
# path = ffmpeg
|
||||||
|
# bytes_per_read = 1152000
|
||||||
|
````
|
||||||
|
|
||||||
|
## vlc
|
||||||
|
|
||||||
|
The `vlc` section includes:
|
||||||
|
* `path` is useful if ffmpeg is not in your systems PATH, or you want to manually specify.
|
||||||
|
* `bytes_per_read` determines how many bytes of the stream to read before sending the data to your client. Increasing this value may cause longer load times, and lowering it may effect `stuttering`.
|
||||||
|
|
||||||
|
````
|
||||||
|
[vlc]
|
||||||
|
# path = ffmpeg
|
||||||
|
# bytes_per_read = 1152000
|
||||||
|
````
|
||||||
|
|
||||||
|
## direct_stream
|
||||||
|
|
||||||
|
The `direct_stream` section is for when you set the `[fhdhr]stream_type` to `direct`
|
||||||
|
* `chunksize` is how much data to read at a time.
|
||||||
|
|
||||||
|
````
|
||||||
|
[direct_stream]
|
||||||
|
# chunksize = 1024*1024
|
||||||
|
````
|
||||||
|
|
||||||
|
# Logging
|
||||||
|
* `level` determines the amount of logging you wish to see in the console, as well as to the logfile (stored in your cache directory).
|
||||||
|
|
||||||
|
````
|
||||||
|
[logging]
|
||||||
|
# level = WARNING
|
||||||
|
````
|
||||||
|
|
||||||
|
# Database
|
||||||
|
* experiment with these settings at your own risk. We use sqlalchemy to provide database options, but we default to sqlite.
|
||||||
|
|
||||||
|
TODO: improve documentation here.
|
||||||
|
|
||||||
|
````
|
||||||
|
[database]
|
||||||
|
# type = sqlite
|
||||||
|
# driver = None
|
||||||
|
````
|
||||||
|
|
||||||
|
## iptvorg-us
|
||||||
|
The `iptvorg-us` section
|
||||||
|
|
||||||
|
````
|
||||||
|
[iptvorg-us]
|
||||||
|
# username =
|
||||||
|
# password =
|
||||||
|
````
|
||||||
41
docs/Config.md
Normal file
@ -0,0 +1,41 @@
|
|||||||
|
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>
|
||||||
|
|
||||||
|
---
|
||||||
|
[Main](README.md) | [Setup and Usage](Usage.md) | [iptvorg-us](Origin.md) | [Credits/Related Projects](Related-Projects.md)
|
||||||
|
---
|
||||||
|
**f**un
|
||||||
|
**H**ome
|
||||||
|
**D**istribution
|
||||||
|
**H**iatus
|
||||||
|
**R**ecreation
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
[Basic Configuration](Config.md) | [Advanced Configuration](ADV_Config.md) | [WebUI](WebUI.md)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
The example config file contains all of the things that the typical user may need to fill out.
|
||||||
|
|
||||||
|
Please see the Advanced Configuration page for more information.
|
||||||
|
|
||||||
|
## fHDHR
|
||||||
|
|
||||||
|
Under `fhdhr`, you'll find 2 addresses listed. `0.0.0.0` works great for a listen address, however, it seems that SSDP works best if the discovery address is set to the IP to say that there is a service at.
|
||||||
|
|
||||||
|
````
|
||||||
|
[fhdhr]
|
||||||
|
# address = 0.0.0.0
|
||||||
|
# port = 5004
|
||||||
|
# discovery_address = 0.0.0.0
|
||||||
|
````
|
||||||
|
|
||||||
|
## iptvorg-us
|
||||||
|
|
||||||
|
iptvorg-us requires signin credentials, so add those.
|
||||||
|
|
||||||
|
````
|
||||||
|
[iptvorg-us]
|
||||||
|
# username =
|
||||||
|
# password =
|
||||||
|
````
|
||||||
15
docs/Origin.md
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>
|
||||||
|
|
||||||
|
---
|
||||||
|
[Main](README.md) | [Setup and Usage](Usage.md) | [iptvorg-us](Origin.md) | [Credits/Related Projects](Related-Projects.md)
|
||||||
|
---
|
||||||
|
**f**un
|
||||||
|
**H**ome
|
||||||
|
**D**istribution
|
||||||
|
**H**iatus
|
||||||
|
**R**ecreation
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
|
||||||
|
This varient of fHDHR connects to [iptvorg-us](https://iptvorg-us.com/about).
|
||||||
46
docs/README.md
Normal file
@ -0,0 +1,46 @@
|
|||||||
|
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>
|
||||||
|
|
||||||
|
---
|
||||||
|
[Main](README.md) | [Setup and Usage](Usage.md) | [iptvorg-us](Origin.md) | [Credits/Related Projects](Related-Projects.md)
|
||||||
|
---
|
||||||
|
**f**un
|
||||||
|
**H**ome
|
||||||
|
**D**istribution
|
||||||
|
**H**iatus
|
||||||
|
**R**ecreation
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# The Boring Disclaimers (at the top of the docs for a reason)
|
||||||
|
|
||||||
|
|
||||||
|
fHDHR is a Python service to take various sources of video and make them accessible to client software including, but not limited to:
|
||||||
|
|
||||||
|
* [Plex](https://www.plex.tv/)
|
||||||
|
* [Emby](https://emby.media/)
|
||||||
|
* [Jellyfin](https://jellyfin.org/)
|
||||||
|
* [Channels](https://getchannels.com/)
|
||||||
|
|
||||||
|
fHDHR is not directly affiliated with the above client software, and you will receive NO support for this script via their forums.
|
||||||
|
|
||||||
|
fHDHR is able to connect to clients by emulating a piece of hardware called the [HDHomeRun from SiliconDust](https://www.silicondust.com/). fHDHR is in NO way affiliated with SiliconDust, and is NOT a HDHomeRun device. fHDHR simply uses the API structure used by the authentic HDHomeRun to connect to client DVR solutions.
|
||||||
|
|
||||||
|
# History
|
||||||
|
|
||||||
|
I got the Huappage QuadHD, and the Mohu Sail as a pandemic-project. All was fine working within Plex, but I also have emby setup as a backup to Plex when auth is broken.
|
||||||
|
|
||||||
|
I thought to myself, "Self, I should look on github for a way to share my tv tuner between the two".
|
||||||
|
|
||||||
|
That's when I tried both npvrProxy with NextPVR as well as tvhProxy with TVHeadend. I had to tinker with both to get them working, but I started testing which one I liked more.
|
||||||
|
|
||||||
|
Around this same time, I stumbled upon [locast2plex by tgorgdotcom](https://github.com/tgorgdotcom/locast2plex). I wanted to contribute to that project to get it to a point that I could fork it to work for other video stream sources.
|
||||||
|
|
||||||
|
The locast2plex code development wasn't going quite fast enough for the feature-creep in my head.
|
||||||
|
|
||||||
|
I then proceded to create the initial iteration of fHDHR which I originally called "FakeHDHR". I've rewritten the core functionality a few times before landing on the current code structure, which feels 'right'.
|
||||||
|
|
||||||
|
I've worked really hard to create a structure that simplifies new variants of the core code to work with different 'origin' streams. Combining these works really well with [xTeVe](https://github.com/xteve-project/xTeVe).
|
||||||
|
|
||||||
|
One of the variants goes as far as scraping a table from a PDF file for creating a channel guide!
|
||||||
|
|
||||||
|
I can easily create more variants of the project to do other video sources. Paid ones, I could potentially accept donations for, as I don't want to pay to develop for multiple platforms.
|
||||||
26
docs/Related-Projects.md
Normal file
@ -0,0 +1,26 @@
|
|||||||
|
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>
|
||||||
|
|
||||||
|
---
|
||||||
|
[Main](README.md) | [Setup and Usage](Usage.md) | [iptvorg-us](Origin.md) | [Credits/Related Projects](Related-Projects.md)
|
||||||
|
---
|
||||||
|
**f**un
|
||||||
|
**H**ome
|
||||||
|
**D**istribution
|
||||||
|
**H**iatus
|
||||||
|
**R**ecreation
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
|
||||||
|
While the fHDHR reops share very little code from the below projects, they were a source of inspiration:
|
||||||
|
|
||||||
|
* [tvhProxy by jkaberg](https://github.com/jkaberg/tvhProxy)
|
||||||
|
* [locast2plex by tgorgdotcom](https://github.com/tgorgdotcom/locast2plex)
|
||||||
|
|
||||||
|
|
||||||
|
Aside from the above, these other projects are worth a look as well:
|
||||||
|
|
||||||
|
* [npvrProxy](https://github.com/rogueosb/npvrProxy)
|
||||||
|
* [xTeVe](https://xteve.de/)
|
||||||
|
* [telly](https://github.com/tellytv/telly)
|
||||||
|
* [dizquetv](https://github.com/vexorian/dizquetv)
|
||||||
129
docs/Usage.md
Normal file
@ -0,0 +1,129 @@
|
|||||||
|
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>
|
||||||
|
|
||||||
|
---
|
||||||
|
[Main](README.md) | [Setup and Usage](Usage.md) | [iptvorg-us](Origin.md) | [Credits/Related Projects](Related-Projects.md)
|
||||||
|
---
|
||||||
|
**f**un
|
||||||
|
**H**ome
|
||||||
|
**D**istribution
|
||||||
|
**H**iatus
|
||||||
|
**R**ecreation
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
[Basic Configuration](Config.md) | [Advanced Configuration](ADV_Config.md) | [WebUI](WebUI.md)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
# Author Notes
|
||||||
|
|
||||||
|
* All Testing is currently done in Proxmox LXC, Ubuntu 20.04, Python 3.8
|
||||||
|
|
||||||
|
|
||||||
|
# Prerequisites
|
||||||
|
|
||||||
|
* A Linux or Mac "Server". Windows currently does not work. A "Server" is a computer that is typically always online.
|
||||||
|
* Python 3.7 or later.
|
||||||
|
* Consult [This Page](Origin.md) for additional setup specific to this variant of fHDHR.
|
||||||
|
|
||||||
|
|
||||||
|
# Optional Prerequisites
|
||||||
|
* If you intend to use Docker, [This Guide](https://docs.docker.com/get-started/) should help you get started. The author of fHDHR is not a docker user, but will still try to help.
|
||||||
|
|
||||||
|
fHDHR uses direct connections with video sources by default. Alternatively, you can install and update the [config](Config.md) accordingly. You will need to make these available to your systems PATH, or manually set their path via the config file.
|
||||||
|
|
||||||
|
* ffmpeg
|
||||||
|
* vlc
|
||||||
|
|
||||||
|
|
||||||
|
# Installation
|
||||||
|
|
||||||
|
## Linux
|
||||||
|
|
||||||
|
* Download the zip, or git clone
|
||||||
|
* Navigate into your script directory and run `pip3 install -r requirements.txt`
|
||||||
|
* Copy the included `config.example.ini` file to a known location. The script will not run without this. There is no default configuration file location. [Modify the configuration file to suit your needs.](Config.md)
|
||||||
|
|
||||||
|
* Run with `python3 main.py -c=` and the path to the config file.
|
||||||
|
|
||||||
|
|
||||||
|
## Docker
|
||||||
|
This portion of the guide assumes you are using a Linux system with both docker and docker-compose installed. This (or some variation thereof) may work on Mac or Windows, but has not been tested.
|
||||||
|
|
||||||
|
* this guide assumes we wish to use the `~/fhdhr` directory for our install (you can use whatever directory you like, just make the appropriate changes elsewhere in this guide) and that we are installing for iptvorg-us support
|
||||||
|
* run the following commands to clone the repo into `~/fhdhr/fHDHR_IPTVorg-US`
|
||||||
|
```
|
||||||
|
cd ~/fhdhr
|
||||||
|
git clone https://github.com/fHDHR/fHDHR_IPTVorg-US.git
|
||||||
|
```
|
||||||
|
* create your config.ini file (as described earlier in this guide) in the `~/fhdhr/fHDHR_IPTVorg-US` directory
|
||||||
|
* while still in the `~/fhdhr` directory, create the following `docker-compose.yml` file
|
||||||
|
```
|
||||||
|
version: '3'
|
||||||
|
|
||||||
|
services:
|
||||||
|
iptvorg-us:
|
||||||
|
build: ./fHDHR_IPTVorg-US
|
||||||
|
container_name: iptvorg-us
|
||||||
|
network_mode: host
|
||||||
|
volumes:
|
||||||
|
- ./fHDHR_IPTVorg-US/config.ini:/app/config/config.ini
|
||||||
|
```
|
||||||
|
* run the following command to build and launch the container
|
||||||
|
```
|
||||||
|
docker-compose up --build -d iptvorg-us
|
||||||
|
```
|
||||||
|
|
||||||
|
After a short period of time (during which docker will build your new fHDHR container), you should now have a working build of fHDHR running inside a docker container.
|
||||||
|
|
||||||
|
As the code changes and new versions / bug fixes are released, at any point you can pull the latest version of the code and rebuild your container with the following commands:
|
||||||
|
```
|
||||||
|
cd ~/fhdhr/fHDHR_IPTVorg-US
|
||||||
|
git checkout master
|
||||||
|
git pull
|
||||||
|
cd ~/fhdhr
|
||||||
|
docker-compose up --build -d iptvorg-us
|
||||||
|
```
|
||||||
|
<hr />
|
||||||
|
|
||||||
|
You can also run multiple instances of fHDHR to support additional sources by cloning the appropriate repo into your `~/fhdhr` directory and adding the necessary services to the docker-compose file we created above.
|
||||||
|
|
||||||
|
* for example, if we also wanted iptvorg-us support, you would clone the iptvorg-us repository:
|
||||||
|
```
|
||||||
|
cd ~/fhdhr
|
||||||
|
git clone https://github.com/fHDHR/fHDHR_IPTVorg-US.git
|
||||||
|
```
|
||||||
|
* **NOTE**: if you are running multiple services on the same machine, you must change the port in your config.ini file for each one. For example, if iptvorg-us was using the default port of 5004, iptvorg-us cannot also use that port. You must change the port in your iptvorg-us config.ini file to something else (5005, for example).
|
||||||
|
* add iptvorg-us as a service in your `docker-compose.yml` file
|
||||||
|
```
|
||||||
|
version: '3'
|
||||||
|
|
||||||
|
services:
|
||||||
|
iptvorg-us:
|
||||||
|
build: ./fHDHR_IPTVorg-US
|
||||||
|
container_name: iptvorg-us
|
||||||
|
network_mode: host
|
||||||
|
volumes:
|
||||||
|
- ./fHDHR_IPTVorg-US/config.ini:/app/config/config.ini
|
||||||
|
|
||||||
|
iptvorg-us:
|
||||||
|
build: ./fHDHR_IPTVorg-US
|
||||||
|
container_name: iptvorg-us
|
||||||
|
network_mode: host
|
||||||
|
volumes:
|
||||||
|
- ./fHDHR_IPTVorg-US/config.ini:/app/config/config.ini
|
||||||
|
```
|
||||||
|
* run the following command to build and launch the container
|
||||||
|
```
|
||||||
|
docker-compose up --build -d iptvorg-us
|
||||||
|
```
|
||||||
|
|
||||||
|
You can repeat these instructions for as many fHDHR containers as your system resources will allow.
|
||||||
|
|
||||||
|
# Setup
|
||||||
|
|
||||||
|
Now that you have fHDHR running, You can navigate (in a web browser) to the IP:Port from the configuration step above.
|
||||||
|
|
||||||
|
If you did not setup a `discovery_address` in your config, SSDP will be disabled. This is not a problem as clients like Plex can have the IP:Port entered manually!
|
||||||
|
|
||||||
|
You can copy the xmltv link from the webUI and use that in your client software to provide Channel Guide information.
|
||||||
98
docs/WebUI.md
Normal file
@ -0,0 +1,98 @@
|
|||||||
|
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>
|
||||||
|
|
||||||
|
---
|
||||||
|
[Main](README.md) | [Setup and Usage](Usage.md) | [iptvorg-us](Origin.md) | [Credits/Related Projects](Related-Projects.md)
|
||||||
|
---
|
||||||
|
**f**un
|
||||||
|
**H**ome
|
||||||
|
**D**istribution
|
||||||
|
**H**iatus
|
||||||
|
**R**ecreation
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
[Basic Configuration](Config.md) | [Advanced Configuration](ADV_Config.md) | [WebUI](WebUI.md)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
|
||||||
|
This Page will introduce basic handling of the script from the Web Interface provided at IP:Port
|
||||||
|
|
||||||
|
The Pages are available in the buttons at the top, links to xmltv and m3u are provided at the top for ease of access.
|
||||||
|
|
||||||
|
|
||||||
|
# Main Landing Page
|
||||||
|
|
||||||
|
Below is the main landing page with basic information.
|
||||||
|
|
||||||
|
<img src="screenshots/webui_main.PNG" alt="Main Page"/>
|
||||||
|
|
||||||
|
# iptvorg-us
|
||||||
|
|
||||||
|
Here you will have access to some basic information about the service we are proxying.
|
||||||
|
|
||||||
|
The webUI will still work, even if setup didn't go smoothly.
|
||||||
|
|
||||||
|
<img src="screenshots/webui_origin.PNG" alt="Origin Page"/>
|
||||||
|
|
||||||
|
# Guide
|
||||||
|
|
||||||
|
This Page give you information about what is currently playing on all stations. It will also show the time remaining for each item.
|
||||||
|
|
||||||
|
* Note: The Play link in the left hand column can be copied to play a channel in VLC media player!
|
||||||
|
|
||||||
|
<img src="screenshots/webui_guide.PNG" alt="Guide Page"/>
|
||||||
|
|
||||||
|
|
||||||
|
# Cluster
|
||||||
|
|
||||||
|
Since SSDP is used for service discovery, I decided to also use it for ease of management.
|
||||||
|
|
||||||
|
This tab will not have the below options if SSDP isn't running.
|
||||||
|
|
||||||
|
Joining a cluster will provide a second row of buttons for the clustered servers.
|
||||||
|
|
||||||
|
Unjoined:
|
||||||
|
|
||||||
|
<img src="screenshots/webui_cluster_unjoined.PNG" alt="Cluster Page, UnJoined"/>
|
||||||
|
|
||||||
|
Joined:
|
||||||
|
|
||||||
|
<img src="screenshots/webui_cluster_joined.PNG" alt="Cluster Page, Joined"/>
|
||||||
|
|
||||||
|
|
||||||
|
# Streams
|
||||||
|
|
||||||
|
This Page will show all active streams, and tuner information. You can also terminate a stream from here.
|
||||||
|
|
||||||
|
* Note: Clients will often have an amount buffered, and the connection termination is not immediate from a viewing perspective. However, the connection to the source is indeed cut off.
|
||||||
|
|
||||||
|
<img src="screenshots/webui_streams.PNG" alt="Streams Page"/>
|
||||||
|
|
||||||
|
# xmltv
|
||||||
|
|
||||||
|
This page will give you access to all the xmltv formats provided by this varient.
|
||||||
|
|
||||||
|
From here, you can manually update or even clear the cached epg, and then update.
|
||||||
|
|
||||||
|
<img src="screenshots/webui_xmltv.PNG" alt="xmltv Page"/>
|
||||||
|
|
||||||
|
# Version
|
||||||
|
|
||||||
|
This page will give valuable information about the environment the script is being run in.
|
||||||
|
|
||||||
|
<img src="screenshots/webui_version.PNG" alt="Version Page"/>
|
||||||
|
|
||||||
|
# Diganostics
|
||||||
|
|
||||||
|
This page has various links to json/xml files that make the magic work, as well as debug and cluster information.
|
||||||
|
|
||||||
|
<img src="screenshots/webui_diagnostics.PNG" alt="Diagnostics Page"/>
|
||||||
|
|
||||||
|
# Settings
|
||||||
|
|
||||||
|
This page allows viewing/changing all possible configuration options.
|
||||||
|
|
||||||
|
* Note: This will require a restart of the script to have any effect.
|
||||||
|
|
||||||
|
<img src="screenshots/webui_diagnostics.PNG" alt="Diagnostics Page"/>
|
||||||
BIN
docs/images/logo.ico
Normal file
|
After Width: | Height: | Size: 15 KiB |
BIN
docs/screenshots/webui_cluster_joined.PNG
Normal file
|
After Width: | Height: | Size: 30 KiB |
BIN
docs/screenshots/webui_cluster_unjoined.PNG
Normal file
|
After Width: | Height: | Size: 27 KiB |
BIN
docs/screenshots/webui_diagnostics.PNG
Normal file
|
After Width: | Height: | Size: 23 KiB |
BIN
docs/screenshots/webui_guide.PNG
Normal file
|
After Width: | Height: | Size: 137 KiB |
BIN
docs/screenshots/webui_main.PNG
Normal file
|
After Width: | Height: | Size: 23 KiB |
BIN
docs/screenshots/webui_origin.PNG
Normal file
|
After Width: | Height: | Size: 30 KiB |
BIN
docs/screenshots/webui_settings.PNG
Normal file
|
After Width: | Height: | Size: 53 KiB |
BIN
docs/screenshots/webui_streams.PNG
Normal file
|
After Width: | Height: | Size: 39 KiB |
BIN
docs/screenshots/webui_version.PNG
Normal file
|
After Width: | Height: | Size: 26 KiB |
BIN
docs/screenshots/webui_xmltv.PNG
Normal file
|
After Width: | Height: | Size: 27 KiB |
37
fHDHR/__init__.py
Normal file
@ -0,0 +1,37 @@
|
|||||||
|
# coding=utf-8
|
||||||
|
|
||||||
|
from .originwrapper import OriginServiceWrapper
|
||||||
|
from .device import fHDHR_Device
|
||||||
|
from .api import fHDHR_API_URLs
|
||||||
|
|
||||||
|
import fHDHR.tools
|
||||||
|
|
||||||
|
fHDHR_VERSION = "v0.4.6-beta"
|
||||||
|
|
||||||
|
|
||||||
|
class fHDHR_INT_OBJ():
    """Internal core bundle: shared config, logger, database, HTTP client and API URLs."""

    def __init__(self, settings, logger, db):
        # Record the running core version alongside the shared service handles.
        self.version = fHDHR_VERSION
        self.config = settings
        self.logger = logger
        self.db = db

        # Shared outbound HTTP helper used across the application.
        self.web = fHDHR.tools.WebReq()

        # Pre-computed URLs for reaching fHDHR's own HTTP server.
        self.api = fHDHR_API_URLs(settings)
|
||||||
|
|
||||||
|
|
||||||
|
class fHDHR_OBJ():
    """Top-level fHDHR object: wires the origin service wrapper and device layer
    around a shared fHDHR_INT_OBJ core."""

    def __init__(self, settings, logger, db, origin, alternative_epg):
        self.fhdhr = fHDHR_INT_OBJ(settings, logger, db)

        self.originwrapper = OriginServiceWrapper(self.fhdhr, origin)

        self.device = fHDHR_Device(self.fhdhr, self.originwrapper, alternative_epg)

    def __getattr__(self, name):
        ''' will only get called for undefined attributes '''
        # getattr() replaces the original eval("self.fhdhr." + name): same
        # result without executing a constructed string (eval on attribute
        # names is slower and an injection hazard).
        # NOTE: implicitly returns None when self.fhdhr lacks the name,
        # preserving the original behavior.
        if hasattr(self.fhdhr, name):
            return getattr(self.fhdhr, name)
||||||
36
fHDHR/api/__init__.py
Normal file
@ -0,0 +1,36 @@
|
|||||||
|
import urllib.parse
|
||||||
|
|
||||||
|
|
||||||
|
class fHDHR_API_URLs():
    """Compute the base URLs used to reach fHDHR's own HTTP server."""

    def __init__(self, settings):
        self.config = settings

        self.address = self.config.dict["fhdhr"]["address"]
        self.discovery_address = self.config.dict["fhdhr"]["discovery_address"]
        self.port = self.config.dict["fhdhr"]["port"]

    @property
    def base(self):
        """Base http URL, preferring the discovery address when configured.

        The original code had separate branches for address == "0.0.0.0" and
        everything else that returned the identical value; they are collapsed
        here with behavior unchanged.
        """
        if self.discovery_address:
            return ('http://%s:%s' % self.discovery_address_tuple)
        return ('http://%s:%s' % self.address_tuple)

    @property
    def base_quoted(self):
        """URL-quoted form of `base`, safe to embed in query strings."""
        return urllib.parse.quote(self.base)

    @property
    def discovery_address_tuple(self):
        # (host, port) using the externally advertised address.
        return (self.discovery_address, int(self.port))

    @property
    def localhost_address_tuple(self):
        # (host, port) for loopback connections to ourselves.
        return ("127.0.0.1", int(self.port))

    @property
    def address_tuple(self):
        # (host, port) using the bind address.
        return (self.address, int(self.port))
||||||
0
fHDHR/cli/__init__.py
Normal file
115
fHDHR/cli/run.py
Normal file
@ -0,0 +1,115 @@
|
|||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import argparse
|
||||||
|
import time
|
||||||
|
import multiprocessing
|
||||||
|
import threading
|
||||||
|
import platform
|
||||||
|
|
||||||
|
from fHDHR import fHDHR_VERSION, fHDHR_OBJ
|
||||||
|
import fHDHR.exceptions
|
||||||
|
import fHDHR.config
|
||||||
|
from fHDHR.db import fHDHRdb
|
||||||
|
|
||||||
|
# Exit codes: ERR_CODE asks the supervisor to restart; ERR_CODE_NO_RESTART is fatal.
ERR_CODE = 1
ERR_CODE_NO_RESTART = 2


# `sys.version_info < (3, 7)` already rejects every Python 2 release, so the
# original extra `major == 2` test was redundant.
if sys.version_info < (3, 7):
    print('Error: fHDHR requires python 3.7+.')
    sys.exit(1)

opersystem = platform.system()
if opersystem in ["Windows"]:
    print("WARNING: This script may fail on Windows. Try Setting the `thread_method` to `threading`")
|
||||||
|
|
||||||
|
|
||||||
|
def build_args_parser(argv=None):
    """Parse command-line arguments for fHDHR.

    :param argv: optional list of argument strings; defaults to ``sys.argv[1:]``
        (passing ``None`` to ``parse_args`` preserves the original behavior).
    :returns: the parsed ``argparse.Namespace``. Note: despite the name, this
        returns parsed arguments, not the parser itself (kept for callers).
    """
    parser = argparse.ArgumentParser(description='fHDHR')
    parser.add_argument('-c', '--config', dest='cfg', type=str, required=True,
                        help='configuration file to load.')
    return parser.parse_args(argv)
|
||||||
|
|
||||||
|
|
||||||
|
def get_configuration(args, script_dir, origin, fHDHR_web):
    """Load the fHDHR configuration, failing fast when the file is absent."""
    config_path = args.cfg
    if os.path.isfile(config_path):
        return fHDHR.config.Config(config_path, script_dir, origin, fHDHR_web)
    raise fHDHR.exceptions.ConfigurationNotFound(filename=config_path)
|
||||||
|
|
||||||
|
|
||||||
|
def run(settings, logger, db, script_dir, fHDHR_web, origin, alternative_epg):
    """Assemble the fHDHR runtime, start background services, and block.

    Starts the HTTP server, and optionally the SSDP responder (when a
    discovery address is configured) and the EPG updater (when an EPG method
    is configured), then sleeps forever until KeyboardInterrupt.

    Returns ERR_CODE_NO_RESTART on Ctrl-C, ERR_CODE otherwise.
    """
    fhdhr = fHDHR_OBJ(settings, logger, db, origin, alternative_epg)
    fhdhrweb = fHDHR_web.fHDHR_HTTP_Server(fhdhr)

    def _spawn(target):
        # Launch `target` via the configured concurrency primitive. This
        # replaces three copies of identical Process/Thread boilerplate;
        # config_verification() guarantees thread_method is one of these.
        thread_method = settings.dict["main"]["thread_method"]
        if thread_method == "multiprocessing":
            worker = multiprocessing.Process(target=target)
        elif thread_method == "threading":
            worker = threading.Thread(target=target)
        else:
            return None
        worker.start()
        return worker

    try:

        print("HTTP Server Starting")
        _spawn(fhdhrweb.run)

        if settings.dict["fhdhr"]["discovery_address"]:
            print("SSDP Server Starting")
            _spawn(fhdhr.device.ssdp.run)

        if settings.dict["epg"]["method"]:
            print("EPG Update Starting")
            _spawn(fhdhr.device.epg.run)

        # Keep the main thread/process alive so the workers keep running.
        while True:
            time.sleep(3600)

    except KeyboardInterrupt:
        return ERR_CODE_NO_RESTART

    return ERR_CODE
|
||||||
|
|
||||||
|
|
||||||
|
def start(args, script_dir, fHDHR_web, origin, alternative_epg):
    """Get Configuration for fHDHR and start"""
    # Configuration problems are fatal and must not trigger a restart loop.
    try:
        settings = get_configuration(args, script_dir, origin, fHDHR_web)
    except fHDHR.exceptions.ConfigurationError as err:
        print(err)
        return ERR_CODE_NO_RESTART

    logger = settings.logging_setup()
    db = fHDHRdb(settings)
    return run(settings, logger, db, script_dir, fHDHR_web, origin, alternative_epg)
|
||||||
|
|
||||||
|
|
||||||
|
def main(script_dir, fHDHR_web, origin, alternative_epg):
    """fHDHR run script entry point"""
    # Announce the versions of every loaded component up front.
    print("Loading fHDHR %s" % fHDHR_VERSION)
    print("Loading fHDHR_web %s" % fHDHR_web.fHDHR_web_VERSION)
    print("Loading Origin Service: %s %s" % (origin.ORIGIN_NAME, origin.ORIGIN_VERSION))

    try:
        return start(build_args_parser(), script_dir, fHDHR_web, origin, alternative_epg)
    except KeyboardInterrupt:
        print("\n\nInterrupted")
        return ERR_CODE
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    # NOTE(review): main() requires (script_dir, fHDHR_web, origin,
    # alternative_epg); invoking this module directly would raise TypeError
    # here. The supported entry point appears to be a project-level wrapper
    # (see the Dockerfile's /app/main.py) -- confirm before relying on this.
    main()
|
||||||
318
fHDHR/config/__init__.py
Normal file
@ -0,0 +1,318 @@
|
|||||||
|
import os
|
||||||
|
import sys
|
||||||
|
import random
|
||||||
|
import configparser
|
||||||
|
import pathlib
|
||||||
|
import logging
|
||||||
|
import subprocess
|
||||||
|
import platform
|
||||||
|
import json
|
||||||
|
|
||||||
|
import fHDHR.exceptions
|
||||||
|
from fHDHR import fHDHR_VERSION
|
||||||
|
from fHDHR.tools import isint, isfloat, is_arithmetic, is_docker
|
||||||
|
|
||||||
|
|
||||||
|
class Config():
|
||||||
|
|
||||||
|
    def __init__(self, filename, script_dir, origin, fHDHR_web):
        """Load and verify configuration from defaults plus the user ini file.

        :param filename: path to the user's ini configuration file.
        :param script_dir: project root used to resolve data/plugin paths.
        :param origin: origin service module (provides ORIGIN_NAME/VERSION).
        :param fHDHR_web: web frontend module (provides fHDHR_web_VERSION).
        """
        self.origin = origin
        self.fHDHR_web = fHDHR_web

        # internal: runtime-only state (paths, versions) kept apart from
        # user-facing settings; conf_default: per-key defaults + metadata;
        # dict: the effective merged configuration.
        self.internal = {}
        self.conf_default = {}
        self.dict = {}
        self.config_file = filename

        self.initial_load(script_dir)
        self.config_verification()
|
||||||
|
|
||||||
|
def initial_load(self, script_dir):
|
||||||
|
|
||||||
|
data_dir = pathlib.Path(script_dir).joinpath('data')
|
||||||
|
fHDHR_web_dir = pathlib.Path(script_dir).joinpath('fHDHR_web')
|
||||||
|
www_dir = pathlib.Path(fHDHR_web_dir).joinpath('www_dir')
|
||||||
|
|
||||||
|
self.internal["paths"] = {
|
||||||
|
"script_dir": script_dir,
|
||||||
|
"data_dir": data_dir,
|
||||||
|
"alternative_epg": pathlib.Path(script_dir).joinpath('alternative_epg'),
|
||||||
|
"origin": pathlib.Path(script_dir).joinpath('origin'),
|
||||||
|
"cache_dir": pathlib.Path(data_dir).joinpath('cache'),
|
||||||
|
"internal_config": pathlib.Path(data_dir).joinpath('internal_config'),
|
||||||
|
"www_dir": www_dir,
|
||||||
|
"www_iptvorg-uss_dir": pathlib.Path(fHDHR_web_dir).joinpath('iptvorg-uss'),
|
||||||
|
"font": pathlib.Path(data_dir).joinpath('garamond.ttf'),
|
||||||
|
}
|
||||||
|
|
||||||
|
for conffile in os.listdir(self.internal["paths"]["internal_config"]):
|
||||||
|
conffilepath = os.path.join(self.internal["paths"]["internal_config"], conffile)
|
||||||
|
if str(conffilepath).endswith(".json"):
|
||||||
|
self.read_json_config(conffilepath)
|
||||||
|
|
||||||
|
for dir_type in ["alternative_epg", "origin"]:
|
||||||
|
|
||||||
|
for file_item in os.listdir(self.internal["paths"][dir_type]):
|
||||||
|
file_item_path = os.path.join(self.internal["paths"][dir_type], file_item)
|
||||||
|
if str(file_item_path).endswith("_conf.json"):
|
||||||
|
self.read_json_config(file_item_path)
|
||||||
|
|
||||||
|
print("Loading Configuration File: " + str(self.config_file))
|
||||||
|
self.read_ini_config(self.config_file)
|
||||||
|
|
||||||
|
self.load_versions()
|
||||||
|
|
||||||
|
def load_versions(self):
|
||||||
|
|
||||||
|
self.internal["versions"] = {}
|
||||||
|
|
||||||
|
self.internal["versions"]["fHDHR"] = fHDHR_VERSION
|
||||||
|
|
||||||
|
self.internal["versions"]["fHDHR_web"] = self.fHDHR_web.fHDHR_web_VERSION
|
||||||
|
|
||||||
|
self.internal["versions"][self.origin.ORIGIN_NAME] = self.origin.ORIGIN_VERSION
|
||||||
|
|
||||||
|
self.internal["versions"]["Python"] = sys.version
|
||||||
|
|
||||||
|
opersystem = platform.system()
|
||||||
|
self.internal["versions"]["Operating System"] = opersystem
|
||||||
|
if opersystem in ["Linux", "Darwin"]:
|
||||||
|
# Linux/Mac
|
||||||
|
if os.getuid() == 0 or os.geteuid() == 0:
|
||||||
|
print('Warning: Do not run fHDHR with root privileges.')
|
||||||
|
elif opersystem in ["Windows"]:
|
||||||
|
# Windows
|
||||||
|
if os.environ.get("USERNAME") == "Administrator":
|
||||||
|
print('Warning: Do not run fHDHR as Administrator.')
|
||||||
|
else:
|
||||||
|
print("Uncommon Operating System, use at your own risk.")
|
||||||
|
|
||||||
|
isdocker = is_docker()
|
||||||
|
self.internal["versions"]["Docker"] = isdocker
|
||||||
|
|
||||||
|
if self.dict["fhdhr"]["stream_type"] == "ffmpeg":
|
||||||
|
try:
|
||||||
|
ffmpeg_command = [self.dict["ffmpeg"]["path"],
|
||||||
|
"-version",
|
||||||
|
"pipe:stdout"
|
||||||
|
]
|
||||||
|
|
||||||
|
ffmpeg_proc = subprocess.Popen(ffmpeg_command, stdout=subprocess.PIPE)
|
||||||
|
ffmpeg_version = ffmpeg_proc.stdout.read()
|
||||||
|
ffmpeg_proc.terminate()
|
||||||
|
ffmpeg_proc.communicate()
|
||||||
|
ffmpeg_version = ffmpeg_version.decode().split("version ")[1].split(" ")[0]
|
||||||
|
except FileNotFoundError:
|
||||||
|
ffmpeg_version = "Missing"
|
||||||
|
print("Failed to find ffmpeg.")
|
||||||
|
self.internal["versions"]["ffmpeg"] = ffmpeg_version
|
||||||
|
|
||||||
|
if self.dict["fhdhr"]["stream_type"] == "vlc":
|
||||||
|
try:
|
||||||
|
vlc_command = [self.dict["vlc"]["path"],
|
||||||
|
"--version",
|
||||||
|
"pipe:stdout"
|
||||||
|
]
|
||||||
|
|
||||||
|
vlc_proc = subprocess.Popen(vlc_command, stdout=subprocess.PIPE)
|
||||||
|
vlc_version = vlc_proc.stdout.read()
|
||||||
|
vlc_proc.terminate()
|
||||||
|
vlc_proc.communicate()
|
||||||
|
vlc_version = vlc_version.decode().split("version ")[1].split('\n')[0]
|
||||||
|
except FileNotFoundError:
|
||||||
|
vlc_version = "Missing"
|
||||||
|
print("Failed to find vlc.")
|
||||||
|
self.internal["versions"]["vlc"] = vlc_version
|
||||||
|
|
||||||
|
def read_json_config(self, conffilepath):
|
||||||
|
with open(conffilepath, 'r') as jsonconf:
|
||||||
|
confimport = json.load(jsonconf)
|
||||||
|
for section in list(confimport.keys()):
|
||||||
|
|
||||||
|
if section not in self.dict.keys():
|
||||||
|
self.dict[section] = {}
|
||||||
|
|
||||||
|
if section not in self.conf_default.keys():
|
||||||
|
self.conf_default[section] = {}
|
||||||
|
|
||||||
|
for key in list(confimport[section].keys()):
|
||||||
|
|
||||||
|
if key not in list(self.conf_default[section].keys()):
|
||||||
|
self.conf_default[section][key] = {}
|
||||||
|
|
||||||
|
confvalue = confimport[section][key]["value"]
|
||||||
|
if isint(confvalue):
|
||||||
|
confvalue = int(confvalue)
|
||||||
|
elif isfloat(confvalue):
|
||||||
|
confvalue = float(confvalue)
|
||||||
|
elif is_arithmetic(confvalue):
|
||||||
|
confvalue = eval(confvalue)
|
||||||
|
elif "," in confvalue:
|
||||||
|
confvalue = confvalue.split(",")
|
||||||
|
elif str(confvalue).lower() in ["none"]:
|
||||||
|
confvalue = None
|
||||||
|
elif str(confvalue).lower() in ["false"]:
|
||||||
|
confvalue = False
|
||||||
|
elif str(confvalue).lower() in ["true"]:
|
||||||
|
confvalue = True
|
||||||
|
|
||||||
|
self.dict[section][key] = confvalue
|
||||||
|
|
||||||
|
self.conf_default[section][key]["value"] = confvalue
|
||||||
|
|
||||||
|
for config_option in ["config_web_hidden", "config_file", "config_web"]:
|
||||||
|
if config_option not in list(confimport[section][key].keys()):
|
||||||
|
config_option_value = False
|
||||||
|
else:
|
||||||
|
config_option_value = confimport[section][key][config_option]
|
||||||
|
if str(config_option_value).lower() in ["none"]:
|
||||||
|
config_option_value = None
|
||||||
|
elif str(config_option_value).lower() in ["false"]:
|
||||||
|
config_option_value = False
|
||||||
|
elif str(config_option_value).lower() in ["true"]:
|
||||||
|
config_option_value = True
|
||||||
|
self.conf_default[section][key][config_option] = config_option_value
|
||||||
|
|
||||||
|
def read_ini_config(self, conffilepath):
|
||||||
|
config_handler = configparser.ConfigParser()
|
||||||
|
config_handler.read(conffilepath)
|
||||||
|
for each_section in config_handler.sections():
|
||||||
|
if each_section.lower() not in list(self.dict.keys()):
|
||||||
|
self.dict[each_section.lower()] = {}
|
||||||
|
for (each_key, each_val) in config_handler.items(each_section):
|
||||||
|
if not each_val:
|
||||||
|
each_val = None
|
||||||
|
elif each_val.lower() in ["none"]:
|
||||||
|
each_val = None
|
||||||
|
elif each_val.lower() in ["false"]:
|
||||||
|
each_val = False
|
||||||
|
elif each_val.lower() in ["true"]:
|
||||||
|
each_val = True
|
||||||
|
elif isint(each_val):
|
||||||
|
each_val = int(each_val)
|
||||||
|
elif isfloat(each_val):
|
||||||
|
each_val = float(each_val)
|
||||||
|
elif is_arithmetic(each_val):
|
||||||
|
each_val = eval(each_val)
|
||||||
|
elif "," in each_val:
|
||||||
|
each_val = each_val.split(",")
|
||||||
|
|
||||||
|
import_val = True
|
||||||
|
if each_section in list(self.conf_default.keys()):
|
||||||
|
if each_key in list(self.conf_default[each_section].keys()):
|
||||||
|
if not self.conf_default[each_section][each_key]["config_file"]:
|
||||||
|
import_val = False
|
||||||
|
|
||||||
|
if import_val:
|
||||||
|
self.dict[each_section.lower()][each_key.lower()] = each_val
|
||||||
|
|
||||||
|
def write(self, section, key, value):
|
||||||
|
if section == self.dict["main"]["dictpopname"]:
|
||||||
|
self.dict["origin"][key] = value
|
||||||
|
else:
|
||||||
|
self.dict[section][key] = value
|
||||||
|
|
||||||
|
config_handler = configparser.ConfigParser()
|
||||||
|
config_handler.read(self.config_file)
|
||||||
|
|
||||||
|
if not config_handler.has_section(section):
|
||||||
|
config_handler.add_section(section)
|
||||||
|
|
||||||
|
config_handler.set(section, key, value)
|
||||||
|
|
||||||
|
with open(self.config_file, 'w') as config_file:
|
||||||
|
config_handler.write(config_file)
|
||||||
|
|
||||||
|
    def config_verification(self):
        """Validate the merged configuration and derive runtime settings.

        Raises fHDHR.exceptions.ConfigurationError on any invalid setting.
        Side effects: renames the origin section to "origin", generates and
        persists a uuid when absent, resolves cache/log paths, and
        normalizes the discovery address.
        """
        if self.dict["main"]["thread_method"] not in ["threading", "multiprocessing"]:
            raise fHDHR.exceptions.ConfigurationError("Invalid Threading Method. Exiting...")

        # Every "section/key" item listed in main.required must be truthy.
        if self.dict["main"]["required"]:
            required_missing = []
            if isinstance(self.dict["main"]["required"], str):
                self.dict["main"]["required"] = [self.dict["main"]["required"]]
            if len(self.dict["main"]["required"]):
                for req_item in self.dict["main"]["required"]:
                    req_section = req_item.split("/")[0]
                    req_key = req_item.split("/")[1]
                    if not self.dict[req_section][req_key]:
                        required_missing.append(req_item)
            if len(required_missing):
                raise fHDHR.exceptions.ConfigurationError("Required configuration options missing: " + ", ".join(required_missing))

        # Rename the origin service's section to the generic "origin".
        self.dict["origin"] = self.dict.pop(self.dict["main"]["dictpopname"])

        if isinstance(self.dict["epg"]["valid_epg_methods"], str):
            self.dict["epg"]["valid_epg_methods"] = [self.dict["epg"]["valid_epg_methods"]]

        if self.dict["epg"]["method"] and self.dict["epg"]["method"] not in ["None"]:
            if isinstance(self.dict["epg"]["method"], str):
                self.dict["epg"]["method"] = [self.dict["epg"]["method"]]
            # NOTE(review): epg_methods is built but never written back to
            # self.dict["epg"]["method"], and validation reads
            # main.valid_epg_methods although the list normalized above is
            # epg.valid_epg_methods -- both look unintended; confirm.
            epg_methods = []
            for epg_method in self.dict["epg"]["method"]:
                if epg_method == self.dict["main"]["dictpopname"] or epg_method == "origin":
                    epg_methods.append("origin")
                elif epg_method in ["None"]:
                    raise fHDHR.exceptions.ConfigurationError("Invalid EPG Method. Exiting...")
                elif epg_method in self.dict["main"]["valid_epg_methods"]:
                    epg_methods.append(epg_method)
                else:
                    raise fHDHR.exceptions.ConfigurationError("Invalid EPG Method. Exiting...")
            self.dict["epg"]["def_method"] = self.dict["epg"]["method"][0]

        # First run: mint a random device uuid and persist it to the ini.
        if not self.dict["main"]["uuid"]:
            self.dict["main"]["uuid"] = ''.join(random.choice("hijklmnopqrstuvwxyz") for i in range(8))
            self.write('main', 'uuid', self.dict["main"]["uuid"])

        # A user-supplied cache_dir overrides the default data/cache path.
        if self.dict["main"]["cache_dir"]:
            if not pathlib.Path(self.dict["main"]["cache_dir"]).is_dir():
                raise fHDHR.exceptions.ConfigurationError("Invalid Cache Directory. Exiting...")
            self.internal["paths"]["cache_dir"] = pathlib.Path(self.dict["main"]["cache_dir"])
        cache_dir = self.internal["paths"]["cache_dir"]

        logs_dir = pathlib.Path(cache_dir).joinpath('logs')
        self.internal["paths"]["logs_dir"] = logs_dir
        if not logs_dir.is_dir():
            logs_dir.mkdir()

        self.dict["database"]["path"] = pathlib.Path(cache_dir).joinpath('fhdhr.db')

        if self.dict["fhdhr"]["stream_type"] not in ["direct", "ffmpeg", "vlc"]:
            raise fHDHR.exceptions.ConfigurationError("Invalid stream type. Exiting...")

        # Fall back to the bind address for discovery, but never advertise
        # the 0.0.0.0 wildcard -- treat that as "no discovery address".
        if not self.dict["fhdhr"]["discovery_address"] and self.dict["fhdhr"]["address"] != "0.0.0.0":
            self.dict["fhdhr"]["discovery_address"] = self.dict["fhdhr"]["address"]
        if not self.dict["fhdhr"]["discovery_address"] or self.dict["fhdhr"]["discovery_address"] == "0.0.0.0":
            self.dict["fhdhr"]["discovery_address"] = None
|
||||||
|
|
||||||
|
def logging_setup(self):
|
||||||
|
|
||||||
|
log_level = self.dict["logging"]["level"].upper()
|
||||||
|
|
||||||
|
# Create a custom logger
|
||||||
|
logging.basicConfig(format='%(name)s - %(levelname)s - %(message)s', level=log_level)
|
||||||
|
logger = logging.getLogger('fHDHR')
|
||||||
|
log_file = os.path.join(self.internal["paths"]["logs_dir"], 'fHDHR.log')
|
||||||
|
|
||||||
|
# Create handlers
|
||||||
|
# c_handler = logging.StreamHandler()
|
||||||
|
f_handler = logging.FileHandler(log_file)
|
||||||
|
# c_handler.setLevel(log_level)
|
||||||
|
f_handler.setLevel(log_level)
|
||||||
|
|
||||||
|
# Create formatters and add it to handlers
|
||||||
|
# c_format = logging.Formatter('%(name)s - %(levelname)s - %(message)s')
|
||||||
|
f_format = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
|
||||||
|
# c_handler.setFormatter(c_format)
|
||||||
|
f_handler.setFormatter(f_format)
|
||||||
|
|
||||||
|
# Add handlers to the logger
|
||||||
|
# logger.addHandler(c_handler)
|
||||||
|
logger.addHandler(f_handler)
|
||||||
|
return logger
|
||||||
|
|
||||||
|
def __getattr__(self, name):
|
||||||
|
''' will only get called for undefined attributes '''
|
||||||
|
if name in list(self.dict.keys()):
|
||||||
|
return self.dict[name]
|
||||||
405
fHDHR/db/__init__.py
Normal file
@ -0,0 +1,405 @@
|
|||||||
|
# coding=utf-8
|
||||||
|
|
||||||
|
import json
|
||||||
|
import os.path
|
||||||
|
import traceback
|
||||||
|
|
||||||
|
from sqlalchemy import Column, create_engine, String, Text
|
||||||
|
from sqlalchemy.engine.url import URL
|
||||||
|
from sqlalchemy.exc import OperationalError, SQLAlchemyError
|
||||||
|
from sqlalchemy.ext.declarative import declarative_base
|
||||||
|
from sqlalchemy.orm import scoped_session, sessionmaker
|
||||||
|
|
||||||
|
|
||||||
|
def _deserialize(value):
    """Decode a stored database value back into a Python object.

    Values are persisted as JSON text, but sqlite may hand back ints for
    numeric-looking strings even though the column type is string, so
    everything is forced to str first. Text that is not valid JSON (e.g. a
    row edited outside fHDHR) is returned unchanged rather than erroring.
    """
    if value is None:
        return None
    text = str(value)
    try:
        return json.loads(text)
    except ValueError:
        return text
|
||||||
|
|
||||||
|
|
||||||
|
# Shared declarative base for all fHDHR ORM models below.
BASE = declarative_base()
# Table options applied when the backend is MySQL: InnoDB storage with
# full 4-byte unicode and case-insensitive collation.
MYSQL_TABLE_ARGS = {'mysql_engine': 'InnoDB',
                    'mysql_charset': 'utf8mb4',
                    'mysql_collate': 'utf8mb4_unicode_ci'}
|
||||||
|
|
||||||
|
|
||||||
|
class ChannelValues(BASE):
    """Key/value rows scoped to a channel; value holds JSON text."""
    __tablename__ = 'channel_values'
    __table_args__ = MYSQL_TABLE_ARGS
    # Composite primary key: (channel, namespace, key).
    channel = Column(String(255), primary_key=True)
    namespace = Column(String(255), primary_key=True)
    key = Column(String(255), primary_key=True)
    value = Column(Text())
|
||||||
|
|
||||||
|
|
||||||
|
class ProgramValues(BASE):
    """Key/value rows scoped to an EPG program; value holds JSON text."""
    __tablename__ = 'program_values'
    __table_args__ = MYSQL_TABLE_ARGS
    # Composite primary key: (program, namespace, key).
    program = Column(String(255), primary_key=True)
    namespace = Column(String(255), primary_key=True)
    key = Column(String(255), primary_key=True)
    value = Column(Text())
|
||||||
|
|
||||||
|
|
||||||
|
class CacheValues(BASE):
    """Key/value rows for generic cached items; value holds JSON text."""
    __tablename__ = 'cache_values'
    __table_args__ = MYSQL_TABLE_ARGS
    # Composite primary key: (cacheitem, namespace, key).
    cacheitem = Column(String(255), primary_key=True)
    namespace = Column(String(255), primary_key=True)
    key = Column(String(255), primary_key=True)
    value = Column(Text())
|
||||||
|
|
||||||
|
|
||||||
|
class fHDHRValues(BASE):
    """Key/value rows for fHDHR-wide state; value holds JSON text."""
    __tablename__ = 'fhdhr_values'
    __table_args__ = MYSQL_TABLE_ARGS
    # Composite primary key: (item, namespace, key).
    item = Column(String(255), primary_key=True)
    namespace = Column(String(255), primary_key=True)
    key = Column(String(255), primary_key=True)
    value = Column(Text())
|
||||||
|
|
||||||
|
|
||||||
|
class fHDHRdb(object):
|
||||||
|
|
||||||
|
def __init__(self, settings):
|
||||||
|
self.config = settings
|
||||||
|
# MySQL - mysql://username:password@localhost/db
|
||||||
|
# SQLite - sqlite:////cache/path/default.db
|
||||||
|
self.type = self.config.dict["database"]["type"]
|
||||||
|
|
||||||
|
# Handle SQLite explicitly as a default
|
||||||
|
if self.type == 'sqlite':
|
||||||
|
path = self.config.dict["database"]["path"]
|
||||||
|
path = os.path.expanduser(path)
|
||||||
|
self.filename = path
|
||||||
|
self.url = 'sqlite:///%s' % path
|
||||||
|
# Otherwise, handle all other database engines
|
||||||
|
else:
|
||||||
|
query = {}
|
||||||
|
if self.type == 'mysql':
|
||||||
|
drivername = self.config.dict["database"]["driver"] or 'mysql'
|
||||||
|
query = {'charset': 'utf8mb4'}
|
||||||
|
elif self.type == 'postgres':
|
||||||
|
drivername = self.config.dict["database"]["driver"] or 'postgresql'
|
||||||
|
elif self.type == 'oracle':
|
||||||
|
drivername = self.config.dict["database"]["driver"] or 'oracle'
|
||||||
|
elif self.type == 'mssql':
|
||||||
|
drivername = self.config.dict["database"]["driver"] or 'mssql+pymssql'
|
||||||
|
elif self.type == 'firebird':
|
||||||
|
drivername = self.config.dict["database"]["driver"] or 'firebird+fdb'
|
||||||
|
elif self.type == 'sybase':
|
||||||
|
drivername = self.config.dict["database"]["driver"] or 'sybase+pysybase'
|
||||||
|
else:
|
||||||
|
raise Exception('Unknown db_type')
|
||||||
|
|
||||||
|
db_user = self.config.dict["database"]["user"]
|
||||||
|
db_pass = self.config.dict["database"]["pass"]
|
||||||
|
db_host = self.config.dict["database"]["host"]
|
||||||
|
db_port = self.config.dict["database"]["port"] # Optional
|
||||||
|
db_name = self.config.dict["database"]["name"] # Optional, depending on DB
|
||||||
|
|
||||||
|
# Ensure we have all our variables defined
|
||||||
|
if db_user is None or db_pass is None or db_host is None:
|
||||||
|
raise Exception('Please make sure the following core '
|
||||||
|
'configuration values are defined: '
|
||||||
|
'db_user, db_pass, db_host')
|
||||||
|
self.url = URL(drivername=drivername, username=db_user,
|
||||||
|
password=db_pass, host=db_host, port=db_port,
|
||||||
|
database=db_name, query=query)
|
||||||
|
|
||||||
|
self.engine = create_engine(self.url, pool_recycle=3600)
|
||||||
|
|
||||||
|
# Catch any errors connecting to database
|
||||||
|
try:
|
||||||
|
self.engine.connect()
|
||||||
|
except OperationalError:
|
||||||
|
print("OperationalError: Unable to connect to database.")
|
||||||
|
raise
|
||||||
|
|
||||||
|
# Create our tables
|
||||||
|
BASE.metadata.create_all(self.engine)
|
||||||
|
|
||||||
|
self.ssession = scoped_session(sessionmaker(bind=self.engine))
|
||||||
|
|
||||||
|
def connect(self):
|
||||||
|
if self.type != 'sqlite':
|
||||||
|
print(
|
||||||
|
"Raw connection requested when 'db_type' is not 'sqlite':\n"
|
||||||
|
"Consider using 'db.session()' to get a SQLAlchemy session "
|
||||||
|
"instead here:\n%s",
|
||||||
|
traceback.format_list(traceback.extract_stack()[:-1])[-1][:-1])
|
||||||
|
return self.engine.raw_connection()
|
||||||
|
|
||||||
|
    def session(self):
        # Hand out a session from the scoped (thread-local) factory.
        return self.ssession()
|
||||||
|
|
||||||
|
    def execute(self, *args, **kwargs):
        # Pass-through to SQLAlchemy's engine-level execute.
        return self.engine.execute(*args, **kwargs)
|
||||||
|
|
||||||
|
    def get_uri(self):
        # The connection URL: a string for sqlite, a sqlalchemy URL otherwise.
        return self.url
|
||||||
|
|
||||||
|
# Channel Values
|
||||||
|
|
||||||
|
    def set_channel_value(self, channel, key, value, namespace='default'):
        """Insert or update one JSON-serialized value for a channel.

        The channel name is lowercased; value is stored as JSON text.
        Rolls back and re-raises on any SQLAlchemy error.
        """
        channel = channel.lower()
        value = json.dumps(value, ensure_ascii=False)
        session = self.ssession()
        try:
            result = session.query(ChannelValues) \
                .filter(ChannelValues.channel == channel)\
                .filter(ChannelValues.namespace == namespace)\
                .filter(ChannelValues.key == key) \
                .one_or_none()
            # ChannelValues exists, update
            if result:
                result.value = value
                session.commit()
            # DNE - Insert
            else:
                new_channelvalue = ChannelValues(channel=channel, namespace=namespace, key=key, value=value)
                session.add(new_channelvalue)
                session.commit()
        except SQLAlchemyError:
            session.rollback()
            raise
        finally:
            session.close()
|
||||||
|
|
||||||
|
def get_channel_value(self, channel, key, namespace='default'):
|
||||||
|
channel = channel.lower()
|
||||||
|
session = self.ssession()
|
||||||
|
try:
|
||||||
|
result = session.query(ChannelValues) \
|
||||||
|
.filter(ChannelValues.channel == channel)\
|
||||||
|
.filter(ChannelValues.namespace == namespace)\
|
||||||
|
.filter(ChannelValues.key == key) \
|
||||||
|
.one_or_none()
|
||||||
|
if result is not None:
|
||||||
|
result = result.value
|
||||||
|
return _deserialize(result)
|
||||||
|
except SQLAlchemyError:
|
||||||
|
session.rollback()
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
session.close()
|
||||||
|
|
||||||
|
    def delete_channel_value(self, channel, key, namespace='default'):
        """Delete one stored value for a channel; a no-op when absent.

        Rolls back and re-raises on any SQLAlchemy error.
        """
        channel = channel.lower()
        session = self.ssession()
        try:
            result = session.query(ChannelValues) \
                .filter(ChannelValues.channel == channel)\
                .filter(ChannelValues.namespace == namespace)\
                .filter(ChannelValues.key == key) \
                .one_or_none()
            # ChannelValues exists, delete
            if result:
                session.delete(result)
                session.commit()
        except SQLAlchemyError:
            session.rollback()
            raise
        finally:
            session.close()
|
||||||
|
|
||||||
|
# Program Values
|
||||||
|
|
||||||
|
    def set_program_value(self, program, key, value, namespace='default'):
        """Insert or update one JSON-serialized value for an EPG program.

        The program id is lowercased; value is stored as JSON text.
        Rolls back and re-raises on any SQLAlchemy error.
        """
        program = program.lower()
        value = json.dumps(value, ensure_ascii=False)
        session = self.ssession()
        try:
            result = session.query(ProgramValues) \
                .filter(ProgramValues.program == program)\
                .filter(ProgramValues.namespace == namespace)\
                .filter(ProgramValues.key == key) \
                .one_or_none()
            # ProgramValue exists, update
            if result:
                result.value = value
                session.commit()
            # DNE - Insert
            else:
                new_programvalue = ProgramValues(program=program, namespace=namespace, key=key, value=value)
                session.add(new_programvalue)
                session.commit()
        except SQLAlchemyError:
            session.rollback()
            raise
        finally:
            session.close()
|
||||||
|
|
||||||
|
def get_program_value(self, program, key, namespace='default'):
|
||||||
|
program = program.lower()
|
||||||
|
session = self.ssession()
|
||||||
|
try:
|
||||||
|
result = session.query(ProgramValues) \
|
||||||
|
.filter(ProgramValues.program == program)\
|
||||||
|
.filter(ProgramValues.namespace == namespace)\
|
||||||
|
.filter(ProgramValues.key == key) \
|
||||||
|
.one_or_none()
|
||||||
|
if result is not None:
|
||||||
|
result = result.value
|
||||||
|
return _deserialize(result)
|
||||||
|
except SQLAlchemyError:
|
||||||
|
session.rollback()
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
session.close()
|
||||||
|
|
||||||
|
    def delete_program_value(self, program, key, namespace='default'):
        """Delete one stored value for an EPG program; a no-op when absent.

        Rolls back and re-raises on any SQLAlchemy error.
        """
        program = program.lower()
        session = self.ssession()
        try:
            result = session.query(ProgramValues) \
                .filter(ProgramValues.program == program)\
                .filter(ProgramValues.namespace == namespace)\
                .filter(ProgramValues.key == key) \
                .one_or_none()
            # ProgramValue exists, delete
            if result:
                session.delete(result)
                session.commit()
        except SQLAlchemyError:
            session.rollback()
            raise
        finally:
            session.close()
|
||||||
|
|
||||||
|
# Cache Values
|
||||||
|
|
||||||
|
def set_cacheitem_value(self, cacheitem, key, value, namespace='default'):
    """Insert or update a cached value, stored JSON-serialized.

    Performs an upsert on the (cacheitem, namespace, key) row.
    """
    cacheitem = cacheitem.lower()
    serialized = json.dumps(value, ensure_ascii=False)
    session = self.ssession()
    try:
        row = (session.query(CacheValues)
               .filter(CacheValues.cacheitem == cacheitem)
               .filter(CacheValues.namespace == namespace)
               .filter(CacheValues.key == key)
               .one_or_none())
        if row is None:
            # No existing row: insert a fresh one.
            session.add(CacheValues(cacheitem=cacheitem, namespace=namespace,
                                    key=key, value=serialized))
        else:
            # Row exists: overwrite the stored value in place.
            row.value = serialized
        session.commit()
    except SQLAlchemyError:
        session.rollback()
        raise
    finally:
        session.close()
|
||||||
|
|
||||||
|
def get_cacheitem_value(self, cacheitem, key, namespace='default'):
    """Fetch a cached value for (cacheitem, namespace, key).

    Returns the deserialized value; a missing row deserializes as None.
    """
    session = self.ssession()
    try:
        row = (session.query(CacheValues)
               .filter(CacheValues.cacheitem == cacheitem.lower())
               .filter(CacheValues.namespace == namespace)
               .filter(CacheValues.key == key)
               .one_or_none())
        stored = row.value if row is not None else None
        return _deserialize(stored)
    except SQLAlchemyError:
        session.rollback()
        raise
    finally:
        session.close()
|
||||||
|
|
||||||
|
def delete_cacheitem_value(self, cacheitem, key, namespace='default'):
    """Delete the cached value for (cacheitem, namespace, key), if present.

    Deleting a key that does not exist is a no-op.
    """
    session = self.ssession()
    try:
        row = (session.query(CacheValues)
               .filter(CacheValues.cacheitem == cacheitem.lower())
               .filter(CacheValues.namespace == namespace)
               .filter(CacheValues.key == key)
               .one_or_none())
        if row is not None:
            session.delete(row)
            session.commit()
    except SQLAlchemyError:
        session.rollback()
        raise
    finally:
        session.close()
|
||||||
|
|
||||||
|
# fHDHR Values
|
||||||
|
|
||||||
|
def set_fhdhr_value(self, item, key, value, namespace='default'):
    """Insert or update a core fHDHR value, stored JSON-serialized.

    Performs an upsert on the (item, namespace, key) row.
    """
    item = item.lower()
    serialized = json.dumps(value, ensure_ascii=False)
    session = self.ssession()
    try:
        row = (session.query(fHDHRValues)
               .filter(fHDHRValues.item == item)
               .filter(fHDHRValues.namespace == namespace)
               .filter(fHDHRValues.key == key)
               .one_or_none())
        if row is None:
            # No existing row: insert a fresh one.
            new_fhdhrvalue = fHDHRValues(item=item, namespace=namespace,
                                         key=key, value=serialized)
            session.add(new_fhdhrvalue)
        else:
            # Row exists: overwrite the stored value in place.
            row.value = serialized
        session.commit()
    except SQLAlchemyError:
        session.rollback()
        raise
    finally:
        session.close()
|
||||||
|
|
||||||
|
def get_fhdhr_value(self, item, key, namespace='default'):
    """Fetch a core fHDHR value for (item, namespace, key).

    Returns the deserialized value; a missing row deserializes as None.
    """
    session = self.ssession()
    try:
        row = (session.query(fHDHRValues)
               .filter(fHDHRValues.item == item.lower())
               .filter(fHDHRValues.namespace == namespace)
               .filter(fHDHRValues.key == key)
               .one_or_none())
        stored = row.value if row is not None else None
        return _deserialize(stored)
    except SQLAlchemyError:
        session.rollback()
        raise
    finally:
        session.close()
|
||||||
|
|
||||||
|
def delete_fhdhr_value(self, item, key, namespace='default'):
    """Delete the core fHDHR value for (item, namespace, key), if present.

    Deleting a key that does not exist is a no-op.
    """
    session = self.ssession()
    try:
        row = (session.query(fHDHRValues)
               .filter(fHDHRValues.item == item.lower())
               .filter(fHDHRValues.namespace == namespace)
               .filter(fHDHRValues.key == key)
               .one_or_none())
        if row is not None:
            session.delete(row)
            session.commit()
    except SQLAlchemyError:
        session.rollback()
        raise
    finally:
        session.close()
|
||||||
23
fHDHR/device/__init__.py
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
from .channels import Channels
|
||||||
|
from .epg import EPG
|
||||||
|
from .tuners import Tuners
|
||||||
|
from .images import imageHandler
|
||||||
|
from .ssdp import SSDPServer
|
||||||
|
from .cluster import fHDHR_Cluster
|
||||||
|
|
||||||
|
|
||||||
|
class fHDHR_Device():
    """Container wiring up the device-level subsystems.

    Construction order matters: tuners depend on the EPG and channel list,
    images depend on the EPG, and the cluster depends on the SSDP server.
    """

    def __init__(self, fhdhr, originwrapper, alternative_epg):

        # Channel list, backed by the origin service wrapper.
        self.channels = Channels(fhdhr, originwrapper)

        # Electronic Program Guide; can use alternative EPG plugins.
        self.epg = EPG(fhdhr, self.channels, originwrapper, alternative_epg)

        # Tuner pool, which consults the EPG and channel list.
        self.tuners = Tuners(fhdhr, self.epg, self.channels)

        # Image generation/fetching for channel and program art.
        self.images = imageHandler(fhdhr, self.epg)

        # SSDP advertisement/discovery server.
        self.ssdp = SSDPServer(fhdhr)

        # Clustering of multiple fHDHR instances, discovered via SSDP.
        self.cluster = fHDHR_Cluster(fhdhr, self.ssdp)
|
||||||
114
fHDHR/device/channels/__init__.py
Normal file
@ -0,0 +1,114 @@
|
|||||||
|
import datetime
|
||||||
|
import time
|
||||||
|
|
||||||
|
from fHDHR.tools import hours_between_datetime
|
||||||
|
|
||||||
|
from .channel import Channel
|
||||||
|
from .chan_ident import Channel_IDs
|
||||||
|
|
||||||
|
|
||||||
|
class Channels():
    """In-memory registry of Channel objects, keyed by fHDHR channel id.

    Channels are loaded from the database at startup and refreshed from the
    origin service on demand (at most every 12 hours unless forced).
    """

    def __init__(self, fhdhr, originwrapper):
        self.fhdhr = fhdhr

        self.origin = originwrapper

        self.id_system = Channel_IDs(fhdhr)

        # fhdhr channel id -> Channel object
        self.list = {}
        self.list_update_time = None

        self.get_db_channels()
        # scanned_time is only ever set after a successful scan, so its
        # absence means no scan has ever run.
        haseverscanned = self.fhdhr.db.get_fhdhr_value("channels", "scanned_time")
        if (self.fhdhr.config.dict["fhdhr"]["chanscan_on_start"] or not haseverscanned):
            self.get_channels()

    def get_channel_obj(self, keyfind, valfind):
        """Return the first Channel whose dict[keyfind] == valfind.

        NOTE(review): raises StopIteration when no channel matches —
        callers appear to rely on a match existing.
        """
        return next(self.list[fhdhr_id] for fhdhr_id in list(self.list.keys()) if self.list[fhdhr_id].dict[keyfind] == valfind)

    def get_channel_list(self, keyfind):
        """Return the value of dict[keyfind] for every known channel."""
        return [self.list[x].dict[keyfind] for x in list(self.list.keys())]

    def set_channel_status(self, keyfind, valfind, updatedict):
        """Apply updatedict to the channel matching (keyfind, valfind)."""
        self.get_channel_obj(keyfind, valfind).set_status(updatedict)

    def set_channel_enablement_all(self, enablement):
        """Apply an enablement action ("enable"/"disable"/"toggle") to all channels."""
        for fhdhr_id in list(self.list.keys()):
            self.list[fhdhr_id].set_enablement(enablement)

    def set_channel_enablement(self, keyfind, valfind, enablement):
        """Apply an enablement action to the channel matching (keyfind, valfind)."""
        self.get_channel_obj(keyfind, valfind).set_enablement(enablement)

    def set_channel_favorite(self, keyfind, valfind, enablement):
        """Apply a favorite action ("+"/"-") to the channel matching (keyfind, valfind)."""
        self.get_channel_obj(keyfind, valfind).set_favorite(enablement)

    def get_db_channels(self):
        """Load previously-seen channels from the database into self.list."""
        self.fhdhr.logger.info("Checking for Channel information stored in the database.")
        channel_ids = self.fhdhr.db.get_fhdhr_value("channels", "list") or []
        if len(channel_ids):
            self.fhdhr.logger.info("Found %s existing channels in the database." % str(len(channel_ids)))
        for channel_id in channel_ids:
            channel_obj = Channel(self.fhdhr, self.id_system, channel_id=channel_id)
            # Re-read the id from the object in case it was normalized.
            channel_id = channel_obj.dict["id"]
            self.list[channel_id] = channel_obj

    def get_channels(self, forceupdate=False):
        """Pull channels from the origin service and return all channel dicts.

        Performs a scan when no scan has happened yet, when the last scan
        is older than 12 hours, or when forceupdate is True; otherwise the
        cached self.list is returned unchanged.
        """

        updatelist = False
        if not self.list_update_time:
            updatelist = True
        elif hours_between_datetime(self.list_update_time, datetime.datetime.now()) > 12:
            updatelist = True
        elif forceupdate:
            updatelist = True

        if updatelist:
            # origin ids we already know, as strings for comparison below
            channel_origin_id_list = [str(self.list[x].dict["origin_id"]) for x in list(self.list.keys())]

            self.fhdhr.logger.info("Performing Channel Scan.")

            channel_dict_list = self.origin.get_channels()
            self.fhdhr.logger.info("Found %s channels for %s." % (len(channel_dict_list), self.fhdhr.config.dict["main"]["servicename"]))

            newchan = 0
            for channel_info in channel_dict_list:

                chan_existing = False
                if str(channel_info["id"]) in channel_origin_id_list:
                    chan_existing = True
                    channel_obj = self.get_channel_obj("origin_id", channel_info["id"])
                else:
                    channel_obj = Channel(self.fhdhr, self.id_system, origin_id=channel_info["id"])

                channel_id = channel_obj.dict["id"]
                # basics() fills in / persists the critical channel fields.
                channel_obj.basics(channel_info)
                if not chan_existing:
                    self.list[channel_id] = channel_obj
                    newchan += 1

            if not newchan:
                newchan = "no"  # cosmetic: log "no" instead of "0"
            self.fhdhr.logger.info("Found %s NEW channels." % newchan)

            self.fhdhr.logger.info("Total Channel Count: %s" % len(self.list.keys()))

            self.list_update_time = datetime.datetime.now()
            self.fhdhr.db.set_fhdhr_value("channels", "scanned_time", time.time())

        channel_list = []
        for chan_obj in list(self.list.keys()):
            channel_list.append(self.list[chan_obj].dict)
        return channel_list

    def get_channel_stream(self, channel_number):
        """Ask the origin for a stream for the channel with this number."""
        return self.origin.get_channel_stream(self.get_channel_dict("number", channel_number))

    def get_channel_dict(self, keyfind, valfind):
        """Return the dict of the channel matching (keyfind, valfind)."""
        return self.get_channel_obj(keyfind, valfind).dict
|
||||||
38
fHDHR/device/channels/chan_ident.py
Normal file
@ -0,0 +1,38 @@
|
|||||||
|
import uuid
|
||||||
|
|
||||||
|
|
||||||
|
class Channel_IDs():
    """Allocates and looks up stable internal channel ids and guide numbers.

    Ids are uuid4 strings persisted in the fhdhr db under ("channels", "list");
    per-channel detail lives under each id's "dict" value.
    """

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def get(self, origin_id):
        """Return the existing fHDHR id mapped to origin_id, or assign a new one."""
        existing_ids = self.fhdhr.db.get_fhdhr_value("channels", "list") or []
        existing_channel_info = [self.fhdhr.db.get_channel_value(channel_id, "dict") or {} for channel_id in existing_ids]
        for existing_channel in existing_channel_info:
            # .get() guards against ids whose detail dict is missing/empty;
            # previously a bare ["origin_id"] raised KeyError on such rows.
            if existing_channel.get("origin_id") == origin_id:
                return existing_channel["id"]
        return self.assign()

    def assign(self):
        """Mint a new unique uuid4-based channel id, persist and return it."""
        existing_ids = self.fhdhr.db.get_fhdhr_value("channels", "list") or []
        channel_id = None
        while not channel_id:
            unique_id = str(uuid.uuid4())
            if str(unique_id) not in existing_ids:
                channel_id = str(unique_id)
        existing_ids.append(channel_id)
        self.fhdhr.db.set_fhdhr_value("channels", "list", existing_ids)
        return channel_id

    def get_number(self, channel_id):
        """Return the guide number (as a "float string") for channel_id.

        Falls back to the first unused number in [1000, 2000) when the
        channel has no recorded number.

        Bug fix: the match result was returned as the one-element list
        produced by the comprehension; Channel.basics() then calls
        float() on it, which raises TypeError. Return the scalar instead.

        NOTE(review): Channel.basics() passes the *origin* id here while
        this compares against the fHDHR "id" field — confirm which id
        space callers intend.
        """
        existing_ids = self.fhdhr.db.get_fhdhr_value("channels", "list") or []
        existing_channel_info = [self.fhdhr.db.get_channel_value(existing_id, "dict") or {} for existing_id in existing_ids]
        cnumber = [existing_channel["number"] for existing_channel in existing_channel_info if existing_channel.get("id") == channel_id]
        if cnumber:
            return cnumber[0]

        used_numbers = [existing_channel.get("number") for existing_channel in existing_channel_info]
        for i in range(1000, 2000):
            if str(float(i)) not in used_numbers:
                break
        return str(float(i))
|
||||||
177
fHDHR/device/channels/channel.py
Normal file
@ -0,0 +1,177 @@
|
|||||||
|
import time
|
||||||
|
|
||||||
|
|
||||||
|
class Channel():
    """A single channel: persistent dict of attributes plus derived URLs.

    The channel's state lives in self.dict and is persisted in the fhdhr db
    under (channel_id, "dict"). Unknown attribute reads fall through to the
    dict via __getattr__.
    """

    def __init__(self, fhdhr, id_system, origin_id=None, channel_id=None):
        self.fhdhr = fhdhr

        self.id_system = id_system

        # Resolve/mint an id: prefer an explicit channel_id, else map the
        # origin id to an existing one, else mint a fresh id.
        if not channel_id:
            if origin_id:
                channel_id = id_system.get(origin_id)
            else:
                channel_id = id_system.assign()
        self.channel_id = channel_id
        self.dict = self.fhdhr.db.get_channel_value(str(channel_id), "dict") or self.default_dict
        self.verify_dict()

        self.fhdhr.db.set_channel_value(self.dict["id"], "dict", self.dict)

    @property
    def thumbnail(self):
        """Best available thumbnail URL, falling back to a generated image."""
        if str(self.dict["thumbnail"]).lower() in ["none"]:
            return self.generic_image_url
        elif self.dict["thumbnail"]:
            return self.dict["thumbnail"]
        elif self.dict["origin_thumbnail"]:
            return self.dict["origin_thumbnail"]
        else:
            return self.generic_image_url

    @property
    def epgdict(self):
        """Skeleton EPG entry for this channel (empty listing)."""
        return {
            "callsign": self.dict["callsign"],
            "name": self.dict["name"],
            "number": self.dict["number"],
            "id": self.dict["origin_id"],
            "thumbnail": self.dict["thumbnail"],
            "listing": [],
            }

    def verify_dict(self):
        """Backfill any keys added to default_dict since this channel was saved."""
        default_dict = self.default_dict
        for key in list(default_dict.keys()):
            if key not in list(self.dict.keys()):
                self.dict[key] = default_dict[key]

    def basics(self, channel_info):
        """Fill in critical channel fields from an origin channel_info dict.

        Missing fields are synthesized; origin_* fields always track the
        origin, while the plain fields keep any user override. Persists the
        result to the db.
        """

        if "name" not in list(channel_info.keys()):
            channel_info["name"] = self.dict["id"]
        self.dict["origin_name"] = channel_info["name"]
        if not self.dict["name"]:
            self.dict["name"] = self.dict["origin_name"]

        if "id" not in list(channel_info.keys()):
            channel_info["id"] = channel_info["name"]
        self.dict["origin_id"] = channel_info["id"]

        if "callsign" not in list(channel_info.keys()):
            channel_info["callsign"] = channel_info["name"]
        self.dict["origin_callsign"] = channel_info["callsign"]
        if not self.dict["callsign"]:
            self.dict["callsign"] = self.dict["origin_callsign"]

        if "tags" not in list(channel_info.keys()):
            channel_info["tags"] = []
        self.dict["origin_tags"] = channel_info["tags"]
        if not self.dict["tags"]:
            self.dict["tags"] = self.dict["origin_tags"]

        if "number" not in list(channel_info.keys()):
            channel_info["number"] = self.id_system.get_number(channel_info["id"])
        # Guide numbers are normalized to "float strings" (e.g. "1000.0").
        self.dict["origin_number"] = str(float(channel_info["number"]))
        if not self.dict["number"]:
            self.dict["number"] = self.dict["origin_number"]

        if "thumbnail" not in list(channel_info.keys()):
            channel_info["thumbnail"] = None
        self.dict["origin_thumbnail"] = channel_info["thumbnail"]
        if not self.dict["thumbnail"]:
            self.dict["thumbnail"] = self.dict["origin_thumbnail"]

        if "HD" not in list(channel_info.keys()):
            channel_info["HD"] = 0
        self.dict["HD"] = channel_info["HD"]

        # Only honor the origin's enabled flag on first creation, so a
        # user's enable/disable choice is not clobbered on rescans.
        if "enabled" in list(channel_info.keys()):
            if "created" not in list(self.dict.keys()):
                self.dict["enabled"] = channel_info["enabled"]

        if "created" not in list(self.dict.keys()):
            self.dict["created"] = time.time()

        self.fhdhr.db.set_channel_value(self.dict["id"], "dict", self.dict)

    @property
    def default_dict(self):
        """Template dict for a brand-new channel."""
        return {
            "id": str(self.channel_id), "origin_id": None,
            "name": None, "origin_name": None,
            "callsign": None, "origin_callsign": None,
            "number": None, "origin_number": None,
            "tags": [], "origin_tags": [],
            "thumbnail": None, "origin_thumbnail": None,
            "enabled": True, "favorite": 0,
            "HD": 0,
            }

    def destroy(self):
        """Remove this channel from the db and from the global id list."""
        self.fhdhr.db.delete_channel_value(self.dict["id"], "dict")
        channel_ids = self.fhdhr.db.get_fhdhr_value("channels", "list") or []
        if self.dict["id"] in channel_ids:
            channel_ids.remove(self.dict["id"])
            self.fhdhr.db.set_fhdhr_value("channels", "list", channel_ids)

    def set_status(self, updatedict):
        """Merge updatedict into the channel dict (normalizing "number") and persist."""
        for key in list(updatedict.keys()):
            if key == "number":
                updatedict[key] = str(float(updatedict[key]))
            self.dict[key] = updatedict[key]
        self.fhdhr.db.set_channel_value(self.dict["id"], "dict", self.dict)

    @property
    def lineup_dict(self):
        """HDHomeRun-style lineup entry for this channel."""
        return {
            'GuideNumber': self.dict['number'],
            'GuideName': self.dict['name'],
            'Tags': ",".join(self.dict['tags']),
            'URL': self.stream_url,
            'HD': self.dict["HD"],
            "Favorite": self.dict["favorite"],
            }

    @property
    def generic_image_url(self):
        return "/api/images?method=generate&type=channel&message=%s" % self.dict["number"]

    @property
    def stream_url(self):
        return '/auto/v%s' % self.dict['number']

    @property
    def play_url(self):
        return '/api/m3u?method=get&channel=%s' % self.dict['number']

    def set_favorite(self, enablement):
        """Mark ("+") or unmark ("-") this channel as a favorite and persist."""
        if enablement == "+":
            self.dict["favorite"] = 1
        # FIX: this branch previously re-tested "+", so "-" could never
        # unfavorite a channel.
        elif enablement == "-":
            self.dict["favorite"] = 0
        # FIX: persist under the "dict" key; this was written to "info",
        # which is never read back, so favorites were lost on restart.
        self.fhdhr.db.set_channel_value(self.dict["id"], "dict", self.dict)

    def set_enablement(self, enablement):
        """Enable, disable, or toggle this channel and persist."""
        if enablement == "disable":
            self.dict["enabled"] = False
        elif enablement == "enable":
            self.dict["enabled"] = True
        elif enablement == "toggle":
            if self.dict["enabled"]:
                self.dict["enabled"] = False
            else:
                self.dict["enabled"] = True
        # FIX: persist under the "dict" key (was "info"; never read back).
        self.fhdhr.db.set_channel_value(self.dict["id"], "dict", self.dict)

    def __getattr__(self, name):
        ''' will only get called for undefined attributes '''
        if name in list(self.dict.keys()):
            return self.dict[name]
        else:
            return None
|
||||||
158
fHDHR/device/cluster.py
Normal file
@ -0,0 +1,158 @@
|
|||||||
|
from collections import OrderedDict
|
||||||
|
|
||||||
|
|
||||||
|
class fHDHR_Cluster():
    """Keeps a set of fHDHR instances in sync over HTTP.

    The cluster membership is a dict persisted under ("cluster", "dict"),
    keyed by each member's base URL. Peers expose /api/cluster endpoints
    used here to get/sync/leave.
    """

    def __init__(self, fhdhr, ssdp):
        self.fhdhr = fhdhr

        self.ssdp = ssdp

        self.friendlyname = self.fhdhr.config.dict["fhdhr"]["friendlyname"]

        # Only participate in clustering when a discovery address is set.
        if self.fhdhr.config.dict["fhdhr"]["discovery_address"]:
            self.startup_sync()

    def cluster(self):
        """Return the persisted cluster dict, or a default containing only us."""
        return self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()

    def get_cluster_dicts_web(self):
        """Return peer {base_url, name} dicts sorted by name, excluding self.

        Returns None when there are no peers (for web template use).
        """
        fhdhr_list = self.cluster()
        locations = []
        for location in list(fhdhr_list.keys()):
            item_dict = {
                "base_url": fhdhr_list[location]["base_url"],
                "name": fhdhr_list[location]["name"]
                }
            if item_dict["base_url"] != self.fhdhr.api.base:
                locations.append(item_dict)
        if len(locations):
            locations = sorted(locations, key=lambda i: i['name'])
            return locations
        else:
            return None

    def get_list(self):
        """Return {location: {"Joined": bool}} merging cluster members and
        SSDP-detected (but not yet joined) instances, sorted by location."""
        cluster = self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        return_dict = {}
        for location in list(cluster.keys()):
            if location != self.fhdhr.api.base:
                return_dict[location] = {
                    "Joined": True
                    }

        detected_list = self.ssdp.detect_method.get()
        for location in detected_list:
            if location not in list(cluster.keys()):
                return_dict[location] = {
                    "Joined": False
                    }
        return_dict = OrderedDict(sorted(return_dict.items()))
        return return_dict

    def default_cluster(self):
        """A cluster dict containing only this instance."""
        defdict = {}
        defdict[self.fhdhr.api.base] = {
            "base_url": self.fhdhr.api.base,
            "name": self.friendlyname
            }
        return defdict

    def startup_sync(self):
        """At startup, verify each known peer still lists us as a member.

        If any reachable peer's cluster no longer contains us, we leave.
        Unreachable peers are logged and skipped.
        """
        self.fhdhr.logger.info("Syncronizing with Cluster.")
        cluster = self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        if not len(list(cluster.keys())):
            self.fhdhr.logger.info("No Cluster Found.")
        else:
            self.fhdhr.logger.info("Found %s clustered services." % str(len(list(cluster.keys()))))
            for location in list(cluster.keys()):
                if location != self.fhdhr.api.base:
                    self.fhdhr.logger.info("Checking Cluster Syncronization information from %s." % location)
                    sync_url = location + "/api/cluster?method=get"
                    try:
                        sync_open = self.fhdhr.web.session.get(sync_url)
                        retrieved_cluster = sync_open.json()
                        if self.fhdhr.api.base not in list(retrieved_cluster.keys()):
                            return self.leave()
                    except self.fhdhr.web.exceptions.ConnectionError:
                        self.fhdhr.logger.error("Unreachable: " + location)

    def leave(self):
        """Reset our persisted cluster to just this instance."""
        self.fhdhr.logger.info("Leaving cluster.")
        self.fhdhr.db.set_fhdhr_value("cluster", "dict", self.default_cluster())

    def disconnect(self):
        """Tell every peer we are departing, then leave locally."""
        cluster = self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        for location in list(cluster.keys()):
            if location != self.fhdhr.api.base:
                self.fhdhr.logger.info("Informing %s that I am departing the Cluster." % location)
                sync_url = location + "/api/cluster?method=del&location=" + self.fhdhr.api.base
                try:
                    self.fhdhr.web.session.get(sync_url)
                except self.fhdhr.web.exceptions.ConnectionError:
                    self.fhdhr.logger.error("Unreachable: " + location)
        self.leave()

    def sync(self, location):
        """Replace our cluster dict with the one fetched from a peer."""
        sync_url = location + "/api/cluster?method=get"
        try:
            sync_open = self.fhdhr.web.session.get(sync_url)
            self.fhdhr.db.set_fhdhr_value("cluster", "dict", sync_open.json())
        except self.fhdhr.web.exceptions.ConnectionError:
            self.fhdhr.logger.error("Unreachable: " + location)

    def push_sync(self):
        """Ask every peer to pull the cluster state from us."""
        cluster = self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        for location in list(cluster.keys()):
            if location != self.fhdhr.api.base:
                sync_url = location + "/api/cluster?method=sync&location=" + self.fhdhr.api.base_quoted
                try:
                    self.fhdhr.web.session.get(sync_url)
                except self.fhdhr.web.exceptions.ConnectionError:
                    self.fhdhr.logger.error("Unreachable: " + location)

    def add(self, location):
        """Add a peer: fetch its identity and cluster, merge, persist, push.

        If the peer is unreachable at any step, the partial addition is
        rolled back and persisted.
        """
        cluster = self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        if location not in list(cluster.keys()):
            self.fhdhr.logger.info("Adding %s to cluster." % location)
            cluster[location] = {"base_url": location}

            location_info_url = "%s/hdhr/discover.json" % location
            try:
                location_info_req = self.fhdhr.web.session.get(location_info_url)
            except self.fhdhr.web.exceptions.ConnectionError:
                self.fhdhr.logger.error("Unreachable: " + location)
                del cluster[location]
                self.fhdhr.db.set_fhdhr_value("cluster", "dict", cluster)
                return
            location_info = location_info_req.json()
            cluster[location]["name"] = location_info["FriendlyName"]

            cluster_info_url = location + "/api/cluster?method=get"
            try:
                cluster_info_req = self.fhdhr.web.session.get(cluster_info_url)
            except self.fhdhr.web.exceptions.ConnectionError:
                self.fhdhr.logger.error("Unreachable: " + location)
                del cluster[location]
                self.fhdhr.db.set_fhdhr_value("cluster", "dict", cluster)
                return
            cluster_info = cluster_info_req.json()
            # Merge any members the peer knows about that we do not.
            for cluster_key in list(cluster_info.keys()):
                if cluster_key not in list(cluster.keys()):
                    cluster[cluster_key] = cluster_info[cluster_key]

            self.fhdhr.db.set_fhdhr_value("cluster", "dict", cluster)
            self.push_sync()

    def remove(self, location):
        """Remove a peer from the cluster and tell it to leave.

        NOTE(review): push_sync() runs before the updated cluster dict is
        persisted, so peers syncing from us may still see the removed
        member — confirm the intended ordering.
        """
        cluster = self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        if location in list(cluster.keys()):
            self.fhdhr.logger.info("Removing %s from cluster." % location)
            del cluster[location]
            sync_url = location + "/api/cluster?method=leave"
            try:
                self.fhdhr.web.session.get(sync_url)
            except self.fhdhr.web.exceptions.ConnectionError:
                self.fhdhr.logger.error("Unreachable: " + location)
            self.push_sync()
            self.fhdhr.db.set_fhdhr_value("cluster", "dict", cluster)
|
||||||
191
fHDHR/device/epg/__init__.py
Normal file
@ -0,0 +1,191 @@
|
|||||||
|
import os
|
||||||
|
import time
|
||||||
|
import datetime
|
||||||
|
from collections import OrderedDict
|
||||||
|
|
||||||
|
from .blocks import blocksEPG
|
||||||
|
|
||||||
|
|
||||||
|
class EPG():
|
||||||
|
|
||||||
|
def __init__(self, fhdhr, channels, originwrapper, alternative_epg):
    """Set up EPG sources, per-method update intervals, and the cache."""
    self.fhdhr = fhdhr

    self.origin = originwrapper
    self.channels = channels
    self.alternative_epg = alternative_epg

    # In-memory EPG cache, keyed by EPG method name.
    self.epgdict = {}

    self.epg_methods = self.fhdhr.config.dict["epg"]["method"]
    # Filter out placeholder entries (None or the string "None").
    self.valid_epg_methods = [x for x in self.fhdhr.config.dict["epg"]["valid_epg_methods"] if x and x not in [None, "None"]]

    # Built-in EPG sources; plugin sources are added by epg_method_selfadd().
    self.blocks = blocksEPG(self.fhdhr, self.channels)
    self.epg_handling = {
        "origin": self.origin,
        "blocks": self.blocks,
        }
    self.epg_method_selfadd()

    self.def_method = self.fhdhr.config.dict["epg"]["def_method"]
    # Per-method refresh interval: a method's own config section may
    # override the global epg update_frequency.
    self.sleeptime = {}
    for epg_method in self.epg_methods:
        if epg_method in list(self.fhdhr.config.dict.keys()):
            if "update_frequency" in list(self.fhdhr.config.dict[epg_method].keys()):
                self.sleeptime[epg_method] = self.fhdhr.config.dict[epg_method]["update_frequency"]
        if epg_method not in list(self.sleeptime.keys()):
            self.sleeptime[epg_method] = self.fhdhr.config.dict["epg"]["update_frequency"]

    self.epg_update_url = "%s/api/epg?method=update" % (self.fhdhr.api.base)
|
||||||
|
|
||||||
|
def clear_epg_cache(self, method=None):
    """Drop cached EPG data for a method (default method when None).

    Clears the source's own cache (when it has one), the in-memory
    epgdict entry, and the persisted "epg_dict" value.
    """

    if not method:
        method = self.def_method
    # The origin service's display name and unknown methods both map to
    # the internal "origin" method.
    if (method == self.fhdhr.config.dict["main"]["dictpopname"] or
       method not in self.fhdhr.config.dict["epg"]["valid_epg_methods"]):
        method = "origin"

    # Log with the user-facing service name rather than "origin".
    epgtypename = method
    if method in [self.fhdhr.config.dict["main"]["dictpopname"], "origin"]:
        epgtypename = self.fhdhr.config.dict["main"]["dictpopname"]

    self.fhdhr.logger.info("Clearing " + epgtypename + " EPG cache.")

    if hasattr(self.epg_handling[method], 'clear_cache'):
        self.epg_handling[method].clear_cache()

    if method in list(self.epgdict.keys()):
        del self.epgdict[method]

    self.fhdhr.db.delete_fhdhr_value("epg_dict", method)
|
||||||
|
|
||||||
|
def whats_on_now(self, channel, method=None):
    """Return the channel's EPG entry trimmed to the currently-airing listing.

    Args:
        channel: channel key into the EPG dict (guide-number string).
        method: EPG method name; defaults to the configured method.

    Returns:
        A copy of the channel's EPG dict whose "listing" holds only the
        listing airing right now, or None when nothing matches.
    """
    epgdict = self.get_epg(method)
    listings = epgdict[channel]["listing"]
    # Hoisted out of the loop: use one consistent "now" for the whole
    # scan (previously the clock was re-read for every listing, which
    # could in principle skip a listing right at a boundary).
    nowtime = datetime.datetime.utcnow()
    for listing in listings:
        # Listing times are stored as XMLTV-style UTC strings.
        start_time = datetime.datetime.strptime(listing["time_start"], '%Y%m%d%H%M%S +0000')
        end_time = datetime.datetime.strptime(listing["time_end"], '%Y%m%d%H%M%S +0000')
        if start_time <= nowtime <= end_time:
            epgitem = epgdict[channel].copy()
            epgitem["listing"] = [listing]
            return epgitem
    return None
|
||||||
|
|
||||||
|
def whats_on_allchans(self, method=None):
    """Return the currently-airing EPG entry for every channel.

    Channels with no listing airing right now are omitted.
    """

    if not method:
        method = self.def_method
    # The origin service's display name and unknown methods both map to
    # the internal "origin" method.
    if (method == self.fhdhr.config.dict["main"]["dictpopname"] or
       method not in self.fhdhr.config.dict["epg"]["valid_epg_methods"]):
        method = "origin"

    channel_guide_list = []
    epgdict = self.get_epg(method)
    channels = list(epgdict.keys())
    for channel in channels:
        whatson = self.whats_on_now(epgdict[channel]["number"], method)
        if whatson:
            channel_guide_list.append(whatson)
    return channel_guide_list
|
||||||
|
|
||||||
|
def get_epg(self, method=None):
    """Return the EPG dict for a method, filling caches as needed.

    Lookup order: in-memory cache, then the persisted "epg_dict" value,
    then a full update() which repopulates the persisted value.
    """

    if not method:
        method = self.def_method
    # The origin service's display name and unknown methods both map to
    # the internal "origin" method.
    if (method == self.fhdhr.config.dict["main"]["dictpopname"] or
       method not in self.fhdhr.config.dict["epg"]["valid_epg_methods"]):
        method = "origin"

    if method not in list(self.epgdict.keys()):

        epgdict = self.fhdhr.db.get_fhdhr_value("epg_dict", method) or None
        if not epgdict:
            # Nothing persisted either: run a full update, then re-read.
            self.update(method)
            self.epgdict[method] = self.fhdhr.db.get_fhdhr_value("epg_dict", method) or {}
        else:
            self.epgdict[method] = epgdict
        return self.epgdict[method]
    else:
        return self.epgdict[method]
|
||||||
|
|
||||||
|
def get_thumbnail(self, itemtype, itemid):
    """Return the thumbnail URL for a channel or program, else None.

    itemtype selects the lookup: "channel" matches a channel id,
    "content" matches a program/listing id.
    """
    if itemtype == "channel":
        return self.find_channel_dict(itemid)["thumbnail"]
    if itemtype == "content":
        return self.find_program_dict(itemid)["thumbnail"]
    return None
|
||||||
|
|
||||||
|
def find_channel_dict(self, channel_id):
    """Return the EPG channel dict whose "id" equals channel_id.

    Raises StopIteration when no channel matches (same as before).
    """
    epgdict = self.get_epg()
    candidates = [epgdict[chan] for chan in list(epgdict.keys())]
    return next(item for item in candidates if item["id"] == channel_id)
|
||||||
|
|
||||||
|
def find_program_dict(self, event_id):
    """Return the EPG listing dict whose "id" equals event_id.

    Raises StopIteration when no listing matches (same as before).
    """
    epgdict = self.get_epg()
    all_listings = []
    for chan in list(epgdict.keys()):
        all_listings += epgdict[chan]["listing"]
    return next(item for item in all_listings if item["id"] == event_id)
|
||||||
|
|
||||||
|
def epg_method_selfadd(self):
|
||||||
|
self.fhdhr.logger.info("Checking for Alternative EPG methods.")
|
||||||
|
new_epgtype_list = []
|
||||||
|
for entry in os.scandir(self.fhdhr.config.internal["paths"]["alternative_epg"]):
|
||||||
|
if entry.is_file():
|
||||||
|
if entry.name[0] != '_' and entry.name.endswith(".py"):
|
||||||
|
new_epgtype_list.append(str(entry.name[:-3]))
|
||||||
|
for method in new_epgtype_list:
|
||||||
|
self.fhdhr.logger.info("Found %s EPG method." % method)
|
||||||
|
self.epg_handling[method] = eval("self.alternative_epg.%s.%sEPG(self.fhdhr, self.channels)" % (method, method))
|
||||||
|
|
||||||
|
    def update(self, method=None):
        """Rebuild the EPG for *method*, normalize it, and persist it to the DB.

        Falls back to the default method for missing/invalid names, normalizes
        channel keys to float-strings, sorts channels and their listings, then
        stores the guide and the update timestamp.
        """

        # Invalid or missing method names fall back to the configured default.
        if (not method or
           method not in self.fhdhr.config.dict["epg"]["valid_epg_methods"]):
            method = self.def_method

        # The branded origin name is an alias for "origin".
        if method == self.fhdhr.config.dict["main"]["dictpopname"]:
            method = "origin"

        # epgtypename is only used for log output; show the branded name for origin.
        epgtypename = method
        if method in [self.fhdhr.config.dict["main"]["dictpopname"], "origin"]:
            epgtypename = self.fhdhr.config.dict["main"]["dictpopname"]

        self.fhdhr.logger.info("Updating " + epgtypename + " EPG cache.")
        # The origin handler takes the channel list here; alternative handlers
        # were constructed with it already (see epg_method_selfadd).
        if method == 'origin':
            programguide = self.epg_handling['origin'].update_epg(self.channels)
        else:
            programguide = self.epg_handling[method].update_epg()

        # Normalize channel keys to float-strings (e.g. "2" -> "2.0") so lookups
        # are consistent across EPG sources.
        for chan in list(programguide.keys()):
            floatnum = str(float(chan))
            programguide[floatnum] = programguide.pop(chan)
            programguide[floatnum]["number"] = floatnum

        # NOTE(review): sorted() on float-STRING keys is lexical, so "10.0"
        # orders before "2.0" — confirm whether numeric ordering was intended.
        programguide = OrderedDict(sorted(programguide.items()))

        # Listings are served in chronological order.
        for cnum in programguide:
            programguide[cnum]["listing"] = sorted(programguide[cnum]["listing"], key=lambda i: i['time_start'])

        self.epgdict = programguide
        self.fhdhr.db.set_fhdhr_value("epg_dict", method, programguide)
        self.fhdhr.db.set_fhdhr_value("update_time", method, time.time())
        self.fhdhr.logger.info("Wrote " + epgtypename + " EPG cache.")
|
||||||
|
|
||||||
|
def run(self):
|
||||||
|
for epg_method in self.epg_methods:
|
||||||
|
self.fhdhr.web.session.get(self.epg_update_url)
|
||||||
|
try:
|
||||||
|
while True:
|
||||||
|
for epg_method in self.epg_methods:
|
||||||
|
if time.time() >= (self.fhdhr.db.get_fhdhr_value("update_time", epg_method) + self.sleeptime[epg_method]):
|
||||||
|
self.fhdhr.web.session.get(self.epg_update_url)
|
||||||
|
time.sleep(360)
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
pass
|
||||||
74
fHDHR/device/epg/blocks.py
Normal file
@ -0,0 +1,74 @@
|
|||||||
|
import datetime
|
||||||
|
|
||||||
|
|
||||||
|
class blocksEPG():
    """Fallback EPG generator that fills the guide with hour-long placeholder blocks."""

    def __init__(self, fhdhr, channels):
        self.fhdhr = fhdhr
        self.channels = channels

    def update_epg(self):
        """Build a program guide of empty hourly blocks for every known channel."""
        programguide = {}
        timestamps = self.timestamps

        for fhdhr_id in list(self.channels.list.keys()):
            chan_obj = self.channels.list[fhdhr_id]
            channel_number = str(chan_obj.dict["number"])

            if channel_number not in programguide:
                programguide[channel_number] = chan_obj.epgdict

            for placeholder in self.empty_channel_epg(timestamps, chan_obj):
                programguide[channel_number]["listing"].append(placeholder)

        return programguide

    def get_content_thumbnail(self, content_id):
        """URL of a generated placeholder image for *content_id*."""
        return "/api/images?method=generate&type=content&message=%s" % content_id

    @property
    def timestamps(self):
        """Hourly {time_start, time_end} stamps covering today plus the next 5 days."""
        stamps = []
        today = datetime.date.today()
        for day_offset in range(6):
            day = today + datetime.timedelta(days=day_offset)
            next_day = day + datetime.timedelta(days=1)

            for hour in range(24):
                start = datetime.datetime.combine(day, datetime.time(hour, 0))
                # The final hour of the day ends at midnight of the next day.
                if hour + 1 < 24:
                    end = datetime.datetime.combine(day, datetime.time(hour + 1, 0))
                else:
                    end = datetime.datetime.combine(next_day, datetime.time(0, 0))
                stamps.append({
                    "time_start": str(start.strftime('%Y%m%d%H%M%S')) + " +0000",
                    "time_end": str(end.strftime('%Y%m%d%H%M%S')) + " +0000",
                    })
        return stamps

    def empty_channel_epg(self, timestamps, chan_obj):
        """Return one "Unavailable" placeholder listing per timestamp for a channel."""
        placeholders = []
        for stamp in timestamps:
            content_id = "%s_%s" % (chan_obj.dict["origin_id"], str(stamp['time_start']).split(" ")[0])
            placeholders.append({
                "time_start": stamp['time_start'],
                "time_end": stamp['time_end'],
                "duration_minutes": 60,
                "thumbnail": chan_obj.dict["thumbnail"] or self.get_content_thumbnail(content_id),
                "title": "Unavailable",
                "sub-title": "Unavailable",
                "description": "Unavailable",
                "rating": "N/A",
                "episodetitle": None,
                "releaseyear": None,
                "genres": [],
                "seasonnumber": None,
                "episodenumber": None,
                "isnew": False,
                "id": content_id,
                })
        return placeholders
|
||||||
60
fHDHR/device/images.py
Normal file
@ -0,0 +1,60 @@
|
|||||||
|
from io import BytesIO
|
||||||
|
import PIL.Image
|
||||||
|
import PIL.ImageDraw
|
||||||
|
import PIL.ImageFont
|
||||||
|
|
||||||
|
|
||||||
|
class imageHandler():
    """Fetches EPG thumbnails and generates placeholder images when none exist."""

    def __init__(self, fhdhr, epg):
        self.fhdhr = fhdhr
        # Bug fix: epg was accepted but never stored, so get_epg_image crashed
        # with AttributeError when it read self.epg.
        self.epg = epg

    def get_epg_image(self, image_type, content_id):
        """Return image bytes for an EPG item, generating a placeholder if missing."""
        imageUri = self.epg.get_thumbnail(image_type, str(content_id))
        if not imageUri:
            return self.generate_image(image_type, str(content_id))

        req = self.fhdhr.web.session.get(imageUri)
        return req.content

    def getSize(self, txt, font):
        """Return (width, height) of *txt* rendered with *font*."""
        # Render against a throwaway 1x1 image just to measure text.
        testImg = PIL.Image.new('RGB', (1, 1))
        testDraw = PIL.ImageDraw.Draw(testImg)
        return testDraw.textsize(txt, font)

    def generate_image(self, messagetype, message):
        """Render *message* centered on a PNG sized for channel or content art.

        NOTE(review): an unknown messagetype leaves width/height/fontsize unset
        and raises UnboundLocalError — callers pass only "channel"/"content".
        """
        if messagetype == "channel":
            width = 360
            height = 270
            fontsize = 72
        elif messagetype == "content":
            width = 1080
            height = 1440
            fontsize = 100

        colorBackground = "#228822"
        colorText = "#717D7E"
        colorOutline = "#717D7E"
        fontname = str(self.fhdhr.config.internal["paths"]["font"])

        font = PIL.ImageFont.truetype(fontname, fontsize)
        text_width, text_height = self.getSize(message, font)
        img = PIL.Image.new('RGBA', (width+4, height+4), colorBackground)
        d = PIL.ImageDraw.Draw(img)
        d.text(((width-text_width)/2, (height-text_height)/2), message, fill=colorText, font=font)
        d.rectangle((0, 0, width+3, height+3), outline=colorOutline)

        s = BytesIO()
        img.save(s, 'png')
        return s.getvalue()

    def get_image_type(self, image_data):
        """Guess a MIME type from the first three magic bytes; default to JPEG."""
        header_byte = image_data[0:3].hex().lower()
        if header_byte == '474946':      # "GIF"
            return "image/gif"
        elif header_byte == '89504e':    # PNG signature
            return "image/png"
        elif header_byte == 'ffd8ff':    # JPEG SOI marker
            return "image/jpeg"
        else:
            return "image/jpeg"
|
||||||
208
fHDHR/device/ssdp/__init__.py
Normal file
@ -0,0 +1,208 @@
|
|||||||
|
# Adapted from https://github.com/MoshiBin/ssdpy and https://github.com/ZeWaren/python-upnp-ssdp-example
|
||||||
|
import socket
|
||||||
|
import struct
|
||||||
|
import time
|
||||||
|
|
||||||
|
from .ssdp_detect import fHDHR_Detect
|
||||||
|
from .rmg_ssdp import RMG_SSDP
|
||||||
|
from .hdhr_ssdp import HDHR_SSDP
|
||||||
|
|
||||||
|
|
||||||
|
class SSDPServer():
    """SSDP server: announces fHDHR on the LAN and answers M-SEARCH queries.

    Adapted from https://github.com/MoshiBin/ssdpy and
    https://github.com/ZeWaren/python-upnp-ssdp-example
    """

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

        self.detect_method = fHDHR_Detect(fhdhr)

        if (self.fhdhr.config.dict["fhdhr"]["discovery_address"] and
           self.fhdhr.config.dict["ssdp"]["enabled"]):
            self.setup_ssdp()

            self.sock.bind((self.bind_address, 1900))

            self.msearch_payload = self.create_msearch_payload()

            self.max_age = int(fhdhr.config.dict["ssdp"]["max_age"])
            # Timestamp of the last alive broadcast; None forces one immediately.
            self.age_time = None

            self.rmg_ssdp = RMG_SSDP(fhdhr, self.broadcast_ip, self.max_age)
            self.hdhr_ssdp = HDHR_SSDP(fhdhr, self.broadcast_ip, self.max_age)

            self.do_alive()
            self.m_search()

    def do_alive(self, forcealive=False):
        """Broadcast NOTIFY ssdp:alive messages when forced, never sent, or expired."""
        send_alive = False
        if not self.age_time:
            send_alive = True
        elif forcealive:
            send_alive = True
        elif time.time() >= (self.age_time + self.max_age):
            send_alive = True

        if send_alive:
            self.fhdhr.logger.info("Sending Alive message to network.")
            # Bug fix: the attribute was spelled "broadcase_address_tuple" in the
            # ipv4 setup path but "broadcast_address_tuple" in the ipv6 path,
            # so one protocol always raised AttributeError here. The spelling is
            # now unified as broadcast_address_tuple.
            self.do_notify(self.broadcast_address_tuple)
            self.age_time = time.time()

    def do_notify(self, address):
        """Send the HDHR (and, if enabled, RMG) NOTIFY payloads to *address*."""
        notify_list = []

        hdhr_notify = self.hdhr_ssdp.get()
        notify_list.append(hdhr_notify)

        if self.fhdhr.config.dict["rmg"]["enabled"]:
            rmg_notify = self.rmg_ssdp.get()
            notify_list.append(rmg_notify)

        for notifydata in notify_list:

            self.fhdhr.logger.debug("Created {}".format(notifydata))
            try:
                self.sock.sendto(notifydata, address)
            except OSError as e:
                # Most commonly: We received a multicast from an IP not in our subnet
                self.fhdhr.logger.debug("Unable to send NOTIFY: %s" % e)

    def on_recv(self, data, address):
        """Parse one received SSDP datagram and react to M-SEARCH / NOTIFY."""
        self.fhdhr.logger.debug("Received packet from {}: {}".format(address, data))

        try:
            header, payload = data.decode().split('\r\n\r\n')[:2]
        except ValueError:
            self.fhdhr.logger.error("Error with Received packet from {}: {}".format(address, data))
            return

        # First line is the command; remaining lines become a lowercase-keyed
        # header dict.
        lines = header.split('\r\n')
        cmd = lines[0].split(' ')
        lines = map(lambda x: x.replace(': ', ':', 1), lines[1:])
        lines = filter(lambda x: len(x) > 0, lines)

        headers = [x.split(':', 1) for x in lines]
        headers = dict(map(lambda x: (x[0].lower(), x[1]), headers))

        if cmd[0] == 'M-SEARCH' and cmd[1] == '*':
            # SSDP discovery
            self.fhdhr.logger.debug("Received qualifying M-SEARCH from {}".format(address))
            self.fhdhr.logger.debug("M-SEARCH data: {}".format(headers))

            self.do_notify(address)

        elif cmd[0] == 'NOTIFY' and cmd[1] == '*':
            # SSDP presence: record other fHDHR instances announcing themselves.
            self.fhdhr.logger.debug("NOTIFY data: {}".format(headers))
            try:
                if headers["server"].startswith("fHDHR"):
                    savelocation = headers["location"].split("/device.xml")[0]
                    if savelocation.endswith("/hdhr"):
                        savelocation = savelocation.replace("/hdhr", '')
                    elif savelocation.endswith("/rmg"):
                        savelocation = savelocation.replace("/rmg", '')
                    # Ignore our own announcements.
                    if savelocation != self.fhdhr.api.base:
                        self.detect_method.set(savelocation)
            except KeyError:
                return
        else:
            self.fhdhr.logger.debug('Unknown SSDP command %s %s' % (cmd[0], cmd[1]))

    def m_search(self):
        """Broadcast an M-SEARCH to discover other SSDP devices."""
        data = self.msearch_payload
        self.sock.sendto(data, self.broadcast_address_tuple)

    def create_msearch_payload(self):
        """Build the M-SEARCH request payload as UTF-8 bytes."""
        data = ''
        data_command = "M-SEARCH * HTTP/1.1"

        data_dict = {
            "HOST": "%s:%s" % (self.broadcast_ip, 1900),
            "MAN": "ssdp:discover",
            "ST": "ssdp:all",
            "MX": 1,
            }

        data += "%s\r\n" % data_command
        for data_key in list(data_dict.keys()):
            data += "%s:%s\r\n" % (data_key, data_dict[data_key])
        data += "\r\n"

        return data.encode("utf-8")

    def run(self):
        """Receive loop: handle incoming datagrams and keep alive broadcasts fresh."""
        try:
            while True:
                data, address = self.sock.recvfrom(1024)
                self.on_recv(data, address)
                self.do_alive()
        except KeyboardInterrupt:
            self.sock.close()

    def setup_ssdp(self):
        """Create and configure the multicast UDP socket."""
        self.sock = None

        self.proto = self.setup_proto()
        self.iface = self.fhdhr.config.dict["ssdp"]["iface"]
        self.address = self.fhdhr.config.dict["ssdp"]["multicast_address"]
        self.setup_addressing()

        self.sock = socket.socket(self.af_type, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
        self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

        self.setup_interface()

        self.setup_multicasting()

    def setup_proto(self):
        """Validate and return the configured protocol ("ipv4" or "ipv6")."""
        proto = self.fhdhr.config.dict["ssdp"]["proto"]
        allowed_protos = ("ipv4", "ipv6")
        if proto not in allowed_protos:
            raise ValueError("Invalid proto - expected one of {}".format(allowed_protos))
        return proto

    def setup_addressing(self):
        """Set address family, broadcast ip/tuple, and bind address per protocol."""
        if self.proto == "ipv4":
            self.af_type = socket.AF_INET
            self.broadcast_ip = "239.255.255.250"
            # Bug fix: this assignment was the misspelled "broadcase_address_tuple"
            # while the ipv6 branch used the correct spelling; the name is now
            # consistent with the readers in do_alive() and m_search().
            self.broadcast_address_tuple = (self.broadcast_ip, 1900)
            self.bind_address = "0.0.0.0"
        elif self.proto == "ipv6":
            self.af_type = socket.AF_INET6
            self.broadcast_ip = "ff02::c"
            self.broadcast_address_tuple = (self.broadcast_ip, 1900, 0, 0)
            self.bind_address = "::"

    def setup_interface(self):
        # Bind to specific interface
        if self.iface is not None:
            self.sock.setsockopt(socket.SOL_SOCKET, getattr(socket, "SO_BINDTODEVICE", 25), self.iface)

    def setup_multicasting(self):
        # Subscribe to multicast address
        if self.proto == "ipv4":
            mreq = socket.inet_aton(self.broadcast_ip)
            if self.address is not None:
                mreq += socket.inet_aton(self.address)
            else:
                mreq += struct.pack(b"@I", socket.INADDR_ANY)
            self.sock.setsockopt(
                socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
            # Allow multicasts on loopback devices (necessary for testing)
            self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, 1)
        elif self.proto == "ipv6":
            # In IPv6 we use the interface index, not the address when subscribing to the group
            mreq = socket.inet_pton(socket.AF_INET6, self.broadcast_ip)
            if self.iface is not None:
                iface_index = socket.if_nametoindex(self.iface)
                # Send outgoing packets from the same interface
                self.sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_IF, iface_index)
                mreq += struct.pack(b"@I", iface_index)
            else:
                mreq += socket.inet_pton(socket.AF_INET6, "::")
            self.sock.setsockopt(
                socket.IPPROTO_IPV6, socket.IPV6_JOIN_GROUP, mreq,
            )
            self.sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_LOOP, 1)
|
||||||
39
fHDHR/device/ssdp/hdhr_ssdp.py
Normal file
@ -0,0 +1,39 @@
|
|||||||
|
|
||||||
|
|
||||||
|
class HDHR_SSDP():
    """Builds (and caches) the SSDP NOTIFY payload advertising the HDHR service."""

    def __init__(self, fhdhr, broadcast_ip, max_age):
        self.fhdhr = fhdhr

        # Cached payload text; built once on first get() since its inputs never change.
        self.ssdp_content = None

        self.broadcast_ip = broadcast_ip
        self.device_xml_path = '/device.xml'

        self.max_age = max_age

    def get(self):
        """Return the NOTIFY message as UTF-8 bytes, building it on first use."""
        if self.ssdp_content:
            return self.ssdp_content.encode("utf-8")

        data = ''
        data_command = "NOTIFY * HTTP/1.1"

        data_dict = {
            "HOST": "%s:%s" % (self.broadcast_ip, 1900),
            "NT": 'urn:schemas-upnp-org:device:MediaServer:1',
            "NTS": "ssdp:alive",
            "USN": 'uuid:%s::%s' % (self.fhdhr.config.dict["main"]["uuid"], 'urn:schemas-upnp-org:device:MediaServer:1'),
            "SERVER": 'fHDHR/%s UPnP/1.0' % self.fhdhr.version,
            "LOCATION": "%s%s" % (self.fhdhr.api.base, self.device_xml_path),
            "AL": "%s%s" % (self.fhdhr.api.base, self.device_xml_path),
            # Bug fix: the key was "Cache-Control:max-age=", which rendered the
            # malformed header "Cache-Control:max-age=:<age>". SSDP expects
            # "CACHE-CONTROL: max-age=<age>".
            "CACHE-CONTROL": "max-age=%s" % self.max_age,
            }

        data += "%s\r\n" % data_command
        for data_key in list(data_dict.keys()):
            data += "%s:%s\r\n" % (data_key, data_dict[data_key])
        data += "\r\n"

        self.ssdp_content = data
        return data.encode("utf-8")
|
||||||
39
fHDHR/device/ssdp/rmg_ssdp.py
Normal file
@ -0,0 +1,39 @@
|
|||||||
|
|
||||||
|
|
||||||
|
class RMG_SSDP():
    """Builds (and caches) the SSDP NOTIFY payload advertising the RMG service."""

    def __init__(self, fhdhr, broadcast_ip, max_age):
        self.fhdhr = fhdhr

        # Cached payload text; built once on first get() since its inputs never change.
        self.ssdp_content = None

        self.broadcast_ip = broadcast_ip
        self.device_xml_path = '/device.xml'

        self.max_age = max_age

    def get(self):
        """Return the NOTIFY message as UTF-8 bytes, building it on first use."""
        if self.ssdp_content:
            return self.ssdp_content.encode("utf-8")

        data = ''
        data_command = "NOTIFY * HTTP/1.1"

        data_dict = {
            "HOST": "%s:%s" % (self.broadcast_ip, 1900),
            "NT": 'urn:schemas-upnp-org:device-1-0',
            "NTS": "ssdp:alive",
            "USN": 'uuid:%s::%s' % (self.fhdhr.config.dict["main"]["uuid"], 'urn:schemas-upnp-org:device-1-0'),
            "SERVER": 'fHDHR/%s UPnP/1.0' % self.fhdhr.version,
            "LOCATION": "%s%s" % (self.fhdhr.api.base, self.device_xml_path),
            "AL": "%s%s" % (self.fhdhr.api.base, self.device_xml_path),
            # Bug fix: the key was "Cache-Control:max-age=", which rendered the
            # malformed header "Cache-Control:max-age=:<age>". SSDP expects
            # "CACHE-CONTROL: max-age=<age>".
            "CACHE-CONTROL": "max-age=%s" % self.max_age,
            }

        data += "%s\r\n" % data_command
        for data_key in list(data_dict.keys()):
            data += "%s:%s\r\n" % (data_key, data_dict[data_key])
        data += "\r\n"

        self.ssdp_content = data
        return data.encode("utf-8")
|
||||||
16
fHDHR/device/ssdp/ssdp_detect.py
Normal file
@ -0,0 +1,16 @@
|
|||||||
|
|
||||||
|
|
||||||
|
class fHDHR_Detect():
    """Tracks other fHDHR instances discovered on the network via SSDP NOTIFYs."""

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr
        # Start each run with a clean slate of detected peers.
        self.fhdhr.db.delete_fhdhr_value("ssdp_detect", "list")

    def set(self, location):
        """Record *location* as a detected peer, ignoring duplicates."""
        detect_list = self.fhdhr.db.get_fhdhr_value("ssdp_detect", "list") or []
        if location in detect_list:
            return
        detect_list.append(location)
        self.fhdhr.db.set_fhdhr_value("ssdp_detect", "list", detect_list)

    def get(self):
        """Return the list of detected peer locations (possibly empty)."""
        return self.fhdhr.db.get_fhdhr_value("ssdp_detect", "list") or []
|
||||||
106
fHDHR/device/tuners/__init__.py
Normal file
@ -0,0 +1,106 @@
|
|||||||
|
|
||||||
|
from fHDHR.exceptions import TunerError
|
||||||
|
|
||||||
|
from .tuner import Tuner
|
||||||
|
|
||||||
|
|
||||||
|
class Tuners():
    """Manages the pool of virtual tuners and stream-request negotiation."""

    def __init__(self, fhdhr, epg, channels):
        self.fhdhr = fhdhr
        self.channels = channels

        self.epg = epg
        self.max_tuners = int(self.fhdhr.config.dict["fhdhr"]["tuner_count"])

        self.tuners = {}

        self.fhdhr.logger.info("Creating %s tuners." % str(self.max_tuners))

        for i in range(0, self.max_tuners):
            self.tuners[str(i)] = Tuner(fhdhr, i, epg)

    def get_available_tuner(self):
        """Return the number of the first unlocked tuner, or None when all are busy.

        Bug fix: next() without a default raises StopIteration when every tuner
        is locked; the previous trailing "or None" never executed in that case.
        """
        return next((tunernum for tunernum in list(self.tuners.keys())
                     if not self.tuners[tunernum].tuner_lock.locked()), None)

    def get_scanning_tuner(self):
        """Return the number of the tuner currently scanning, or None.

        Bug fix: same missing-default next() issue as get_available_tuner.
        """
        return next((tunernum for tunernum in list(self.tuners.keys())
                     if self.tuners[tunernum].status["status"] == "Scanning"), None)

    def stop_tuner_scan(self):
        """Close the scanning tuner, if one exists."""
        tunernum = self.get_scanning_tuner()
        if tunernum:
            self.tuners[str(tunernum)].close()

    def tuner_scan(self):
        """Temporarily use a tuner for a scan"""
        if not self.available_tuner_count():
            raise TunerError("805 - All Tuners In Use")

        tunernumber = self.get_available_tuner()
        # Bug fix: verify a tuner was actually obtained BEFORE starting the scan
        # (the scan used to run first, then the check raised too late).
        if not tunernumber:
            raise TunerError("805 - All Tuners In Use")
        self.tuners[str(tunernumber)].channel_scan()

    def tuner_grab(self, tuner_number, channel_number):
        """Grab a specific tuner for *channel_number*; raises TunerError on failure."""
        if str(tuner_number) not in list(self.tuners.keys()):
            self.fhdhr.logger.error("Tuner %s does not exist." % str(tuner_number))
            raise TunerError("806 - Tune Failed")

        # TunerError will raise if unavailable
        self.tuners[str(tuner_number)].grab(channel_number)

        return tuner_number

    def first_available(self, channel_number):
        """Grab the first free tuner for *channel_number* and return its number."""
        if not self.available_tuner_count():
            raise TunerError("805 - All Tuners In Use")

        tunernumber = self.get_available_tuner()

        if not tunernumber:
            raise TunerError("805 - All Tuners In Use")
        else:
            self.tuners[str(tunernumber)].grab(channel_number)
            return tunernumber

    def tuner_close(self, tunernum):
        """Release the given tuner."""
        self.tuners[str(tunernum)].close()

    def status(self):
        """Return a dict mapping tuner number to its status dict."""
        all_status = {}
        for tunernum in list(self.tuners.keys()):
            all_status[tunernum] = self.tuners[str(tunernum)].get_status()
        return all_status

    def available_tuner_count(self):
        """Count tuners whose lock is currently free."""
        available_tuners = 0
        for tunernum in list(self.tuners.keys()):
            if not self.tuners[str(tunernum)].tuner_lock.locked():
                available_tuners += 1
        return available_tuners

    def inuse_tuner_count(self):
        """Count tuners whose lock is currently held."""
        inuse_tuners = 0
        for tunernum in list(self.tuners.keys()):
            if self.tuners[str(tunernum)].tuner_lock.locked():
                inuse_tuners += 1
        return inuse_tuners

    def get_stream_info(self, stream_args):
        """Resolve the channel URL and content types for a stream request."""
        stream_args["channelUri"] = self.channels.get_channel_stream(str(stream_args["channel"]))
        if not stream_args["channelUri"]:
            raise TunerError("806 - Tune Failed")

        channelUri_headers = self.fhdhr.web.session.head(stream_args["channelUri"]).headers
        stream_args["true_content_type"] = channelUri_headers['Content-Type']

        # Playlist-like content types are relabeled so clients expect MPEG video.
        if stream_args["true_content_type"].startswith(tuple(["application/", "text/"])):
            stream_args["content_type"] = "video/mpeg"
        else:
            stream_args["content_type"] = stream_args["true_content_type"]

        return stream_args
|
||||||
27
fHDHR/device/tuners/stream/__init__.py
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
|
||||||
|
|
||||||
|
from .direct_stream import Direct_Stream
|
||||||
|
from .direct_m3u8_stream import Direct_M3U8_Stream
|
||||||
|
from .ffmpeg_stream import FFMPEG_Stream
|
||||||
|
from .vlc_stream import VLC_Stream
|
||||||
|
|
||||||
|
|
||||||
|
class Stream():
    """Selects the stream implementation matching stream_args["method"]."""

    def __init__(self, fhdhr, stream_args, tuner):
        self.fhdhr = fhdhr
        self.stream_args = stream_args

        # Bug fix: the "vlc" test was a bare `if` following the "ffmpeg" `if`,
        # breaking the mutually-exclusive dispatch chain the other branches use.
        if stream_args["method"] == "ffmpeg":
            self.method = FFMPEG_Stream(fhdhr, stream_args, tuner)
        elif stream_args["method"] == "vlc":
            self.method = VLC_Stream(fhdhr, stream_args, tuner)
        elif (stream_args["method"] == "direct" and
              not self.stream_args["true_content_type"].startswith(tuple(["application/", "text/"]))):
            # Direct passthrough for real media content types.
            self.method = Direct_Stream(fhdhr, stream_args, tuner)
        elif (stream_args["method"] == "direct" and
              self.stream_args["true_content_type"].startswith(tuple(["application/", "text/"]))):
            # Playlist-like content types are treated as m3u8.
            self.method = Direct_M3U8_Stream(fhdhr, stream_args, tuner)
        # NOTE(review): an unrecognized method leaves self.method unset, so get()
        # raises AttributeError — preserved from the original behavior.

    def get(self):
        """Delegate to the selected stream implementation's generator."""
        return self.method.get()
|
||||||
99
fHDHR/device/tuners/stream/direct_m3u8_stream.py
Normal file
@ -0,0 +1,99 @@
|
|||||||
|
import sys
|
||||||
|
import time
|
||||||
|
import m3u8
|
||||||
|
|
||||||
|
from Crypto.Cipher import AES
|
||||||
|
|
||||||
|
# from fHDHR.exceptions import TunerError
|
||||||
|
|
||||||
|
|
||||||
|
class Direct_M3U8_Stream():
    """Streams an m3u8/HLS channel by downloading and relaying its media segments."""

    def __init__(self, fhdhr, stream_args, tuner):
        self.fhdhr = fhdhr
        self.stream_args = stream_args
        self.tuner = tuner

        # Configured chunk size (bytes) for direct streaming.
        self.chunksize = int(self.fhdhr.config.dict["direct_stream"]['chunksize'])

    def get(self):
        """Resolve the playlist to a media-level m3u8 and return a segment generator."""

        # A non-zero duration sets an absolute wall-clock cutoff for the stream.
        if not self.stream_args["duration"] == 0:
            self.stream_args["time_end"] = self.stream_args["duration"] + time.time()

        self.fhdhr.logger.info("Detected stream URL is m3u8: %s" % self.stream_args["true_content_type"])

        # Follow variant (master) playlists down to the first media playlist.
        channelUri = self.stream_args["channelUri"]
        while True:

            self.fhdhr.logger.info("Opening m3u8 for reading %s" % channelUri)
            videoUrlM3u = m3u8.load(channelUri)
            if len(videoUrlM3u.playlists):
                self.fhdhr.logger.info("%s m3u8 varients found" % len(videoUrlM3u.playlists))
                # Always pick the first variant — presumably the default quality;
                # no bandwidth selection is attempted.
                channelUri = videoUrlM3u.playlists[0].absolute_uri
            else:
                break

        def generate():

            try:

                # URLs already yielded; used to dedupe across playlist refreshes.
                played_chunk_urls = []

                # Keep serving only while a client holds this tuner.
                while self.tuner.tuner_lock.locked():

                    # Re-poll the live playlist each pass for new segments.
                    playlist = m3u8.load(channelUri)
                    segments = playlist.segments

                    if len(played_chunk_urls):
                        newsegments = 0
                        for segment in segments:
                            if segment.absolute_uri not in played_chunk_urls:
                                newsegments += 1
                        self.fhdhr.logger.info("Refreshing m3u8, Loaded %s new segments." % str(newsegments))
                    else:
                        self.fhdhr.logger.info("Loaded %s segments." % str(len(segments)))

                    # Collect AES decryption keys if the playlist declares any;
                    # otherwise pair each segment with None.
                    if playlist.keys != [None]:
                        keys = [{"url": key.absolute_uri, "method": key.method, "iv": key.iv} for key in playlist.keys if key]
                    else:
                        keys = [None for i in range(0, len(segments))]

                    for segment, key in zip(segments, keys):
                        chunkurl = segment.absolute_uri

                        if chunkurl and chunkurl not in played_chunk_urls:
                            played_chunk_urls.append(chunkurl)

                            # Duration cutoff: closing the tuner releases the
                            # lock, which ends the outer while loop.
                            if (not self.stream_args["duration"] == 0 and
                               not time.time() < self.stream_args["time_end"]):
                                self.fhdhr.logger.info("Requested Duration Expired.")
                                self.tuner.close()

                            chunk = self.fhdhr.web.session.get(chunkurl).content
                            if not chunk:
                                break
                                # raise TunerError("807 - No Video Data")
                            if key:
                                if key["url"]:
                                    # NOTE(review): the key bytes are used as both
                                    # AES key and IV; key["iv"] is ignored — confirm
                                    # against the upstream streams this targets.
                                    keyfile = self.fhdhr.web.session.get(key["url"]).content
                                    cryptor = AES.new(keyfile, AES.MODE_CBC, keyfile)
                                    self.fhdhr.logger.info("Decrypting Chunk #%s with key: %s" % (len(played_chunk_urls), key["url"]))
                                    chunk = cryptor.decrypt(chunk)

                            chunk_size = int(sys.getsizeof(chunk))
                            self.fhdhr.logger.info("Passing Through Chunk #%s with size %s: %s" % (len(played_chunk_urls), chunk_size, chunkurl))
                            yield chunk
                            # Account for bandwidth on the owning tuner.
                            self.tuner.add_downloaded_size(chunk_size)

                self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")

            except GeneratorExit:
                self.fhdhr.logger.info("Connection Closed.")
            except Exception as e:
                self.fhdhr.logger.info("Connection Closed: " + str(e))
            finally:
                # Always release the tuner, whatever ended the stream.
                self.tuner.close()
                # raise TunerError("806 - Tune Failed")

        return generate()
|
||||||
63
fHDHR/device/tuners/stream/direct_stream.py
Normal file
@ -0,0 +1,63 @@
|
|||||||
|
import sys
|
||||||
|
import time
|
||||||
|
|
||||||
|
# from fHDHR.exceptions import TunerError
|
||||||
|
|
||||||
|
|
||||||
|
class Direct_Stream():
    """Streams a channel by proxying its HTTP response straight through in chunks."""

    def __init__(self, fhdhr, stream_args, tuner):
        self.fhdhr = fhdhr
        self.stream_args = stream_args
        self.tuner = tuner

        # Bytes per chunk pulled from the upstream response.
        self.chunksize = int(self.fhdhr.config.dict["direct_stream"]['chunksize'])

    def get(self):
        """Open the channel URL and return a generator yielding raw chunks."""

        # A non-zero duration sets an absolute wall-clock cutoff for the stream.
        if not self.stream_args["duration"] == 0:
            self.stream_args["time_end"] = self.stream_args["duration"] + time.time()

        self.fhdhr.logger.info("Direct Stream of %s URL: %s" % (self.stream_args["true_content_type"], self.stream_args["channelUri"]))

        req = self.fhdhr.web.session.get(self.stream_args["channelUri"], stream=True)

        def generate():

            try:

                chunk_counter = 1

                # Keep serving only while a client holds this tuner.
                while self.tuner.tuner_lock.locked():

                    for chunk in req.iter_content(chunk_size=self.chunksize):

                        # Duration cutoff: closing the tuner releases the lock,
                        # which ends the outer while loop.
                        if (not self.stream_args["duration"] == 0 and
                           not time.time() < self.stream_args["time_end"]):
                            req.close()
                            self.fhdhr.logger.info("Requested Duration Expired.")
                            self.tuner.close()

                        if not chunk:
                            break
                            # raise TunerError("807 - No Video Data")

                        chunk_size = int(sys.getsizeof(chunk))
                        self.fhdhr.logger.info("Passing Through Chunk #%s with size %s" % (chunk_counter, chunk_size))
                        yield chunk
                        # Account for bandwidth on the owning tuner.
                        self.tuner.add_downloaded_size(chunk_size)

                        chunk_counter += 1

                self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")

            except GeneratorExit:
                self.fhdhr.logger.info("Connection Closed.")
            except Exception as e:
                self.fhdhr.logger.info("Connection Closed: " + str(e))
            finally:
                # Always close the upstream response and release the tuner.
                req.close()
                self.tuner.close()
                # raise TunerError("806 - Tune Failed")

        return generate()
|
||||||
130
fHDHR/device/tuners/stream/ffmpeg_stream.py
Normal file
@ -0,0 +1,130 @@
|
|||||||
|
import sys
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
# from fHDHR.exceptions import TunerError
|
||||||
|
|
||||||
|
|
||||||
|
class FFMPEG_Stream():
    """Streams a channel by piping it through an external ffmpeg process."""

    def __init__(self, fhdhr, stream_args, tuner):
        self.fhdhr = fhdhr
        self.stream_args = stream_args
        self.tuner = tuner

        # How many bytes to pull from ffmpeg's stdout per chunk.
        self.bytes_per_read = int(self.fhdhr.config.dict["ffmpeg"]["bytes_per_read"])
        # Full command line, assembled once up front.
        self.ffmpeg_command = self.ffmpeg_command_assemble(stream_args)
|
||||||
|
|
||||||
|
def get(self):
|
||||||
|
|
||||||
|
ffmpeg_proc = subprocess.Popen(self.ffmpeg_command, stdout=subprocess.PIPE)
|
||||||
|
|
||||||
|
def generate():
|
||||||
|
try:
|
||||||
|
while self.tuner.tuner_lock.locked():
|
||||||
|
|
||||||
|
chunk = ffmpeg_proc.stdout.read(self.bytes_per_read)
|
||||||
|
if not chunk:
|
||||||
|
break
|
||||||
|
# raise TunerError("807 - No Video Data")
|
||||||
|
yield chunk
|
||||||
|
chunk_size = int(sys.getsizeof(chunk))
|
||||||
|
self.tuner.add_downloaded_size(chunk_size)
|
||||||
|
self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")
|
||||||
|
|
||||||
|
except GeneratorExit:
|
||||||
|
self.fhdhr.logger.info("Connection Closed.")
|
||||||
|
except Exception as e:
|
||||||
|
self.fhdhr.logger.info("Connection Closed: " + str(e))
|
||||||
|
finally:
|
||||||
|
ffmpeg_proc.terminate()
|
||||||
|
ffmpeg_proc.communicate()
|
||||||
|
self.tuner.close()
|
||||||
|
# raise TunerError("806 - Tune Failed")
|
||||||
|
|
||||||
|
return generate()
|
||||||
|
|
||||||
|
def ffmpeg_command_assemble(self, stream_args):
|
||||||
|
ffmpeg_command = [
|
||||||
|
self.fhdhr.config.dict["ffmpeg"]["path"],
|
||||||
|
"-i", stream_args["channelUri"],
|
||||||
|
]
|
||||||
|
ffmpeg_command.extend(self.ffmpeg_duration(stream_args))
|
||||||
|
ffmpeg_command.extend(self.transcode_profiles(stream_args))
|
||||||
|
ffmpeg_command.extend(self.ffmpeg_loglevel())
|
||||||
|
ffmpeg_command.extend(["pipe:stdout"])
|
||||||
|
return ffmpeg_command
|
||||||
|
|
||||||
|
def ffmpeg_duration(self, stream_args):
|
||||||
|
ffmpeg_command = []
|
||||||
|
if stream_args["duration"]:
|
||||||
|
ffmpeg_command.extend(["-t", str(stream_args["duration"])])
|
||||||
|
else:
|
||||||
|
ffmpeg_command.extend(
|
||||||
|
[
|
||||||
|
"-reconnect", "1",
|
||||||
|
"-reconnect_at_eof", "1",
|
||||||
|
"-reconnect_streamed", "1",
|
||||||
|
"-reconnect_delay_max", "2",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
|
return ffmpeg_command
|
||||||
|
|
||||||
|
def ffmpeg_loglevel(self):
|
||||||
|
ffmpeg_command = []
|
||||||
|
log_level = self.fhdhr.config.dict["logging"]["level"].lower()
|
||||||
|
|
||||||
|
loglevel_dict = {
|
||||||
|
"debug": "debug",
|
||||||
|
"info": "info",
|
||||||
|
"error": "error",
|
||||||
|
"warning": "warning",
|
||||||
|
"critical": "fatal",
|
||||||
|
}
|
||||||
|
if log_level not in ["info", "debug"]:
|
||||||
|
ffmpeg_command.extend(["-nostats", "-hide_banner"])
|
||||||
|
ffmpeg_command.extend(["-loglevel", loglevel_dict[log_level]])
|
||||||
|
return ffmpeg_command
|
||||||
|
|
||||||
|
def transcode_profiles(self, stream_args):
|
||||||
|
# TODO implement actual profiles here
|
||||||
|
"""
|
||||||
|
• heavy: transcode to AVC with the same resolution, frame-rate, and interlacing as the
|
||||||
|
original stream. For example 1080i60 AVC 1080i60, 720p60 AVC 720p60. → →
|
||||||
|
• mobile: trancode to AVC progressive not exceeding 1280x720 30fps.
|
||||||
|
• internet720: transcode to low bitrate AVC progressive not exceeding 1280x720 30fps.
|
||||||
|
• internet480: transcode to low bitrate AVC progressive not exceeding 848x480 30fps for
|
||||||
|
16:9 content, not exceeding 640x480 30fps for 4:3 content.
|
||||||
|
• internet360: transcode to low bitrate AVC progressive not exceeding 640x360 30fps for
|
||||||
|
16:9 content, not exceeding 480x360 30fps for 4:3 content.
|
||||||
|
• internet240: transcode to low bitrate AVC progressive not exceeding 432x240 30fps for
|
||||||
|
16:9 content, not exceeding 320x240 30fps for 4:3 content
|
||||||
|
"""
|
||||||
|
|
||||||
|
if stream_args["transcode"]:
|
||||||
|
self.fhdhr.logger.info("Client requested a " + stream_args["transcode"] + " transcode for stream.")
|
||||||
|
stream_args["transcode"] = None
|
||||||
|
|
||||||
|
ffmpeg_command = []
|
||||||
|
|
||||||
|
if not stream_args["transcode"]:
|
||||||
|
ffmpeg_command.extend(
|
||||||
|
[
|
||||||
|
"-c", "copy",
|
||||||
|
"-f", "mpegts",
|
||||||
|
]
|
||||||
|
)
|
||||||
|
elif stream_args["transcode"] == "heavy":
|
||||||
|
ffmpeg_command.extend([])
|
||||||
|
elif stream_args["transcode"] == "mobile":
|
||||||
|
ffmpeg_command.extend([])
|
||||||
|
elif stream_args["transcode"] == "internet720":
|
||||||
|
ffmpeg_command.extend([])
|
||||||
|
elif stream_args["transcode"] == "internet480":
|
||||||
|
ffmpeg_command.extend([])
|
||||||
|
elif stream_args["transcode"] == "internet360":
|
||||||
|
ffmpeg_command.extend([])
|
||||||
|
elif stream_args["transcode"] == "internet240":
|
||||||
|
ffmpeg_command.extend([])
|
||||||
|
|
||||||
|
return ffmpeg_command
|
||||||
121
fHDHR/device/tuners/stream/vlc_stream.py
Normal file
@ -0,0 +1,121 @@
|
|||||||
|
import sys
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
# from fHDHR.exceptions import TunerError
|
||||||
|
|
||||||
|
|
||||||
|
class VLC_Stream():
    """Proxy a channel stream through a VLC subprocess.

    VLC pulls from the channel URI and writes MPEG-TS to its stdout,
    which ``get()`` re-chunks and yields to the HTTP layer.
    """

    def __init__(self, fhdhr, stream_args, tuner):
        self.fhdhr = fhdhr
        self.stream_args = stream_args
        self.tuner = tuner

        # How many bytes to pull from VLC's stdout per read.
        self.bytes_per_read = int(self.fhdhr.config.dict["vlc"]["bytes_per_read"])
        self.vlc_command = self.vlc_command_assemble(stream_args)

    def get(self):
        """Spawn VLC and return a generator that yields stream chunks."""

        vlc_proc = subprocess.Popen(self.vlc_command, stdout=subprocess.PIPE)

        def generate():
            try:

                while self.tuner.tuner_lock.locked():

                    chunk = vlc_proc.stdout.read(self.bytes_per_read)
                    if not chunk:
                        break
                        # raise TunerError("807 - No Video Data")
                    yield chunk
                    # BUGFIX: len(chunk) is the actual payload size;
                    # sys.getsizeof() also counts Python object overhead.
                    self.tuner.add_downloaded_size(len(chunk))
                self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")

            except GeneratorExit:
                self.fhdhr.logger.info("Connection Closed.")
            except Exception as e:
                self.fhdhr.logger.info("Connection Closed: " + str(e))
            finally:
                # Always reap the subprocess and free the tuner.
                vlc_proc.terminate()
                vlc_proc.communicate()
                self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")
                self.tuner.close()
                # raise TunerError("806 - Tune Failed")

        return generate()

    def vlc_command_assemble(self, stream_args):
        """Build the full VLC argv for this stream."""
        vlc_command = [
            self.fhdhr.config.dict["vlc"]["path"],
            "-I", "dummy", stream_args["channelUri"],
            ]
        vlc_command.extend(self.vlc_duration(stream_args))
        vlc_command.extend(self.vlc_loglevel())
        vlc_command.extend(["--sout"])
        vlc_command.extend(self.transcode_profiles(stream_args))
        return vlc_command

    def vlc_duration(self, stream_args):
        """--run-time flag for fixed-duration requests; nothing otherwise."""
        vlc_command = []
        if stream_args["duration"]:
            vlc_command.extend(["--run-time=%s" % str(stream_args["duration"])])
        return vlc_command

    def vlc_loglevel(self):
        """Map the fHDHR log level onto VLC verbosity flags."""
        vlc_command = []
        log_level = self.fhdhr.config.dict["logging"]["level"].lower()

        loglevel_dict = {
            "debug": "3",
            "info": "0",
            "error": "1",
            "warning": "2",
            "critical": "1",
            }
        # BUGFIX: the original emitted "--log-verbose=" and the value as TWO
        # argv entries, so VLC never received a usable option; it must be a
        # single "--log-verbose=N" argument.  Unknown levels fall back to "0".
        vlc_command.extend(["--log-verbose=" + loglevel_dict.get(log_level, "0")])
        if log_level not in ["info", "debug"]:
            vlc_command.extend(["--quiet"])
        return vlc_command

    def transcode_profiles(self, stream_args):
        # TODO implement actual profiles here
        """
        • heavy: transcode to AVC with the same resolution, frame-rate, and interlacing as the
        original stream. For example 1080i60 AVC 1080i60, 720p60 AVC 720p60. → →
        • mobile: trancode to AVC progressive not exceeding 1280x720 30fps.
        • internet720: transcode to low bitrate AVC progressive not exceeding 1280x720 30fps.
        • internet480: transcode to low bitrate AVC progressive not exceeding 848x480 30fps for
        16:9 content, not exceeding 640x480 30fps for 4:3 content.
        • internet360: transcode to low bitrate AVC progressive not exceeding 640x360 30fps for
        16:9 content, not exceeding 480x360 30fps for 4:3 content.
        • internet240: transcode to low bitrate AVC progressive not exceeding 432x240 30fps for
        16:9 content, not exceeding 320x240 30fps for 4:3 content
        """

        if stream_args["transcode"]:
            self.fhdhr.logger.info("Client requested a " + stream_args["transcode"] + " transcode for stream.")
            # Profiles are unimplemented: discard the request.
            stream_args["transcode"] = None

        # Always stream-copy to a TS muxer on stdout for now.  Transcode
        # example kept for the eventual profile implementation:
        # '#transcode{vcodec=mp2v,vb=4096,acodec=mp2a,ab=192,scale=1,channels=2,deinterlace}:std{access=file,mux=ts,dst=-"}'
        vlc_transcode_string = "#std{mux=ts,access=file,dst=-}"
        return [vlc_transcode_string]
|
||||||
101
fHDHR/device/tuners/tuner.py
Normal file
@ -0,0 +1,101 @@
|
|||||||
|
import multiprocessing
|
||||||
|
import threading
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
from fHDHR.exceptions import TunerError
|
||||||
|
from fHDHR.tools import humanized_time
|
||||||
|
|
||||||
|
from .stream import Stream
|
||||||
|
|
||||||
|
|
||||||
|
class Tuner():
    """A single virtual tuner slot.

    Exclusive use is guarded by a threading.Lock; stream metadata (clients,
    bytes downloaded, start time) is tracked in ``self.status`` for the
    status APIs.
    """

    def __init__(self, fhdhr, inum, epg):
        self.fhdhr = fhdhr

        # Tuner index and EPG accessor used for "what's on now" reporting.
        self.number = inum
        self.epg = epg

        # Held for the whole duration of a stream or a channel scan.
        self.tuner_lock = threading.Lock()
        self.set_off_status()

        # Internal API URLs used by the background scan worker.
        self.chanscan_url = "%s/api/channels?method=scan" % (self.fhdhr.api.base)
        self.close_url = "%s/api/tuners?method=close&tuner=%s" % (self.fhdhr.api.base, str(self.number))

    def channel_scan(self):
        """Kick off a channel scan on this tuner in a background worker.

        Raises:
            TunerError: if the tuner is currently locked by another user.
        """
        if self.tuner_lock.locked():
            self.fhdhr.logger.error("Tuner #%s is not available." % str(self.number))
            raise TunerError("804 - Tuner In Use")

        if self.status["status"] == "Scanning":
            self.fhdhr.logger.info("Channel Scan Already In Progress!")
        else:

            # Lock is released later by runscan() hitting the close endpoint.
            self.tuner_lock.acquire()
            self.status["status"] = "Scanning"
            self.fhdhr.logger.info("Tuner #%s Performing Channel Scan." % str(self.number))

            # NOTE(review): an unrecognized thread_method leaves the tuner
            # locked in "Scanning" with no worker started — confirm config
            # validation happens upstream.
            if self.fhdhr.config.dict["main"]["thread_method"] in ["multiprocessing"]:
                chanscan = multiprocessing.Process(target=self.runscan)
            elif self.fhdhr.config.dict["main"]["thread_method"] in ["threading"]:
                chanscan = threading.Thread(target=self.runscan)
            if self.fhdhr.config.dict["main"]["thread_method"] in ["multiprocessing", "threading"]:
                chanscan.start()

    def runscan(self):
        # Drive the scan through the internal HTTP API, then release this
        # tuner via the close endpoint.
        self.fhdhr.web.session.get(self.chanscan_url)
        self.fhdhr.logger.info("Requested Channel Scan Complete.")
        self.fhdhr.web.session.get(self.close_url)

    def add_downloaded_size(self, bytes_count):
        # Byte counter only exists while a stream is active (see set_status).
        if "downloaded" in list(self.status.keys()):
            self.status["downloaded"] += bytes_count

    def grab(self, channel_number):
        """Acquire the tuner for a stream on the given channel.

        Raises:
            TunerError: if the tuner is already in use.
        """
        if self.tuner_lock.locked():
            self.fhdhr.logger.error("Tuner #" + str(self.number) + " is not available.")
            raise TunerError("804 - Tuner In Use")
        self.tuner_lock.acquire()
        self.status["status"] = "Acquired"
        self.status["channel"] = channel_number
        self.fhdhr.logger.info("Tuner #%s Acquired." % str(self.number))

    def close(self):
        """Reset status and release the lock; safe to call when already idle."""
        self.set_off_status()
        if self.tuner_lock.locked():
            self.tuner_lock.release()
            self.fhdhr.logger.info("Tuner #" + str(self.number) + " Released.")

    def get_status_dict(self):
        pass

    def get_status(self):
        """Return a copy of the status dict, augmented for active streams."""
        current_status = self.status.copy()
        if current_status["status"] == "Active":
            # Human-readable elapsed time plus current EPG entry.
            current_status["Play Time"] = str(
                humanized_time(
                    int((datetime.datetime.utcnow() - current_status["time_start"]).total_seconds())))
            current_status["time_start"] = str(current_status["time_start"])
            current_status["epg"] = self.epg.whats_on_now(current_status["channel"])
        return current_status

    def set_off_status(self):
        # Baseline status when no stream or scan is running.
        self.status = {"status": "Inactive"}

    def get_stream(self, stream_args, tuner):
        # Delegate to the Stream factory for the configured stream method.
        stream = Stream(self.fhdhr, stream_args, tuner)
        return stream.get()

    def set_status(self, stream_args):
        """Mark the tuner Active and record client/stream metadata."""
        if self.status["status"] != "Active":
            self.status = {
                "status": "Active",
                "clients": [],
                "clients_id": [],
                "method": stream_args["method"],
                "accessed": [stream_args["accessed"]],
                "channel": stream_args["channel"],
                "proxied_url": stream_args["channelUri"],
                "time_start": datetime.datetime.utcnow(),
                "downloaded": 0
                }
        # Track each distinct client exactly once.
        if stream_args["client"] not in self.status["clients"]:
            self.status["clients"].append(stream_args["client"])
        if stream_args["client_id"] not in self.status["clients_id"]:
            self.status["clients_id"].append(stream_args["client_id"])
|
||||||
40
fHDHR/exceptions/__init__.py
Normal file
@ -0,0 +1,40 @@
|
|||||||
|
|
||||||
|
class TunerError(Exception):
    """Raised when a tuner request cannot be serviced (in use, tune failed)."""

    def __init__(self, value):
        # Keep the raw value for formatting in __str__.
        self.value = value

    def __str__(self):
        return 'TunerError: {}'.format(self.value)
|
||||||
|
|
||||||
|
|
||||||
|
class OriginSetupError(Exception):
    """Raised when the origin service fails to initialize."""

    def __init__(self, value):
        # Keep the raw value for formatting in __str__.
        self.value = value

    def __str__(self):
        return 'OriginSetupError: {}'.format(self.value)
|
||||||
|
|
||||||
|
|
||||||
|
class EPGSetupError(Exception):
    """Raised when an EPG source fails to initialize."""

    def __init__(self, value):
        # Keep the raw value for formatting in __str__.
        self.value = value

    def __str__(self):
        return 'EPGSetupError: {}'.format(self.value)
|
||||||
|
|
||||||
|
|
||||||
|
class ConfigurationError(Exception):
    """Base class for configuration-related failures."""

    def __init__(self, value):
        # Keep the raw value for formatting in __str__.
        self.value = value

    def __str__(self):
        return 'ConfigurationError: {}'.format(self.value)
|
||||||
|
|
||||||
|
|
||||||
|
class ConfigurationNotFound(ConfigurationError):
    """Raised when the configuration file cannot be located on disk."""

    def __init__(self, filename):
        # Base class stores .value; there is no meaningful value here.
        super(ConfigurationNotFound, self).__init__(None)
        # Remember which path was searched, for the error message.
        self.filename = filename

    def __str__(self):
        return 'Unable to find the configuration file %s' % self.filename
|
||||||
92
fHDHR/originwrapper/__init__.py
Normal file
@ -0,0 +1,92 @@
|
|||||||
|
|
||||||
|
|
||||||
|
import fHDHR.exceptions
|
||||||
|
|
||||||
|
|
||||||
|
class OriginEPG_StandIN():
    """Placeholder EPG used when the origin service fails to set up."""

    def __init__(self):
        pass

    def update_epg(self, channels):
        # No origin available: there is no guide data to return.
        return {}
|
||||||
|
|
||||||
|
|
||||||
|
class OriginChannels_StandIN():
    """Placeholder channel source used when the origin service fails to set up."""

    def __init__(self):
        pass

    def get_channels(self):
        # No origin available: no channels to offer.
        return []

    def get_channel_stream(self, chandict):
        # No origin available: no stream URL can be produced.
        return None
|
||||||
|
|
||||||
|
|
||||||
|
class OriginServiceWrapper():
    """Wraps the origin plugin, falling back to stand-ins when setup fails."""

    def __init__(self, fhdhr, origin):
        self.fhdhr = fhdhr
        self.origin = origin

        self.servicename = fhdhr.config.dict["main"]["servicename"]

        # True/False after setup(); None only before setup runs.
        self.setup_success = None
        self.setup()

    def setup(self):
        """Instantiate the origin service; install stand-ins on failure."""

        try:
            self.originservice = self.origin.OriginService(self.fhdhr)
            self.setup_success = True
            self.fhdhr.logger.info("%s Setup Success" % self.servicename)
        except fHDHR.exceptions.OriginSetupError as e:
            self.fhdhr.logger.error(e)
            self.setup_success = False

        if self.setup_success:
            self.channels = self.origin.OriginChannels(self.fhdhr, self.originservice)
            self.epg = self.origin.OriginEPG(self.fhdhr)
        else:
            # Stand-ins keep the rest of the app functional with no channels.
            self.channels = OriginChannels_StandIN()
            self.epg = OriginEPG_StandIN()

    def get_channels(self):
        return self.channels.get_channels()

    def get_channel_stream(self, chandict):
        return self.channels.get_channel_stream(chandict)

    def update_epg(self, channels):
        return self.epg.update_epg(channels)

    def get_status_dict(self):
        """Origin status for the web UI; merges plugin-provided status if any."""

        if self.setup_success:
            status_dict = {
                "Setup": "Success",
                }

            try:
                full_status_dict = self.origin.get_status_dict()
                for status_key in list(full_status_dict.keys()):
                    status_dict[status_key] = full_status_dict[status_key]
                return status_dict
            except AttributeError:
                # Plugin does not implement get_status_dict(); base info only.
                return status_dict
        else:
            return {
                "Setup": "Failed",
                }

    def __getattr__(self, name):
        ''' will only get called for undefined attributes '''
        # IDIOM/SECURITY: getattr() replaces the original string-building
        # eval() on the attribute name.  Lookup order preserved:
        # fhdhr, then originservice, then channels, then epg.
        if hasattr(self.fhdhr, name):
            return getattr(self.fhdhr, name)
        if hasattr(self.originservice, name):
            return getattr(self.originservice, name)
        elif hasattr(self.channels, name):
            return getattr(self.channels, name)
        elif hasattr(self.epg, name):
            return getattr(self.epg, name)
        else:
            raise AttributeError(name)
|
||||||
136
fHDHR/tools/__init__.py
Normal file
@ -0,0 +1,136 @@
|
|||||||
|
import os
|
||||||
|
import re
|
||||||
|
import ast
|
||||||
|
import requests
|
||||||
|
import xml.etree.ElementTree
|
||||||
|
|
||||||
|
# AST operator whitelists consumed by is_arithmetic() below: only these
# unary/binary operators are considered "arithmetic".
UNARY_OPS = (ast.UAdd, ast.USub)
BINARY_OPS = (ast.Add, ast.Sub, ast.Mult, ast.Div, ast.Mod)
|
||||||
|
|
||||||
|
|
||||||
|
def is_docker():
    """Best-effort check for whether this process runs inside a Docker container.

    Scans /proc/self/cgroup for a docker cgroup path; returns False when the
    file does not exist (non-Linux hosts).
    """
    path = "/proc/self/cgroup"
    if not os.path.isfile(path):
        return False
    with open(path) as f:
        for line in f:
            # BUGFIX: use a raw string — the original non-raw literal relied
            # on Python passing invalid escapes (\d, \w) through, which is
            # deprecated and fragile.
            if re.match(r"\d+:[\w=]+:/docker(-[ce]e)?/\w+", line):
                return True
    return False
|
||||||
|
|
||||||
|
|
||||||
|
def sub_el(parent, sub_el_item_name, text=None, **kwargs):
    """Create a SubElement under *parent*, optionally setting its text.

    Extra keyword arguments become XML attributes; returns the new element.
    """
    child = xml.etree.ElementTree.SubElement(parent, sub_el_item_name, **kwargs)
    if text:
        child.text = text
    return child
|
||||||
|
|
||||||
|
|
||||||
|
def xmldictmaker(inputdict, req_items, list_items=None, str_items=None):
    """Return a copy of *inputdict* guaranteed to contain every key in *req_items*.

    Keys that are missing or falsy are normalized: to ``[]`` when listed in
    *list_items*, to ``""`` when listed in *str_items*, otherwise ``None``
    (missing) / unchanged (present but falsy and in neither list).

    BUGFIX: the original used mutable default arguments (``[]``); they were
    never mutated, but ``None`` sentinels are the safe idiom.
    """
    list_items = list_items if list_items is not None else []
    str_items = str_items if str_items is not None else []

    xml_dict = dict(inputdict)

    for req_item in req_items:
        # Missing required keys start as None.
        xml_dict.setdefault(req_item, None)
        # Note: present-but-falsy values (0, "", None) are also normalized,
        # matching the original behavior.
        if not xml_dict[req_item]:
            if req_item in list_items:
                xml_dict[req_item] = []
            elif req_item in str_items:
                xml_dict[req_item] = ""

    return xml_dict
|
||||||
|
|
||||||
|
|
||||||
|
def is_arithmetic(s):
    """Return True if *s* parses as a pure arithmetic expression.

    Only numbers combined with the whitelisted unary/binary operators
    (UNARY_OPS / BINARY_OPS) are accepted; anything else — including
    names, calls, or syntax errors — yields False.
    """

    def _check(node):
        # Numbers are the only valid leaves.
        if isinstance(node, ast.Num):
            return True
        if isinstance(node, ast.Expression):
            return _check(node.body)
        if isinstance(node, ast.UnaryOp):
            return isinstance(node.op, UNARY_OPS) and _check(node.operand)
        if isinstance(node, ast.BinOp):
            return isinstance(node.op, BINARY_OPS) and _check(node.left) and _check(node.right)
        # Any other node type means the expression is not arithmetic.
        raise ValueError('Unsupported type {}'.format(node))

    try:
        return _check(ast.parse(s, mode='eval'))
    except (SyntaxError, ValueError):
        return False
|
||||||
|
|
||||||
|
|
||||||
|
def isint(x):
    """Return True if *x* converts to a float with no fractional part."""
    try:
        # int() stays inside the try: int(float("nan")) raises ValueError.
        as_float = float(x)
        as_int = int(as_float)
    except ValueError:
        return False
    return as_float == as_int
|
||||||
|
|
||||||
|
|
||||||
|
def isfloat(x):
    """Return True if *x* converts cleanly to a float."""
    try:
        float(x)
    except ValueError:
        return False
    return True
|
||||||
|
|
||||||
|
|
||||||
|
def hours_between_datetime(first_time, later_time):
    """Return the signed number of hours from *later_time* to *first_time*.

    NOTE(review): computes first_time - later_time, so the result is
    negative when later_time is after first_time — confirm callers
    expect this argument order.
    """
    delta = first_time - later_time
    return (delta.total_seconds() / 60 / 60)
|
||||||
|
|
||||||
|
|
||||||
|
def humanized_filesize(size, decimal_places=2):
    """Format a byte count using binary (IEC) units, e.g. ``"1.50 MiB"``."""
    value = size
    for unit in ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB']:
        # Stop at the first unit that keeps the value under 1024
        # (or at the largest unit we know).
        if value < 1024.0 or unit == 'YiB':
            return f"{value:.{decimal_places}f} {unit}"
        value /= 1024.0
|
||||||
|
|
||||||
|
|
||||||
|
def humanized_time(countdownseconds):
    """Render a second count as a human-readable duration.

    Examples: ``0`` -> ``"just now"``, ``3661`` -> ``"1 hour 1 minute 1 second"``.

    BUGFIX/IDIOM: the original used ``eval()`` on unit names, contained a
    duplicated ``time = time % (24 * 3600)`` line, and carried dead code
    after its return to silence a lint warning.  This rewrite uses divmod
    and a unit table; output is unchanged.
    """
    remaining = float(countdownseconds)
    if remaining == 0:
        return "just now"

    year, remaining = divmod(remaining, 365 * 24 * 3600)
    day, remaining = divmod(remaining, 24 * 3600)
    hour, remaining = divmod(remaining, 3600)
    minute, second = divmod(remaining, 60)

    parts = []
    for unit_name, unit_value in (("year", year), ("day", day), ("hour", hour),
                                  ("minute", minute), ("second", second)):
        if unit_value >= 1:
            # Pluralize for anything strictly greater than one unit
            # (matches the original, including fractional values).
            label = unit_name if unit_value <= 1 else unit_name + "s"
            parts.append("%s %s" % (int(unit_value), label))

    if not parts:
        # Sub-second durations round down to nothing displayable.
        return "just now"
    return " ".join(parts)
|
||||||
|
|
||||||
|
|
||||||
|
class WebReq():
    """Bundles a persistent requests Session with its exception hierarchy."""

    def __init__(self):
        # One shared session gives connection pooling across all HTTP calls.
        self.session = requests.Session()
        # Expose requests' exceptions so callers need not import requests.
        self.exceptions = requests.exceptions
|
||||||
100
fHDHR_web/__init__.py
Normal file
@ -0,0 +1,100 @@
|
|||||||
|
from gevent.pywsgi import WSGIServer
|
||||||
|
from flask import Flask, request
|
||||||
|
|
||||||
|
from .pages import fHDHR_Pages
|
||||||
|
from .files import fHDHR_Files
|
||||||
|
from .hdhr import fHDHR_HDHR
|
||||||
|
from .rmg import fHDHR_RMG
|
||||||
|
from .api import fHDHR_API
|
||||||
|
|
||||||
|
|
||||||
|
# Version string for the web frontend component.
fHDHR_web_VERSION = "v0.4.0-beta"
|
||||||
|
|
||||||
|
|
||||||
|
class fHDHR_HTTP_Server():
    """Flask/WSGI front-end: registers all endpoint groups and serves HTTP."""

    app = None

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

        # BUGFIX: the original attribute/keyword "iptvorg-us_folder" is not
        # a valid Python identifier (contains a hyphen) and is not a Flask
        # argument — it looks like a corrupted find/replace of Flask's
        # "static_folder".  NOTE(review): the config key is assumed to be
        # "www_static_dir" — confirm against the config module.
        self.static_folder = fhdhr.config.internal["paths"]["www_static_dir"]

        self.fhdhr.logger.info("Loading Flask.")

        self.app = Flask("fHDHR", static_folder=self.static_folder)

        self.fhdhr.logger.info("Loading HTTP Pages Endpoints.")
        self.pages = fHDHR_Pages(fhdhr)
        self.add_endpoints(self.pages, "pages")

        self.fhdhr.logger.info("Loading HTTP Files Endpoints.")
        self.files = fHDHR_Files(fhdhr)
        self.add_endpoints(self.files, "files")

        self.fhdhr.logger.info("Loading HTTP HDHR Endpoints.")
        self.hdhr = fHDHR_HDHR(fhdhr)
        self.add_endpoints(self.hdhr, "hdhr")

        self.fhdhr.logger.info("Loading HTTP RMG Endpoints.")
        self.rmg = fHDHR_RMG(fhdhr)
        self.add_endpoints(self.rmg, "rmg")

        self.fhdhr.logger.info("Loading HTTP API Endpoints.")
        self.api = fHDHR_API(fhdhr)
        self.add_endpoints(self.api, "api")

        # Request lifecycle logging hooks.
        self.app.before_request(self.before_request)
        self.app.after_request(self.after_request)
        self.app.before_first_request(self.before_first_request)

    def before_first_request(self):
        self.fhdhr.logger.info("HTTP Server Online.")

    def before_request(self):
        self.fhdhr.logger.debug("Client %s requested %s Opening" % (request.method, request.path))

    def after_request(self, response):
        self.fhdhr.logger.debug("Client %s requested %s Closing" % (request.method, request.path))
        return response

    def add_endpoints(self, index_list, index_name):
        """Register every handler object found on *index_list* with Flask.

        Each handler exposes ``endpoints`` (str or list), ``endpoint_name``
        and optionally ``endpoint_methods``.
        """
        item_list = [x for x in dir(index_list) if self.isapath(x)]
        for item in item_list:
            # IDIOM: getattr() replaces the original string-building eval().
            handler = getattr(index_list, item)
            endpoints = handler.endpoints
            if isinstance(endpoints, str):
                endpoints = [endpoints]
            endpoint_name = handler.endpoint_name
            try:
                endpoint_methods = handler.endpoint_methods
            except AttributeError:
                # Handlers default to GET-only.
                endpoint_methods = ['GET']
            self.fhdhr.logger.info("Adding endpoint %s available at %s with %s methods." % (endpoint_name, ",".join(endpoints), ",".join(endpoint_methods)))
            for endpoint in endpoints:
                self.add_endpoint(endpoint=endpoint,
                                  endpoint_name=endpoint_name,
                                  handler=handler,
                                  methods=endpoint_methods)

    def isapath(self, item):
        """Filter out attributes that are not routable handler objects."""
        not_a_page_list = ["fhdhr", "htmlerror", "page_elements"]
        if item in not_a_page_list:
            return False
        elif item.startswith("__") and item.endswith("__"):
            return False
        else:
            return True

    def add_endpoint(self, endpoint=None, endpoint_name=None, handler=None, methods=['GET']):
        # Thin wrapper over Flask's routing table.
        self.app.add_url_rule(endpoint, endpoint_name, handler, methods=methods)

    def run(self):
        """Serve forever via gevent's WSGI server; Ctrl-C stops cleanly."""

        self.http = WSGIServer(self.fhdhr.api.address_tuple,
                               self.app.wsgi_app,
                               log=self.fhdhr.logger)

        try:
            self.http.serve_forever()
        except KeyboardInterrupt:
            self.http.stop()
|
||||||
32
fHDHR_web/api/__init__.py
Normal file
@ -0,0 +1,32 @@
|
|||||||
|
|
||||||
|
from .root_url import Root_URL
|
||||||
|
|
||||||
|
from .cluster import Cluster
|
||||||
|
from .settings import Settings
|
||||||
|
from .channels import Channels
|
||||||
|
from .xmltv import xmlTV
|
||||||
|
from .m3u import M3U
|
||||||
|
from .epg import EPG
|
||||||
|
from .tuners import Tuners
|
||||||
|
from .debug import Debug_JSON
|
||||||
|
|
||||||
|
from .images import Images
|
||||||
|
|
||||||
|
|
||||||
|
class fHDHR_API():
    """Aggregates every /api/* endpoint handler for registration with Flask."""

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

        self.root_url = Root_URL(fhdhr)

        # One handler object per API area; each exposes endpoints metadata
        # consumed by fHDHR_HTTP_Server.add_endpoints().
        self.cluster = Cluster(fhdhr)
        self.settings = Settings(fhdhr)
        self.channels = Channels(fhdhr)
        self.xmltv = xmlTV(fhdhr)
        self.m3u = M3U(fhdhr)
        self.epg = EPG(fhdhr)
        self.tuners = Tuners(fhdhr)
        self.debug = Debug_JSON(fhdhr)

        self.images = Images(fhdhr)
||||||
111
fHDHR_web/api/channels.py
Normal file
@ -0,0 +1,111 @@
|
|||||||
|
from flask import request, redirect, Response, abort
|
||||||
|
import urllib.parse
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
|
class Channels():
    """/api/channels endpoint: list, favorite, enable/disable, update, scan."""

    endpoints = ["/api/channels"]
    endpoint_name = "api_channels"
    endpoint_methods = ["GET", "POST"]

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Dispatch on the ``?method=`` query argument."""

        method = request.args.get('method', default=None, type=str)
        redirect_url = request.args.get('redirect', default=None, type=str)

        if method == "get":
            # Serialize every known channel plus its play/stream URLs.
            channels_info = []
            for fhdhr_id in list(self.fhdhr.device.channels.list.keys()):
                channel_obj = self.fhdhr.device.channels.list[fhdhr_id]
                channel_dict = channel_obj.dict.copy()
                channel_dict["play_url"] = channel_obj.play_url
                channel_dict["stream_url"] = channel_obj.stream_url
                channels_info.append(channel_dict)
            channels_info_json = json.dumps(channels_info, indent=4)

            return Response(status=200,
                            response=channels_info_json,
                            mimetype='application/json')

        elif method == "favorite":

            channel = request.args.get('channel', default=None, type=str)
            if not channel:
                if redirect_url:
                    return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Failed" % method))
                else:
                    # BUGFIX: original message read "Falied".
                    return "%s Failed" % method

            # BUGFIX: the original called channel.startstwith (typo ->
            # AttributeError at runtime).
            if channel.startswith(tuple(["+", "-", "x"])):

                # "+<num>" enable, "-<num>" disable, "x<num>" toggle.
                channel_method = channel[0]
                channel_number = channel[1:]

                if str(channel_number) not in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number")]:
                    response = Response("Not Found", status=404)
                    response.headers["X-fHDHR-Error"] = "801 - Unknown Channel"
                    self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
                    abort(response)

                if channel_method == "+":
                    self.fhdhr.device.channels.set_channel_enablement("number", channel_number, channel_method)
                elif channel_method == "-":
                    self.fhdhr.device.channels.set_channel_enablement("number", channel_number, channel_method)
                elif channel_method == "x":
                    self.fhdhr.device.channels.set_channel_enablement("number", channel_number, "toggle")

            else:
                # BUGFIX: the original read request.args['favorite'], which
                # is never set (the parameter is named 'channel') and would
                # raise KeyError.
                self.fhdhr.logger.warning("Unknown favorite command " + channel)
                return abort(200, "Not a valid favorite command")

        elif method in ["enable", "disable"]:
            channel = request.args.get('channel', default=None, type=str)
            if channel == "all":
                self.fhdhr.device.channels.set_channel_enablement_all(method)
            elif not channel or str(channel) not in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number")]:
                if redirect_url:
                    return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Failed" % method))
                else:
                    # BUGFIX: original message read "Falied".
                    return "%s Failed" % method
            else:
                self.fhdhr.device.channels.set_channel_enablement("number", channel, method)

        elif method == "update":
            # Form-driven per-channel metadata update; coerce each field to
            # the type the channels store expects.
            channel_id = request.form.get('id', None)
            updatedict = {}
            for key in list(request.form.keys()):
                if key != "id":
                    if key in ["name", "callsign", "thumbnail"]:
                        updatedict[key] = str(request.form.get(key))
                    elif key in ["number"]:
                        updatedict[key] = float(request.form.get(key))
                    elif key in ["enabled"]:
                        confvalue = request.form.get(key)
                        if str(confvalue).lower() in ["false"]:
                            confvalue = False
                        elif str(confvalue).lower() in ["true"]:
                            confvalue = True
                        updatedict[key] = confvalue
                    elif key in ["favorite", "HD"]:
                        updatedict[key] = int(request.form.get(key))
            self.fhdhr.device.channels.set_channel_status("id", channel_id, updatedict)

        elif method == "scan":
            self.fhdhr.device.channels.get_channels(forceupdate=True)

        else:
            return "Invalid Method"

        if redirect_url:
            return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Success" % method))
        else:
            if method == "scan":
                # HDHR clients poll this URL to track scan progress.
                return redirect('/lineup_status.json')
            else:
                return "%s Success" % method
||||||
56
fHDHR_web/api/cluster.py
Normal file
@ -0,0 +1,56 @@
|
|||||||
|
from flask import request, redirect, Response
|
||||||
|
import urllib.parse
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
|
class Cluster():
    """API endpoint for fHDHR cluster management (peer discovery and sync).

    Dispatches on the ``method`` query argument; ``get`` returns the current
    cluster as JSON, all other recognized methods perform a side effect on
    ``fhdhr.device.cluster``/``ssdp`` and fall through to a success response.
    """

    # Flask routing metadata consumed by the web-app registrar.
    endpoints = ["/api/cluster"]
    endpoint_name = "api_cluster"
    endpoint_methods = ["GET", "POST"]

    def __init__(self, fhdhr):
        # Shared application object (config, device, logger, ...).
        self.fhdhr = fhdhr

    def __call__(self, *args):
        # The registrar calls the instance directly; delegate to get().
        return self.get(*args)

    def get(self, *args):
        """Handle a cluster API request.

        Query args:
            method: one of get/scan/add/del/sync/leave/disconnect/alive.
            location: peer address; used by add/del/sync only.
            redirect: optional URL to bounce back to with a retmessage.
        """

        method = request.args.get('method', default="get", type=str)

        # Peer location; only meaningful for add/del/sync.
        location = request.args.get("location", default=None, type=str)

        redirect_url = request.args.get('redirect', default=None, type=str)

        if method == "get":
            # Serialize the current cluster state as pretty-printed JSON.
            jsoncluster = self.fhdhr.device.cluster.cluster()
            cluster_json = json.dumps(jsoncluster, indent=4)

            return Response(status=200,
                            response=cluster_json,
                            mimetype='application/json')

        elif method == "scan":
            # Trigger an SSDP M-SEARCH to discover peers.
            self.fhdhr.device.ssdp.m_search()

        elif method == 'add':
            self.fhdhr.device.cluster.add(location)

        elif method == 'del':
            self.fhdhr.device.cluster.remove(location)

        elif method == 'sync':
            self.fhdhr.device.cluster.sync(location)

        elif method == 'leave':
            self.fhdhr.device.cluster.leave()

        elif method == 'disconnect':
            self.fhdhr.device.cluster.disconnect()

        elif method == 'alive':
            self.fhdhr.device.ssdp.do_alive(forcealive=True)

        else:
            return "Invalid Method"

        # Side-effect methods fall through to here.
        if redirect_url:
            return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Success" % method))
        else:
            return "%s Success" % method
|
||||||
29
fHDHR_web/api/debug.py
Normal file
@ -0,0 +1,29 @@
|
|||||||
|
from flask import request, Response
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
|
class Debug_JSON():
    """Expose basic runtime diagnostics (base URL, channel count, tuner
    status) as a JSON API endpoint."""

    endpoints = ["/api/debug"]
    endpoint_name = "api_debug"
    endpoint_methods = ["GET", "POST"]

    def __init__(self, fhdhr):
        # Shared application object (config, device, logger, ...).
        self.fhdhr = fhdhr

    def __call__(self, *args):
        # The registrar calls the instance directly; delegate to get().
        return self.get(*args)

    def get(self, *args):
        """Return server diagnostics as a pretty-printed JSON response."""

        # url_root carries a trailing slash; strip it for clean URLs.
        root = request.url_root[:-1]

        diagnostics = {}
        diagnostics["base_url"] = root
        diagnostics["total channels"] = len(self.fhdhr.device.channels.list)
        diagnostics["tuner status"] = self.fhdhr.device.tuners.status()

        payload = json.dumps(diagnostics, indent=4)

        return Response(status=200,
                        response=payload,
                        mimetype='application/json')
|
||||||
59
fHDHR_web/api/epg.py
Normal file
@ -0,0 +1,59 @@
|
|||||||
|
from flask import Response, request, redirect
|
||||||
|
import urllib.parse
|
||||||
|
import json
|
||||||
|
|
||||||
|
|
||||||
|
class EPG():
    """Methods to create xmltv.xml"""

    endpoints = ["/api/epg"]
    endpoint_name = "api_epg"
    endpoint_methods = ["GET", "POST"]

    def __init__(self, fhdhr):
        # Shared application object (config, device, logger, ...).
        self.fhdhr = fhdhr

    def __call__(self, *args):
        # The registrar calls the instance directly; delegate to get().
        return self.get(*args)

    def get(self, *args):
        """Handle an EPG API request.

        Query args:
            method: get (return EPG JSON) / update / clearcache.
            source: EPG backend; must be in config epg.valid_epg_methods.
            redirect: optional URL to bounce back to with a retmessage.
        """

        method = request.args.get('method', default="get", type=str)

        source = request.args.get('source', default=self.fhdhr.config.dict["epg"]["def_method"], type=str)
        if source not in self.fhdhr.config.dict["epg"]["valid_epg_methods"]:
            return "%s Invalid xmltv method" % source

        redirect_url = request.args.get('redirect', default=None, type=str)

        if method == "get":

            epgdict = self.fhdhr.device.epg.get_epg(source)
            # Work on a copy so the cached EPG dict is not mutated.
            epgdict = epgdict.copy()
            # Re-key the EPG dict from origin channel id to channel number,
            # overlaying the channel object's metadata on each entry.
            for c in list(epgdict.keys()):
                chan_obj = self.fhdhr.device.channels.get_channel_obj("origin_id", epgdict[c]["id"])
                epgdict[chan_obj.dict["number"]] = epgdict.pop(c)
                epgdict[chan_obj.dict["number"]]["name"] = chan_obj.dict["name"]
                epgdict[chan_obj.dict["number"]]["callsign"] = chan_obj.dict["callsign"]
                epgdict[chan_obj.dict["number"]]["number"] = chan_obj.dict["number"]
                epgdict[chan_obj.dict["number"]]["id"] = chan_obj.dict["origin_id"]
                epgdict[chan_obj.dict["number"]]["thumbnail"] = chan_obj.thumbnail

            epg_json = json.dumps(epgdict, indent=4)

            return Response(status=200,
                            response=epg_json,
                            mimetype='application/json')

        elif method == "update":
            # Force a refresh of the EPG from the given source.
            self.fhdhr.device.epg.update(source)

        elif method == "clearcache":
            self.fhdhr.device.epg.clear_epg_cache(source)

        else:
            return "%s Invalid Method" % method

        # update/clearcache fall through to here.
        if redirect_url:
            return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Success" % method))
        else:
            return "%s Success" % method
|
||||||
44
fHDHR_web/api/images.py
Normal file
@ -0,0 +1,44 @@
|
|||||||
|
from flask import request, Response, abort
|
||||||
|
|
||||||
|
|
||||||
|
class Images():
    """API endpoint serving channel/content images, either fetched from the
    EPG source or generated on the fly."""

    endpoints = ["/api/images"]
    endpoint_name = "api_images"
    endpoint_methods = ["GET", "POST"]

    def __init__(self, fhdhr):
        # Shared application object (config, device, logger, ...).
        self.fhdhr = fhdhr

    def __call__(self, *args):
        # The registrar calls the instance directly; delegate to get().
        return self.get(*args)

    def get(self, *args):
        """Resolve an image request.

        method=generate renders a placeholder image from ``message``;
        method=get looks up an EPG image by ``type`` and ``id``. Any
        request that does not resolve to an image aborts with 501.
        """

        # Stays None unless one of the branches below produces image bytes.
        image = None

        method = request.args.get('method', default="get", type=str)

        if method == "generate":
            image_type = request.args.get('type', default="content", type=str)
            if image_type in ["content", "channel"]:
                message = request.args.get('message', default="Unknown Request", type=str)
                image = self.fhdhr.device.images.generate_image(image_type, message)

        elif method == "get":
            # NOTE(review): this reads epg "method" while other endpoints read
            # epg "def_method" — presumably both hold the active EPG source;
            # confirm against the config schema.
            source = request.args.get('source', default=self.fhdhr.config.dict["epg"]["method"], type=str)
            if source in self.fhdhr.config.dict["epg"]["valid_epg_methods"]:
                image_type = request.args.get('type', default="content", type=str)
                if image_type in ["content", "channel"]:
                    image_id = request.args.get('id', default=None, type=str)
                    if image_id:
                        image = self.fhdhr.device.images.get_epg_image(image_type, image_id)

        else:
            # Unknown method: serve a generated "Unknown Request" image.
            image = self.fhdhr.device.images.generate_image("content", "Unknown Request")

        if image:
            imagemimetype = self.fhdhr.device.images.get_image_type(image)
            return Response(image, content_type=imagemimetype, direct_passthrough=True)

        else:
            return abort(501, "Not a valid image request")
|
||||||
89
fHDHR_web/api/m3u.py
Normal file
@ -0,0 +1,89 @@
|
|||||||
|
from flask import Response, request, redirect
|
||||||
|
import urllib.parse
|
||||||
|
from io import StringIO
|
||||||
|
|
||||||
|
|
||||||
|
class M3U():
    """Build an M3U playlist for all enabled channels or a single channel."""

    endpoints = ["/api/m3u", "/api/channels.m3u"]
    endpoint_name = "api_m3u"
    endpoint_methods = ["GET", "POST"]

    def __init__(self, fhdhr):
        # Shared application object (config, device, logger, ...).
        self.fhdhr = fhdhr

    def __call__(self, *args):
        # The registrar calls the instance directly; delegate to get().
        return self.get(*args)

    def get(self, *args):
        """Handle an M3U request.

        Query args:
            method: only "get" produces a playlist; anything else falls
                through to the generic success response.
            channel: "all" (default) or a channel number.
            redirect: optional URL to bounce back to with a retmessage.
        """

        # url_root carries a trailing slash; strip it for clean URLs.
        base_url = request.url_root[:-1]

        method = request.args.get('method', default="get", type=str)
        channel = request.args.get('channel', default="all", type=str)
        redirect_url = request.args.get('redirect', default=None, type=str)

        if method == "get":

            # M3U format markers.
            FORMAT_DESCRIPTOR = "#EXTM3U"
            RECORD_MARKER = "#EXTINF"

            # Assemble the playlist in memory.
            fakefile = StringIO()

            xmltvurl = ('%s/api/xmltv' % base_url)

            # Header line advertising the companion XMLTV guide URL.
            fakefile.write(
                "%s\n" % (
                    FORMAT_DESCRIPTOR + " " +
                    "url-tvg=\"" + xmltvurl + "\"" + " " +
                    "x-tvg-url=\"" + xmltvurl + "\"")
                )

            channel_items = []

            if channel == "all":
                fileName = "channels.m3u"
                # Include every enabled channel.
                for fhdhr_id in list(self.fhdhr.device.channels.list.keys()):
                    channel_obj = self.fhdhr.device.channels.list[fhdhr_id]
                    if channel_obj.enabled:
                        channel_items.append(channel_obj)
            elif str(channel) in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number")]:
                channel_obj = self.fhdhr.device.channels.get_channel_obj("number", channel)
                fileName = str(channel_obj.number) + ".m3u"
                if channel_obj.enabled:
                    channel_items.append(channel_obj)
                else:
                    return "Channel Disabled"
            else:
                return "Invalid Channel"

            for channel_obj in channel_items:

                # Proxy the logo through /api/images when configured to, or
                # when the channel has no thumbnail of its own.
                if self.fhdhr.config.dict["epg"]["images"] == "proxy" or not channel_obj.thumbnail:
                    logourl = ('%s/api/images?method=get&type=channel&id=%s' %
                               (base_url, str(channel_obj.dict['origin_id'])))
                else:
                    logourl = channel_obj.thumbnail

                # EXTINF record with the tvg-* attributes players expect.
                fakefile.write(
                    "%s\n" % (
                        RECORD_MARKER + ":0" + " " +
                        "channelID=\"" + str(channel_obj.dict['origin_id']) + "\" " +
                        "tvg-chno=\"" + str(channel_obj.dict['number']) + "\" " +
                        "tvg-name=\"" + str(channel_obj.dict['name']) + "\" " +
                        "tvg-id=\"" + str(channel_obj.dict['number']) + "\" " +
                        "tvg-logo=\"" + logourl + "\" " +
                        "group-title=\"" + self.fhdhr.config.dict["fhdhr"]["friendlyname"] + "\"," + str(channel_obj.dict['name']))
                    )

                # Stream URL line following the EXTINF record.
                fakefile.write("%s%s\n" % (base_url, channel_obj.stream_url))

            channels_m3u = fakefile.getvalue()

            resp = Response(status=200, response=channels_m3u, mimetype='audio/x-mpegurl')
            # Suggest a filename so browsers download rather than render.
            resp.headers["content-disposition"] = "attachment; filename=" + fileName
            return resp

        # Non-"get" methods fall through to here.
        if redirect_url:
            return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Success" % method))
        else:
            return "%s Success" % method
|
||||||
32
fHDHR_web/api/root_url.py
Normal file
@ -0,0 +1,32 @@
|
|||||||
|
from flask import redirect, request
|
||||||
|
|
||||||
|
|
||||||
|
class Root_URL():
    """Root (``/``) router: send discovery clients to the device XML, Plex
    to the RMG interface when enabled, and browsers to the web UI."""

    endpoints = ["/"]
    endpoint_name = "page_root_html"
    endpoint_methods = ["GET", "POST"]

    def __init__(self, fhdhr):
        # Shared application object (config, device, logger, ...).
        self.fhdhr = fhdhr

    def __call__(self, *args):
        # The registrar calls the instance directly; delegate to get().
        return self.get(*args)

    def get(self, *args):
        """Redirect based on the requesting User-Agent."""

        agent = request.headers.get('User-Agent')
        is_plex = str(agent).lower().startswith("plexmediaserver")

        # A browser always sends a User-Agent; discovery clients may not.
        if agent and not is_plex:
            return redirect("/index")

        # Plex Remote Media Grabber redirect (when the RMG interface is on).
        if self.fhdhr.config.dict["rmg"]["enabled"] and is_plex:
            return redirect("/rmg")

        # Client device looking for an HDHR-type device.
        return redirect("/hdhr/device.xml")
|
||||||
40
fHDHR_web/api/settings.py
Normal file
@ -0,0 +1,40 @@
|
|||||||
|
from flask import request, redirect
|
||||||
|
import urllib.parse
|
||||||
|
|
||||||
|
|
||||||
|
class Settings():
    """API endpoint for updating fHDHR configuration values."""

    endpoints = ["/api/settings"]
    endpoint_name = "api_settings"
    endpoint_methods = ["GET", "POST"]

    def __init__(self, fhdhr):
        # Shared application object (config, device, logger, ...).
        self.fhdhr = fhdhr

    def __call__(self, *args):
        # The registrar calls the instance directly; delegate to get().
        return self.get(*args)

    def get(self, *args):
        """Handle a settings request.

        method=update writes one config value from the form fields
        config_section/config_name/config_value; any other method falls
        through to the generic success response.
        """

        method = request.args.get('method', default="get", type=str)
        redirect_url = request.args.get('redirect', default=None, type=str)

        if method == "update":

            config_section = request.form.get('config_section', None)
            config_name = request.form.get('config_name', None)
            config_value = request.form.get('config_value', None)

            # All three form fields are required to perform a write.
            if not config_section or not config_name or not config_value:
                if redirect_url:
                    return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Failed" % method))
                else:
                    # BUG FIX: message previously read "%s Falied" (typo).
                    return "%s Failed" % method

            # "origin" is an alias for the origin plugin's real section name.
            if config_section == "origin":
                config_section = self.fhdhr.config.dict["main"]["dictpopname"]

            self.fhdhr.config.write(config_section, config_name, config_value)

        if redirect_url:
            return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Success" % method))
        else:
            return "%s Success" % method
|
||||||
125
fHDHR_web/api/tuners.py
Normal file
@ -0,0 +1,125 @@
|
|||||||
|
from flask import Response, request, redirect, abort, stream_with_context
|
||||||
|
import urllib.parse
|
||||||
|
import uuid
|
||||||
|
|
||||||
|
from fHDHR.exceptions import TunerError
|
||||||
|
|
||||||
|
|
||||||
|
class Tuners():
    """API endpoint managing tuner streams: start a stream (direct/ffmpeg/
    vlc), close a tuner, or scan channels."""

    endpoints = ["/api/tuners"]
    endpoint_name = "api_tuners"
    endpoint_methods = ["GET", "POST"]

    def __init__(self, fhdhr):
        # Shared application object (config, device, logger, ...).
        self.fhdhr = fhdhr

    def __call__(self, *args):
        # The registrar calls the instance directly; delegate to get().
        return self.get(*args)

    def get(self, *args):
        """Handle a tuner request.

        Query args:
            method: direct/ffmpeg/vlc start a stream; close/scan manage a
                tuner; defaults to the configured stream_type.
            tuner: optional tuner number (auto-selected when omitted).
            channel: channel number (required for streaming methods).
            duration, transcode, accessed, redirect: stream options.
        """

        client_address = request.remote_addr

        # URL the client originally hit (may be forwarded via /auto/...).
        accessed_url = request.args.get('accessed', default=request.url, type=str)

        method = request.args.get('method', default=self.fhdhr.config.dict["fhdhr"]["stream_type"], type=str)

        tuner_number = request.args.get('tuner', None, type=str)

        redirect_url = request.args.get('redirect', default=None, type=str)

        if method in ["direct", "ffmpeg", "vlc"]:

            channel_number = request.args.get('channel', None, type=str)
            if not channel_number:
                return "Missing Channel"

            # Reject unknown channels with an HDHR-style error header.
            if str(channel_number) not in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number")]:
                response = Response("Not Found", status=404)
                response.headers["X-fHDHR-Error"] = "801 - Unknown Channel"
                self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
                abort(response)

            channel_dict = self.fhdhr.device.channels.get_channel_dict("number", channel_number)
            # Disabled channels cannot be tuned.
            if not channel_dict["enabled"]:
                response = Response("Service Unavailable", status=503)
                response.headers["X-fHDHR-Error"] = str("806 - Tune Failed")
                self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
                abort(response)

            # 0 means no duration limit.
            duration = request.args.get('duration', default=0, type=int)

            transcode = request.args.get('transcode', default=None, type=str)
            valid_transcode_types = [None, "heavy", "mobile", "internet720", "internet480", "internet360", "internet240"]
            if transcode not in valid_transcode_types:
                response = Response("Service Unavailable", status=503)
                response.headers["X-fHDHR-Error"] = "802 - Unknown Transcode Profile"
                self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
                abort(response)

            # Everything the tuner/stream machinery needs for this session.
            stream_args = {
                "channel": channel_number,
                "method": method,
                "duration": duration,
                "transcode": transcode,
                "accessed": accessed_url,
                "client": client_address,
                # Unique per-session id for tracking this client stream.
                "client_id": str(client_address) + "_" + str(uuid.uuid4())
                }

            # Grab a tuner: first free one, or the explicitly requested one.
            try:
                if not tuner_number:
                    tunernum = self.fhdhr.device.tuners.first_available(channel_number)
                else:
                    tunernum = self.fhdhr.device.tuners.tuner_grab(tuner_number, channel_number)
            except TunerError as e:
                self.fhdhr.logger.info("A %s stream request for channel %s was rejected due to %s"
                                       % (stream_args["method"], str(stream_args["channel"]), str(e)))
                response = Response("Service Unavailable", status=503)
                response.headers["X-fHDHR-Error"] = str(e)
                self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
                abort(response)

            tuner = self.fhdhr.device.tuners.tuners[str(tunernum)]

            # Resolve the upstream stream info; release the tuner on failure.
            try:
                stream_args = self.fhdhr.device.tuners.get_stream_info(stream_args)
            except TunerError as e:
                self.fhdhr.logger.info("A %s stream request for channel %s was rejected due to %s"
                                       % (stream_args["method"], str(stream_args["channel"]), str(e)))
                response = Response("Service Unavailable", status=503)
                response.headers["X-fHDHR-Error"] = str(e)
                self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
                tuner.close()
                abort(response)

            self.fhdhr.logger.info("Tuner #" + str(tunernum) + " to be used for stream.")
            tuner.set_status(stream_args)

            if stream_args["method"] == "direct":
                return Response(tuner.get_stream(stream_args, tuner), content_type=stream_args["content_type"], direct_passthrough=True)

            elif stream_args["method"] in ["ffmpeg", "vlc"]:
                # stream_with_context keeps the request context alive while
                # the generator produced by the transcoder is consumed.
                return Response(stream_with_context(tuner.get_stream(stream_args, tuner)), mimetype=stream_args["content_type"])

        elif method == "close":

            if not tuner_number or str(tuner_number) not in list(self.fhdhr.device.tuners.tuners.keys()):
                return "%s Invalid tuner" % str(tuner_number)

            tuner = self.fhdhr.device.tuners.tuners[str(tuner_number)]
            tuner.close()

        elif method == "scan":

            # Without a tuner number, scan on any available tuner.
            if not tuner_number:
                self.fhdhr.device.tuners.tuner_scan()
            else:
                tuner = self.fhdhr.device.tuners.tuners[str(tuner_number)]
                tuner.channel_scan()

        else:
            return "%s Invalid Method" % method

        # close/scan fall through to here.
        if redirect_url:
            return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Success" % method))
        else:
            return "%s Success" % method
|
||||||
164
fHDHR_web/api/xmltv.py
Normal file
@ -0,0 +1,164 @@
|
|||||||
|
from flask import Response, request, redirect
|
||||||
|
import xml.etree.ElementTree
|
||||||
|
from io import BytesIO
|
||||||
|
import urllib.parse
|
||||||
|
|
||||||
|
from fHDHR.tools import sub_el
|
||||||
|
|
||||||
|
|
||||||
|
class xmlTV():
    """Methods to create xmltv.xml"""

    endpoints = ["/api/xmltv", "/xmltv.xml"]
    endpoint_name = "api_xmltv"
    endpoint_methods = ["GET", "POST"]

    def __init__(self, fhdhr):
        # Shared application object (config, device, logger, ...).
        self.fhdhr = fhdhr

    def __call__(self, *args):
        # The registrar calls the instance directly; delegate to get().
        return self.get(*args)

    def get(self, *args):
        """Handle an XMLTV request.

        Query args:
            method: get (return XML guide) / update / clearcache.
            source: EPG backend; must be in config epg.valid_epg_methods.
            DeviceAuth: required when fhdhr.require_auth is enabled.
            redirect: optional URL to bounce back to with a retmessage.
        """

        # Optional HDHR-style device auth gate.
        if self.fhdhr.config.dict["fhdhr"]["require_auth"]:
            DeviceAuth = request.args.get('DeviceAuth', default=None, type=str)
            if DeviceAuth != self.fhdhr.config.dict["fhdhr"]["device_auth"]:
                return "not subscribed"

        # url_root carries a trailing slash; strip it for clean URLs.
        base_url = request.url_root[:-1]

        method = request.args.get('method', default="get", type=str)

        source = request.args.get('source', default=self.fhdhr.config.dict["epg"]["def_method"], type=str)
        if source not in self.fhdhr.config.dict["epg"]["valid_epg_methods"]:
            return "%s Invalid xmltv method" % source

        redirect_url = request.args.get('redirect', default=None, type=str)

        if method == "get":

            epgdict = self.fhdhr.device.epg.get_epg(source)
            xmltv_xml = self.create_xmltv(base_url, epgdict, source)

            return Response(status=200,
                            response=xmltv_xml,
                            mimetype='application/xml')

        elif method == "update":
            self.fhdhr.device.epg.update(source)

        elif method == "clearcache":
            self.fhdhr.device.epg.clear_epg_cache(source)

        else:
            return "%s Invalid Method" % method

        # update/clearcache fall through to here.
        if redirect_url:
            return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Success" % method))
        else:
            return "%s Success" % method

    def xmltv_headers(self):
        """This method creates the XML headers for our xmltv"""
        xmltvgen = xml.etree.ElementTree.Element('tv')
        xmltvgen.set('source-info-url', self.fhdhr.config.dict["fhdhr"]["friendlyname"])
        xmltvgen.set('source-info-name', self.fhdhr.config.dict["main"]["servicename"])
        xmltvgen.set('generator-info-name', 'fHDHR')
        xmltvgen.set('generator-info-url', 'fHDHR/' + self.fhdhr.config.dict["main"]["reponame"])
        return xmltvgen

    def xmltv_file(self, xmltvgen):
        """This method is used to close out the xml file"""
        xmltvfile = BytesIO()
        xmltvfile.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
        xmltvfile.write(xml.etree.ElementTree.tostring(xmltvgen, encoding='UTF-8'))
        return xmltvfile.getvalue()

    def xmltv_empty(self):
        """This method is called when creation of a full xmltv is not possible"""
        return self.xmltv_file(self.xmltv_headers())

    def create_xmltv(self, base_url, epgdict, source):
        """Build the full XMLTV document (bytes) from an EPG dict."""
        if not epgdict:
            return self.xmltv_empty()
        # Work on a copy so the cached EPG dict is not mutated.
        epgdict = epgdict.copy()

        out = self.xmltv_headers()

        # Origin-style sources are keyed by origin id: re-key by channel
        # number and overlay the channel object's metadata.
        if source in ["origin", "blocks", self.fhdhr.config.dict["main"]["dictpopname"]]:
            for c in list(epgdict.keys()):
                chan_obj = self.fhdhr.device.channels.get_channel_obj("origin_id", epgdict[c]["id"])
                epgdict[chan_obj.dict["number"]] = epgdict.pop(c)
                epgdict[chan_obj.dict["number"]]["name"] = chan_obj.dict["name"]
                epgdict[chan_obj.dict["number"]]["callsign"] = chan_obj.dict["callsign"]
                epgdict[chan_obj.dict["number"]]["number"] = chan_obj.dict["number"]
                epgdict[chan_obj.dict["number"]]["id"] = chan_obj.dict["origin_id"]
                epgdict[chan_obj.dict["number"]]["thumbnail"] = chan_obj.thumbnail

        # First pass: one <channel> element per channel.
        for c in list(epgdict.keys()):

            c_out = sub_el(out, 'channel', id=str(epgdict[c]['number']))
            sub_el(c_out, 'display-name',
                   text='%s %s' % (epgdict[c]['number'], epgdict[c]['callsign']))
            sub_el(c_out, 'display-name',
                   text='%s %s %s' % (epgdict[c]['number'], epgdict[c]['callsign'], str(epgdict[c]['id'])))
            sub_el(c_out, 'display-name', text=epgdict[c]['number'])
            sub_el(c_out, 'display-name', text=epgdict[c]['callsign'])
            sub_el(c_out, 'display-name', text=epgdict[c]['name'])

            # Channel icon: proxied through /api/images, or the raw URL.
            if self.fhdhr.config.dict["epg"]["images"] == "proxy":
                sub_el(c_out, 'icon', src=(str(base_url) + "/api/images?method=get&type=channel&id=" + str(epgdict[c]['id'])))
            else:
                sub_el(c_out, 'icon', src=(epgdict[c]["thumbnail"]))

        # Second pass: one <programme> element per listing entry.
        for channelnum in list(epgdict.keys()):

            channel_listing = epgdict[channelnum]['listing']

            for program in channel_listing:

                prog_out = sub_el(out, 'programme',
                                  start=program['time_start'],
                                  stop=program['time_end'],
                                  channel=str(channelnum))

                sub_el(prog_out, 'title', lang='en', text=program['title'])

                sub_el(prog_out, 'desc', lang='en', text=program['description'])

                sub_el(prog_out, 'sub-title', lang='en', text='Movie: ' + program['sub-title'])

                sub_el(prog_out, 'length', units='minutes', text=str(int(program['duration_minutes'])))

                for f in program['genres']:
                    sub_el(prog_out, 'category', lang='en', text=f)
                    sub_el(prog_out, 'genre', lang='en', text=f)

                if program['seasonnumber'] and program['episodenumber']:
                    s_ = int(str(program['seasonnumber']), 10)
                    e_ = int(str(program['episodenumber']), 10)
                    sub_el(prog_out, 'episode-num', system='dd_progid',
                           text=str(program['id']))
                    sub_el(prog_out, 'episode-num', system='common',
                           text='S%02dE%02d' % (s_, e_))
                    # xmltv_ns numbering is zero-based.
                    sub_el(prog_out, 'episode-num', system='xmltv_ns',
                           text='%d.%d.' % (int(s_)-1, int(e_)-1))
                    # NOTE(review): the system attribute 'SxxExx">S' looks
                    # like a markup-injection artifact — confirm intent.
                    sub_el(prog_out, 'episode-num', system='SxxExx">S',
                           text='S%02dE%02d' % (s_, e_))

                if program["thumbnail"]:
                    if self.fhdhr.config.dict["epg"]["images"] == "proxy":
                        sub_el(prog_out, 'icon', src=(str(base_url) + "/api/images?method=get&type=content&id=" + str(program['id'])))
                    else:
                        sub_el(prog_out, 'icon', src=(program["thumbnail"]))
                else:
                    # No thumbnail: point at a generated placeholder image.
                    sub_el(prog_out, 'icon', src=(str(base_url) + "/api/images?method=generate&type=content&message=" + urllib.parse.quote(program['title'])))

                if program['rating']:
                    rating_out = sub_el(prog_out, 'rating', system="MPAA")
                    sub_el(rating_out, 'value', text=program['rating'])

                if program['isnew']:
                    sub_el(prog_out, 'new')

        return self.xmltv_file(out)
|
||||||
15
fHDHR_web/files/__init__.py
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
|
||||||
|
|
||||||
|
from .favicon_ico import Favicon_ICO
|
||||||
|
from .style_css import Style_CSS
|
||||||
|
from .device_xml import Device_XML
|
||||||
|
|
||||||
|
|
||||||
|
class fHDHR_Files():
    """Container wiring up the static-file endpoints (favicon, stylesheet,
    device.xml router)."""

    def __init__(self, fhdhr):
        # Shared application object (config, device, logger, ...).
        self.fhdhr = fhdhr

        self.favicon = Favicon_ICO(fhdhr)
        self.style = Style_CSS(fhdhr)
        self.device_xml = Device_XML(fhdhr)
|
||||||
21
fHDHR_web/files/device_xml.py
Normal file
@ -0,0 +1,21 @@
|
|||||||
|
from flask import request, redirect
|
||||||
|
|
||||||
|
|
||||||
|
class Device_XML():
    """Top-level /device.xml router: Plex (with RMG enabled) gets the RMG
    variant, everything else gets the HDHR variant."""

    endpoints = ["/device.xml"]
    endpoint_name = "file_device_xml"

    def __init__(self, fhdhr):
        # Shared application object (config, device, logger, ...).
        self.fhdhr = fhdhr

    def __call__(self, *args):
        # The registrar calls the instance directly; delegate to get().
        return self.get(*args)

    def get(self, *args):
        """Redirect to the appropriate device.xml based on User-Agent."""

        agent = str(request.headers.get('User-Agent')).lower()
        rmg_enabled = self.fhdhr.config.dict["rmg"]["enabled"]

        if rmg_enabled and agent.startswith("plexmediaserver"):
            return redirect("/rmg/device.xml")

        return redirect("/hdhr/device.xml")
|
||||||
18
fHDHR_web/files/favicon_ico.py
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
from flask import send_from_directory
|
||||||
|
|
||||||
|
|
||||||
|
class Favicon_ICO():
    """Serve favicon.ico from the configured www directory."""

    endpoints = ["/favicon.ico"]
    endpoint_name = "file_favicon_ico"

    def __init__(self, fhdhr):
        # Shared application object (config, device, logger, ...).
        self.fhdhr = fhdhr

    def __call__(self, *args):
        # The registrar calls the instance directly; delegate to get().
        return self.get(*args)

    def get(self, *args):
        """Return the favicon file with the proper icon mimetype."""

        www_dir = self.fhdhr.config.internal["paths"]["www_dir"]
        return send_from_directory(www_dir,
                                   'favicon.ico',
                                   mimetype='image/vnd.microsoft.icon')
|
||||||
44
fHDHR_web/files/style_css.py
Normal file
@ -0,0 +1,44 @@
|
|||||||
|
from flask import Response
|
||||||
|
import pathlib
|
||||||
|
from io import StringIO
|
||||||
|
|
||||||
|
|
||||||
|
class Style_CSS():
    """Serve the web UI stylesheet, optionally appending an external theme
    fetched over HTTP."""

    endpoints = ["/style.css"]
    endpoint_name = "file_style_css"

    def __init__(self, fhdhr):
        # Shared application object (config, device, logger, ...).
        self.fhdhr = fhdhr

        # Bundled stylesheet shipped in the www directory.
        self.internal_style_file = pathlib.Path(
            self.fhdhr.config.internal["paths"]["www_dir"]).joinpath('style.css')

        self.internal_style = StringIO()
        # BUG FIX: read via pathlib so the file handle is closed; the
        # previous bare open(...).read() leaked the handle.
        self.internal_style.write(self.internal_style_file.read_text())

        self.pull_external_theme()

    def pull_external_theme(self):
        """Fetch the configured external theme CSS, if it is an http(s) URL.

        Resets external_style/external_style_address to None first, so a
        failed or non-URL theme leaves no stale state behind.
        """
        self.external_style = None
        self.external_style_address = None
        if self.fhdhr.config.dict["web_ui"]["theme"]:
            if self.fhdhr.config.dict["web_ui"]["theme"].startswith(tuple(["http://", "https://"])):
                css_req = self.fhdhr.web.session.get(self.fhdhr.config.dict["web_ui"]["theme"])
                self.external_style = StringIO(css_req.text)
                self.external_style_address = self.fhdhr.config.dict["web_ui"]["theme"]

    def __call__(self, *args):
        # The registrar calls the instance directly; delegate to get().
        return self.get(*args)

    def get(self, *args):
        """Return the combined internal + external CSS as text/css."""

        main_output = StringIO()

        main_output.write(self.internal_style.getvalue())
        if self.fhdhr.config.dict["web_ui"]["theme"]:
            # Re-fetch when the configured theme URL changed since last pull.
            if self.fhdhr.config.dict["web_ui"]["theme"] != self.external_style_address:
                self.pull_external_theme()
            if self.external_style:
                main_output.write(self.external_style.getvalue())

        return Response(status=200, response=main_output.getvalue(), mimetype="text/css")
|
||||||
31
fHDHR_web/hdhr/__init__.py
Normal file
@ -0,0 +1,31 @@
|
|||||||
|
|
||||||
|
|
||||||
|
from .lineup_xml import Lineup_XML
|
||||||
|
from .discover_json import Discover_JSON
|
||||||
|
from .lineup_json import Lineup_JSON
|
||||||
|
from .lineup_status_json import Lineup_Status_JSON
|
||||||
|
|
||||||
|
from .lineup_post import Lineup_Post
|
||||||
|
from .device_xml import HDHR_Device_XML
|
||||||
|
|
||||||
|
from .auto import Auto
|
||||||
|
from .tuner import Tuner
|
||||||
|
|
||||||
|
|
||||||
|
class fHDHR_HDHR():
    """Container wiring up the HDHomeRun-emulation endpoints (lineup,
    discover, device.xml, auto-tune, tuner)."""

    def __init__(self, fhdhr):
        # Shared application object (config, device, logger, ...).
        self.fhdhr = fhdhr

        self.lineup_post = Lineup_Post(fhdhr)

        self.device_xml = HDHR_Device_XML(fhdhr)

        self.auto = Auto(fhdhr)
        self.tuner = Tuner(fhdhr)

        self.lineup_xml = Lineup_XML(fhdhr)

        self.discover_json = Discover_JSON(fhdhr)
        self.lineup_json = Lineup_JSON(fhdhr)
        self.lineup_status_json = Lineup_Status_JSON(fhdhr)
|
||||||
45
fHDHR_web/hdhr/auto.py
Normal file
@ -0,0 +1,45 @@
|
|||||||
|
from flask import request, abort, redirect
|
||||||
|
import urllib.parse
|
||||||
|
|
||||||
|
|
||||||
|
class Auto():
    """HDHR-style /auto/<channel> tune endpoint: translates the HDHR channel
    spec into a redirect to /api/tuners."""

    endpoints = ['/auto/<channel>', '/hdhr/auto/<channel>']
    endpoint_name = "hdhr_auto"

    def __init__(self, fhdhr):
        # Shared application object (config, device, logger, ...).
        self.fhdhr = fhdhr

    def __call__(self, channel, *args):
        # The registrar calls the instance directly; delegate to get().
        return self.get(channel, *args)

    def get(self, channel, *args):
        """Build and return a redirect to /api/tuners for ``channel``.

        Accepts HDHR channel forms: "v<number>" (virtual channel),
        "ch<freq>[-subchannel]" (physical tuning — not implemented, 501),
        or a bare channel number.
        """

        method = request.args.get('method', default=self.fhdhr.config.dict["fhdhr"]["stream_type"], type=str)

        redirect_url = "/api/tuners?method=%s" % (method)

        if channel.startswith("v"):
            # Virtual channel: strip the "v" prefix.
            channel_number = channel.replace('v', '')
        elif channel.startswith("ch"):
            # Physical frequency tuning is not supported.
            channel_freq = channel.replace('ch', '').split("-")[0]
            subchannel = 0
            if "-" in channel:
                subchannel = channel.replace('ch', '').split("-")[1]
            self.fhdhr.logger.error("Not Implemented %s-%s" % (str(channel_freq), str(subchannel)))
            abort(501, "Not Implemented %s-%s" % (str(channel_freq), str(subchannel)))
        else:
            channel_number = channel

        redirect_url += "&channel=%s" % str(channel_number)

        # Forward optional stream parameters when supplied.
        duration = request.args.get('duration', default=0, type=int)
        if duration:
            redirect_url += "&duration=%s" % str(duration)

        transcode = request.args.get('transcode', default=None, type=str)
        if transcode:
            redirect_url += "&transcode=%s" % str(transcode)

        # Preserve the originally-requested URL for the tuner endpoint.
        redirect_url += "&accessed=%s" % urllib.parse.quote(request.url)

        return redirect(redirect_url)
|
||||||
53
fHDHR_web/hdhr/device_xml.py
Normal file
@ -0,0 +1,53 @@
|
|||||||
|
from flask import Response, request
from io import BytesIO
import xml.etree.ElementTree

from fHDHR.tools import sub_el


class HDHR_Device_XML():
    """Serves the UPnP device-description XML referenced from SSDP announcements."""

    endpoints = ["/hdhr/device.xml"]
    endpoint_name = "hdhr_device_xml"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Device.xml referenced from SSDP"""

        # Strip the trailing slash from e.g. "http://host:port/".
        base_url = request.url_root[:-1]

        out = xml.etree.ElementTree.Element('root')
        out.set('xmlns', "urn:schemas-upnp-org:device-1-0")

        sub_el(out, 'URLBase', "%s" % base_url)

        specVersion_out = sub_el(out, 'specVersion')
        sub_el(specVersion_out, 'major', "1")
        sub_el(specVersion_out, 'minor', "0")

        device_out = sub_el(out, 'device')

        sub_el(device_out, 'deviceType', "urn:schemas-upnp-org:device:MediaServer:1")

        # Identity fields are taken from configuration so the instance can
        # impersonate a particular HDHR model.
        sub_el(device_out, 'friendlyName', self.fhdhr.config.dict["fhdhr"]["friendlyname"])
        sub_el(device_out, 'manufacturer', self.fhdhr.config.dict["fhdhr"]["reporting_manufacturer"])
        sub_el(device_out, 'manufacturerURL', "https://github.com/fHDHR/%s" % self.fhdhr.config.dict["main"]["reponame"])
        sub_el(device_out, 'modelName', self.fhdhr.config.dict["fhdhr"]["reporting_model"])
        sub_el(device_out, 'modelNumber', self.fhdhr.config.internal["versions"]["fHDHR"])

        # Deliberately empty element.
        sub_el(device_out, 'serialNumber')

        sub_el(device_out, 'UDN', "uuid:" + self.fhdhr.config.dict["main"]["uuid"])

        # Serialize the tree into an in-memory buffer, preceded by an XML
        # declaration line.
        fakefile = BytesIO()
        fakefile.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
        fakefile.write(xml.etree.ElementTree.tostring(out, encoding='UTF-8'))
        device_xml = fakefile.getvalue()

        return Response(status=200,
                        response=device_xml,
                        mimetype='application/xml')
|
||||||
35
fHDHR_web/hdhr/discover_json.py
Normal file
@ -0,0 +1,35 @@
|
|||||||
|
from flask import Response, request
import json


class Discover_JSON():
    """HDHR discover.json endpoint: device identity/capability blob."""

    endpoints = ["/discover.json", "/hdhr/discover.json"]
    endpoint_name = "hdhr_discover_json"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Return the HDHR discovery document built from configuration."""

        # Strip the trailing slash from the request root URL.
        base_url = request.url_root[:-1]

        jsondiscover = {
                        "FriendlyName": self.fhdhr.config.dict["fhdhr"]["friendlyname"],
                        "Manufacturer": self.fhdhr.config.dict["fhdhr"]["reporting_manufacturer"],
                        "ModelNumber": self.fhdhr.config.dict["fhdhr"]["reporting_model"],
                        "FirmwareName": self.fhdhr.config.dict["fhdhr"]["reporting_firmware_name"],
                        "TunerCount": self.fhdhr.config.dict["fhdhr"]["tuner_count"],
                        "FirmwareVersion": self.fhdhr.config.dict["fhdhr"]["reporting_firmware_ver"],
                        "DeviceID": self.fhdhr.config.dict["main"]["uuid"],
                        "DeviceAuth": self.fhdhr.config.dict["fhdhr"]["device_auth"],
                        "BaseURL": "%s" % base_url,
                        "LineupURL": "%s/lineup.json" % base_url
                        }
        discover_json = json.dumps(jsondiscover, indent=4)

        return Response(status=200,
                        response=discover_json,
                        mimetype='application/json')
|
||||||
37
fHDHR_web/hdhr/lineup_json.py
Normal file
@ -0,0 +1,37 @@
|
|||||||
|
from flask import Response, request
import json


class Lineup_JSON():
    """HDHR lineup.json endpoint: the JSON channel lineup."""

    endpoints = ["/lineup.json", "/hdhr/lineup.json"]
    endpoint_name = "hdhr_lineup_json"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Return the lineup; ?show=found also includes disabled channels."""

        base_url = request.url_root[:-1]

        show = request.args.get('show', default="all", type=str)

        jsonlineup = []
        for fhdhr_id in list(self.fhdhr.device.channels.list.keys()):
            channel_obj = self.fhdhr.device.channels.list[fhdhr_id]
            if channel_obj.enabled or show == "found":
                lineup_dict = channel_obj.lineup_dict
                # Make the stream URL absolute for external clients.
                lineup_dict["URL"] = "%s%s" % (base_url, lineup_dict["URL"])
                # In "found" mode, annotate each entry with its enabled state.
                if show == "found" and channel_obj.enabled:
                    lineup_dict["Enabled"] = 1
                elif show == "found" and not channel_obj.enabled:
                    lineup_dict["Enabled"] = 0
                jsonlineup.append(lineup_dict)

        lineup_json = json.dumps(jsonlineup, indent=4)

        return Response(status=200,
                        response=lineup_json,
                        mimetype='application/json')
|
||||||
60
fHDHR_web/hdhr/lineup_post.py
Normal file
@ -0,0 +1,60 @@
|
|||||||
|
from flask import request, abort, Response

from fHDHR.exceptions import TunerError


class Lineup_Post():
    """HDHR lineup.post endpoint: channel scan control and favorites.

    Accepts POSTs with either a ``scan`` argument (``start``/``abort``) or a
    ``favorite`` argument (``+N``/``-N``/``xN`` to enable/disable/toggle
    channel number N).
    """

    endpoints = ["/lineup.post", "/hdhr/lineup.post"]
    endpoint_name = "hdhr_lineup_post"
    endpoint_methods = ["POST"]

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Dispatch on the ``scan``/``favorite`` query arguments."""

        if 'scan' in list(request.args.keys()):

            if request.args['scan'] == 'start':
                try:
                    self.fhdhr.device.tuners.tuner_scan()
                except TunerError as e:
                    # Best-effort: a scan failure (e.g. no free tuner) is
                    # logged but still acknowledged with a 200.
                    self.fhdhr.logger.info(str(e))
                return Response(status=200, mimetype='text/html')

            elif request.args['scan'] == 'abort':
                self.fhdhr.device.tuners.stop_tuner_scan()
                return Response(status=200, mimetype='text/html')

            else:
                self.fhdhr.logger.warning("Unknown scan command " + request.args['scan'])
                # NOTE(review): abort() with a 2xx status is unusual for
                # werkzeug — confirm this is intended.
                return abort(200, "Not a valid scan command")

        elif 'favorite' in list(request.args.keys()):
            # BUGFIX: was `startstwith`, which raised AttributeError at runtime.
            if request.args['favorite'].startswith(tuple(["+", "-", "x"])):

                channel_method = request.args['favorite'][0]
                channel_number = request.args['favorite'][1:]

                # Reject favorites for channel numbers we do not carry.
                if str(channel_number) not in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number")]:
                    response = Response("Not Found", status=404)
                    response.headers["X-fHDHR-Error"] = "801 - Unknown Channel"
                    self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
                    abort(response)

                if channel_method == "+":
                    self.fhdhr.device.channels.set_channel_enablement("number", channel_number, channel_method)
                elif channel_method == "-":
                    self.fhdhr.device.channels.set_channel_enablement("number", channel_number, channel_method)
                elif channel_method == "x":
                    self.fhdhr.device.channels.set_channel_enablement("number", channel_number, "toggle")
                # NOTE(review): this branch falls through without an explicit
                # return (matching prior behavior) — confirm callers accept it.

            else:
                self.fhdhr.logger.warning("Unknown favorite command " + request.args['favorite'])
                return abort(200, "Not a valid favorite command")

        else:
            return abort(501, "Not a valid command")
|
||||||
50
fHDHR_web/hdhr/lineup_status_json.py
Normal file
@ -0,0 +1,50 @@
|
|||||||
|
from flask import Response
import json


class Lineup_Status_JSON():
    """HDHR lineup_status.json endpoint: scan state for HDHR clients."""

    endpoints = ["/lineup_status.json", "/hdhr/lineup_status.json"]
    endpoint_name = "hdhr_lineup_status_json"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Report scan-in-progress when a tuner is scanning or no channels exist."""

        # Count tuners currently in the "Scanning" state.
        tuner_status = self.fhdhr.device.tuners.status()
        tuners_scanning = 0
        for tuner_number in list(tuner_status.keys()):
            if tuner_status[tuner_number]["status"] == "Scanning":
                tuners_scanning += 1

        if tuners_scanning:
            jsonlineup = self.scan_in_progress()
        elif not len(self.fhdhr.device.channels.list):
            # No channels yet: report as scanning so clients keep polling.
            jsonlineup = self.scan_in_progress()
        else:
            jsonlineup = self.not_scanning()
        lineup_json = json.dumps(jsonlineup, indent=4)

        return Response(status=200,
                        response=lineup_json,
                        mimetype='application/json')

    def scan_in_progress(self):
        """Payload advertising an in-progress scan."""
        jsonlineup = {
                      "ScanInProgress": "true",
                      "Progress": 99,
                      "Found": len(self.fhdhr.device.channels.list)
                      }
        return jsonlineup

    def not_scanning(self):
        """Payload advertising an idle (scannable) device."""
        jsonlineup = {
                      "ScanInProgress": "false",
                      "ScanPossible": "true",
                      "Source": self.fhdhr.config.dict["fhdhr"]["reporting_tuner_type"],
                      "SourceList": [self.fhdhr.config.dict["fhdhr"]["reporting_tuner_type"]],
                      }
        return jsonlineup
|
||||||
45
fHDHR_web/hdhr/lineup_xml.py
Normal file
@ -0,0 +1,45 @@
|
|||||||
|
from flask import Response, request
from io import BytesIO
import xml.etree.ElementTree

from fHDHR.tools import sub_el


class Lineup_XML():
    """HDHR lineup.xml endpoint: the channel lineup as XML."""

    endpoints = ["/lineup.xml", "/hdhr/lineup.xml"]
    endpoint_name = "hdhr_lineup_xml"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Return the lineup; ?show=found also includes disabled channels."""

        base_url = request.url_root[:-1]

        show = request.args.get('show', default="all", type=str)

        out = xml.etree.ElementTree.Element('Lineup')
        for fhdhr_id in list(self.fhdhr.device.channels.list.keys()):
            channel_obj = self.fhdhr.device.channels.list[fhdhr_id]
            if channel_obj.enabled or show == "found":
                program_out = sub_el(out, 'Program')
                lineup_dict = channel_obj.lineup_dict
                # Make the stream URL absolute for external clients.
                lineup_dict["URL"] = base_url + lineup_dict["URL"]
                # In "found" mode, annotate each entry with its enabled state.
                if show == "found" and channel_obj.enabled:
                    lineup_dict["Enabled"] = 1
                elif show == "found" and not channel_obj.enabled:
                    lineup_dict["Enabled"] = 0
                for key in list(lineup_dict.keys()):
                    sub_el(program_out, str(key), str(lineup_dict[key]))

        # Serialize with a leading XML declaration line.
        fakefile = BytesIO()
        fakefile.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
        fakefile.write(xml.etree.ElementTree.tostring(out, encoding='UTF-8'))
        lineup_xml = fakefile.getvalue()

        return Response(status=200,
                        response=lineup_xml,
                        mimetype='application/xml')
|
||||||
47
fHDHR_web/hdhr/tuner.py
Normal file
@ -0,0 +1,47 @@
|
|||||||
|
from flask import request, abort, redirect
import urllib.parse


class Tuner():
    """HDHR-style /tunerN/<channel> endpoint.

    Like Auto, but pins the request to a specific tuner number before
    redirecting to the internal /api/tuners streaming endpoint.
    """

    endpoints = ['/tuner<tuner_number>/<channel>', '/hdhr/tuner<tuner_number>/<channel>']
    endpoint_name = "hdhr_tuner"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, tuner_number, channel, *args):
        return self.get(tuner_number, channel, *args)

    def get(self, tuner_number, channel, *args):
        """Build and return a redirect to /api/tuners for a specific tuner."""

        # Stream method defaults to the configured stream_type.
        method = request.args.get('method', default=self.fhdhr.config.dict["fhdhr"]["stream_type"], type=str)

        redirect_url = "/api/tuners?method=%s" % (method)

        redirect_url += "&tuner=%s" % str(tuner_number)

        # "vNNN" is a virtual channel number; "chFREQ[-SUB]" (frequency tuning)
        # is not supported and is rejected with a 501.
        if channel.startswith("v"):
            channel_number = channel.replace('v', '')
        elif channel.startswith("ch"):
            channel_freq = channel.replace('ch', '').split("-")[0]
            subchannel = 0
            if "-" in channel:
                subchannel = channel.replace('ch', '').split("-")[1]
            self.fhdhr.logger.error("Not Implemented %s-%s" % (str(channel_freq), str(subchannel)))
            abort(501, "Not Implemented %s-%s" % (str(channel_freq), str(subchannel)))
        else:
            # Anything else is treated as a bare channel number.
            channel_number = channel

        redirect_url += "&channel=%s" % str(channel_number)

        # Optional passthrough parameters.
        duration = request.args.get('duration', default=0, type=int)
        if duration:
            redirect_url += "&duration=%s" % str(duration)

        transcode = request.args.get('transcode', default=None, type=str)
        if transcode:
            redirect_url += "&transcode=%s" % str(transcode)

        # Record the originally requested URL for the tuner API.
        redirect_url += "&accessed=%s" % urllib.parse.quote(request.url)

        return redirect(redirect_url)
|
||||||
31
fHDHR_web/pages/__init__.py
Normal file
@ -0,0 +1,31 @@
|
|||||||
|
|
||||||
|
|
||||||
|
from .index_html import Index_HTML
from .origin_html import Origin_HTML
from .channels_html import Channels_HTML
from .guide_html import Guide_HTML
from .cluster_html import Cluster_HTML
from .tuners_html import Tuners_HTML
from .xmltv_html import xmlTV_HTML
from .version_html import Version_HTML
from .diagnostics_html import Diagnostics_HTML
from .settings_html import Settings_HTML
from .channels_editor import Channels_Editor_HTML


class fHDHR_Pages():
    """Aggregator for all web-UI page handlers.

    Each attribute is a callable page handler; the web framework layer
    discovers them via their `endpoints`/`endpoint_name` class attributes.
    """

    def __init__(self, fhdhr):
        # Shared application object (config, logger, device, etc.).
        self.fhdhr = fhdhr

        self.index_html = Index_HTML(fhdhr)
        self.origin_html = Origin_HTML(fhdhr)
        self.channels_html = Channels_HTML(fhdhr)
        self.channels_editor = Channels_Editor_HTML(fhdhr)
        self.guide_html = Guide_HTML(fhdhr)
        self.cluster_html = Cluster_HTML(fhdhr)
        self.tuners_html = Tuners_HTML(fhdhr)
        self.xmltv_html = xmlTV_HTML(fhdhr)
        self.version_html = Version_HTML(fhdhr)
        self.diagnostics_html = Diagnostics_HTML(fhdhr)
        self.settings_html = Settings_HTML(fhdhr)
|
||||||
23
fHDHR_web/pages/channels_editor.py
Normal file
@ -0,0 +1,23 @@
|
|||||||
|
# BUGFIX: "render_iptvorg-us" was a corrupted (and syntactically invalid)
# substitution for Flask's render_template; restored throughout this file.
from flask import request, render_template


class Channels_Editor_HTML():
    """/channels_editor page: editable list of all channels."""

    endpoints = ["/channels_editor", "/channels_editor.html"]
    endpoint_name = "page_channels_editor_html"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Render channels_editor.html with every known channel."""

        channelslist = []
        for fhdhr_id in list(self.fhdhr.device.channels.list.keys()):
            channel_obj = self.fhdhr.device.channels.list[fhdhr_id]
            # Copy so the template-bound dict can be augmented safely.
            channel_dict = channel_obj.dict.copy()
            channel_dict["play_url"] = channel_obj.play_url
            channelslist.append(channel_dict)

        return render_template('channels_editor.html', request=request, fhdhr=self.fhdhr, channelslist=channelslist)
|
||||||
30
fHDHR_web/pages/channels_html.py
Normal file
@ -0,0 +1,30 @@
|
|||||||
|
# BUGFIX: "render_iptvorg-us" was a corrupted (and syntactically invalid)
# substitution for Flask's render_template; restored throughout this file.
from flask import request, render_template


class Channels_HTML():
    """/channels page: channel list plus enabled/total summary counts."""

    endpoints = ["/channels", "/channels.html"]
    endpoint_name = "page_channels_html"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Render channels.html with the channel list and summary stats."""

        channels_dict = {
                         "Total Channels": len(list(self.fhdhr.device.channels.list.keys())),
                         "Enabled": 0,
                         }

        channelslist = []
        for fhdhr_id in list(self.fhdhr.device.channels.list.keys()):
            channel_obj = self.fhdhr.device.channels.list[fhdhr_id]
            # Copy so the template-bound dict can be augmented safely.
            channel_dict = channel_obj.dict.copy()
            channel_dict["play_url"] = channel_obj.play_url
            channelslist.append(channel_dict)
            if channel_dict["enabled"]:
                channels_dict["Enabled"] += 1

        return render_template('channels.html', request=request, fhdhr=self.fhdhr, channelslist=channelslist, channels_dict=channels_dict, list=list)
|
||||||
50
fHDHR_web/pages/cluster_html.py
Normal file
@ -0,0 +1,50 @@
|
|||||||
|
# BUGFIX: "render_iptvorg-us" was a corrupted (and syntactically invalid)
# substitution for Flask's render_template; restored throughout this file.
from flask import request, render_template
import urllib.parse


class Cluster_HTML():
    """/cluster page: this instance plus any clustered fHDHR instances."""

    endpoints = ["/cluster", "/cluster.html"]
    endpoint_name = "page_cluster_html"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr
        # Entry describing this instance; listed only when a discovery
        # address is configured.
        self.location_dict = {
                              "name": self.fhdhr.config.dict["fhdhr"]["friendlyname"],
                              "location": self.fhdhr.api.base,
                              "joined": "N/A",
                              "url_query": self.fhdhr.api.base_quoted
                              }

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Render cluster.html with one entry per reachable location."""

        locations_list = []

        if self.fhdhr.config.dict["fhdhr"]["discovery_address"]:
            locations_list.append(self.location_dict)

        fhdhr_list = self.fhdhr.device.cluster.get_list()
        for location in list(fhdhr_list.keys()):

            # Prefer the name cached in the cluster; otherwise ask the
            # remote instance for its discover.json.
            if location in list(self.fhdhr.device.cluster.cluster().keys()):
                location_name = self.fhdhr.device.cluster.cluster()[location]["name"]
            else:
                try:
                    location_info_url = location + "/discover.json"
                    location_info_req = self.fhdhr.web.session.get(location_info_url)
                    location_info = location_info_req.json()
                    location_name = location_info["FriendlyName"]
                except self.fhdhr.web.exceptions.ConnectionError:
                    self.fhdhr.logger.error("Unreachable: " + location)
                    # BUGFIX: skip unreachable locations; previously execution
                    # fell through with location_name unbound (NameError on the
                    # first iteration) or stale from a prior iteration.
                    continue

            location_dict = {
                             "name": location_name,
                             "location": location,
                             "joined": str(fhdhr_list[location]["Joined"]),
                             "url_query": urllib.parse.quote(location)
                             }
            locations_list.append(location_dict)

        return render_template('cluster.html', request=request, fhdhr=self.fhdhr, locations_list=locations_list)
|
||||||
125
fHDHR_web/pages/diagnostics_html.py
Normal file
@ -0,0 +1,125 @@
|
|||||||
|
# BUGFIX: "render_iptvorg-us" was a corrupted (and syntactically invalid)
# substitution for Flask's render_template; restored throughout this file.
from flask import request, render_template


class Diagnostics_HTML():
    """/diagnostics page: a table of links to debug/diagnostic endpoints."""

    endpoints = ["/diagnostics", "/diagnostics.html"]
    endpoint_name = "page_diagnostics_html"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Render diagnostics.html with one row per diagnostic endpoint.

        Each entry carries a label plus per-protocol URLs; None/"" mark
        protocols the endpoint does not apply to.
        """

        base_url = request.url_root[:-1]
        # Hoisted: the device key is used by every RMG entry below.
        uuid = self.fhdhr.config.dict["main"]["uuid"]

        button_list = [
            {"label": "Debug Json", "hdhr": None, "rmg": None, "other": "/api/debug"},
            {"label": "Cluster Json", "hdhr": None, "rmg": None, "other": "/api/cluster?method=get"},
            {"label": "Lineup XML", "hdhr": "/lineup.xml", "rmg": None, "other": None},
            {"label": "Lineup JSON", "hdhr": "/hdhr/lineup.json", "rmg": None, "other": None},
            {"label": "Lineup Status", "hdhr": "/hdhr/lineup_status.json", "rmg": None, "other": None},
            {"label": "Discover Json", "hdhr": "/hdhr/discover.json", "rmg": None, "other": None},
            {"label": "Device XML", "hdhr": "/hdhr/device.xml", "rmg": "/rmg/device.xml", "other": None},
            {"label": "RMG Identification XML", "hdhr": "", "rmg": "/rmg", "other": None},
            {"label": "RMG Devices Discover", "hdhr": "", "rmg": "/rmg/devices/discover", "other": None},
            {"label": "RMG Devices Probe", "hdhr": "", "rmg": "/rmg/devices/probe?uri=%s" % base_url, "other": None},
            {"label": "RMG Devices by DeviceKey", "hdhr": "", "rmg": "/rmg/devices/%s" % uuid, "other": None},
            {"label": "RMG Channels by DeviceKey", "hdhr": "", "rmg": "/rmg/devices/%s/channels" % uuid, "other": None},
            {"label": "RMG Scanners by DeviceKey", "hdhr": "", "rmg": "/rmg/devices/%s/scanners" % uuid, "other": None},
            {"label": "RMG Networks by DeviceKey", "hdhr": "", "rmg": "/rmg/devices/%s/networks" % uuid, "other": None},
            {"label": "RMG Scan by DeviceKey", "hdhr": "", "rmg": "/rmg/devices/%s/scan" % uuid, "other": None},
            ]

        return render_template('diagnostics.html', request=request, fhdhr=self.fhdhr, button_list=button_list)
|
||||||
52
fHDHR_web/pages/guide_html.py
Normal file
@ -0,0 +1,52 @@
|
|||||||
|
# BUGFIX: "render_iptvorg-us" was a corrupted (and syntactically invalid)
# substitution for Flask's render_template; restored throughout this file.
from flask import request, render_template
import datetime

from fHDHR.tools import humanized_time


class Guide_HTML():
    """/guide page: what's on now for every channel of the selected EPG source."""

    endpoints = ["/guide", "/guide.html"]
    endpoint_name = "page_guide_html"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Render guide.html for the EPG source in ?source= (or the default)."""

        nowtime = datetime.datetime.utcnow()

        chan_guide_list = []

        # Fall back to the default EPG method if the requested one is invalid.
        source = request.args.get('source', default=self.fhdhr.device.epg.def_method, type=str)
        epg_methods = self.fhdhr.device.epg.valid_epg_methods
        if source not in epg_methods:
            source = self.fhdhr.device.epg.def_method

        for channel in self.fhdhr.device.epg.whats_on_allchans(source):
            # Time remaining in the current listing, human-readable.
            end_time = datetime.datetime.strptime(channel["listing"][0]["time_end"], '%Y%m%d%H%M%S +0000')
            remaining_time = humanized_time(int((end_time - nowtime).total_seconds()))

            chan_dict = {
                         "name": channel["name"],
                         "number": channel["number"],
                         "chan_thumbnail": channel["thumbnail"],
                         "listing_title": channel["listing"][0]["title"],
                         "listing_thumbnail": channel["listing"][0]["thumbnail"],
                         "listing_description": channel["listing"][0]["description"],
                         "remaining_time": str(remaining_time)
                         }
            # For locally-sourced EPG data, enrich with the channel object's
            # own metadata (play URL, enabled state, thumbnail).
            if source in ["blocks", "origin", self.fhdhr.config.dict["main"]["dictpopname"]]:
                chan_obj = self.fhdhr.device.channels.get_channel_obj("origin_id", channel["id"])
                chan_dict["name"] = chan_obj.dict["name"]
                chan_dict["number"] = chan_obj.dict["number"]
                chan_dict["chan_thumbnail"] = chan_obj.thumbnail
                chan_dict["enabled"] = chan_obj.dict["enabled"]
                chan_dict["play_url"] = chan_obj.play_url

            chan_guide_list.append(chan_dict)

        return render_template('guide.html', request=request, fhdhr=self.fhdhr, chan_guide_list=chan_guide_list, epg_methods=epg_methods, source=source)
|
||||||
27
fHDHR_web/pages/index_html.py
Normal file
@ -0,0 +1,27 @@
|
|||||||
|
# BUGFIX: "render_iptvorg-us" was a corrupted (and syntactically invalid)
# substitution for Flask's render_template; restored throughout this file.
from flask import request, render_template


class Index_HTML():
    """/index page: top-level status summary of the running instance."""

    endpoints = ["/index", "/index.html"]
    endpoint_name = "page_index_html"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Render index.html with basic runtime/config stats."""

        tuners_in_use = self.fhdhr.device.tuners.inuse_tuner_count()
        max_tuners = self.fhdhr.device.tuners.max_tuners

        fhdhr_status_dict = {
                             "Script Directory": str(self.fhdhr.config.internal["paths"]["script_dir"]),
                             "Config File": str(self.fhdhr.config.config_file),
                             "Cache Path": str(self.fhdhr.config.internal["paths"]["cache_dir"]),
                             "Total Channels": len(self.fhdhr.device.channels.list),
                             "Tuner Usage": ("%s/%s" % (str(tuners_in_use), str(max_tuners))),
                             }

        return render_template('index.html', request=request, fhdhr=self.fhdhr, fhdhr_status_dict=fhdhr_status_dict, list=list)
|
||||||
18
fHDHR_web/pages/origin_html.py
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
# BUGFIX: "render_iptvorg-us" was a corrupted (and syntactically invalid)
# substitution for Flask's render_template; restored throughout this file.
from flask import request, render_template


class Origin_HTML():
    """/origin page: status of the upstream channel origin."""

    endpoints = ["/origin", "/origin.html"]
    endpoint_name = "page_origin_html"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Render origin.html with the origin's status plus channel count."""

        origin_status_dict = self.fhdhr.originwrapper.get_status_dict()
        origin_status_dict["Total Channels"] = len(self.fhdhr.device.channels.list)
        return render_template('origin.html', request=request, fhdhr=self.fhdhr, origin_status_dict=origin_status_dict, list=list)
|
||||||
33
fHDHR_web/pages/settings_html.py
Normal file
@ -0,0 +1,33 @@
|
|||||||
|
# BUGFIX: "render_iptvorg-us" was a corrupted (and syntactically invalid)
# substitution for Flask's render_template; restored throughout this file.
from flask import request, render_template


class Settings_HTML():
    """/settings page: web-editable subset of the configuration."""

    endpoints = ["/settings", "/settings.html"]
    endpoint_name = "page_settings_html"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Render settings.html with only the config items marked config_web."""

        web_settings_dict = {}
        for config_section in list(self.fhdhr.config.conf_default.keys()):
            web_settings_dict[config_section] = {}

            for config_item in list(self.fhdhr.config.conf_default[config_section].keys()):
                if self.fhdhr.config.conf_default[config_section][config_item]["config_web"]:
                    # The origin's section is stored under "origin" in the
                    # live config dict even though defaults key it by name.
                    real_config_section = config_section
                    if config_section == self.fhdhr.config.dict["main"]["dictpopname"]:
                        real_config_section = "origin"
                    web_settings_dict[config_section][config_item] = {
                        "value": self.fhdhr.config.dict[real_config_section][config_item],
                        "value_default": self.fhdhr.config.conf_default[config_section][config_item]["value"],
                        "hide": self.fhdhr.config.conf_default[config_section][config_item]["config_web_hidden"]
                        }
            # Drop sections with nothing web-editable.
            if not len(web_settings_dict[config_section].keys()):
                del web_settings_dict[config_section]

        return render_template('settings.html', request=request, fhdhr=self.fhdhr, web_settings_dict=web_settings_dict, list=list)
|
||||||
36
fHDHR_web/pages/tuners_html.py
Normal file
@ -0,0 +1,36 @@
|
|||||||
|
# BUGFIX: "render_iptvorg-us" was a corrupted (and syntactically invalid)
# substitution for Flask's render_template; restored throughout this file.
from flask import request, render_template

from fHDHR.tools import humanized_filesize


class Tuners_HTML():
    """/tuners page: per-tuner status table."""

    endpoints = ["/tuners", "/tuners.html"]
    endpoint_name = "page_streams_html"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Render tuners.html with one row per tuner plus a scanning count."""

        tuner_list = []
        tuner_status = self.fhdhr.device.tuners.status()
        tuner_scanning = 0
        for tuner in list(tuner_status.keys()):
            tuner_dict = {
                          "number": str(tuner),
                          "status": str(tuner_status[tuner]["status"]),
                          }
            # Active tuners expose extra stream details.
            if tuner_status[tuner]["status"] == "Active":
                tuner_dict["channel_number"] = tuner_status[tuner]["channel"]
                tuner_dict["method"] = tuner_status[tuner]["method"]
                tuner_dict["play_duration"] = str(tuner_status[tuner]["Play Time"])
                tuner_dict["downloaded"] = humanized_filesize(tuner_status[tuner]["downloaded"])
            elif tuner_status[tuner]["status"] == "Scanning":
                tuner_scanning += 1

            tuner_list.append(tuner_dict)

        return render_template('tuners.html', request=request, fhdhr=self.fhdhr, tuner_list=tuner_list, tuner_scanning=tuner_scanning)
|
||||||
18
fHDHR_web/pages/version_html.py
Normal file
@ -0,0 +1,18 @@
|
|||||||
|
# BUGFIX: "render_iptvorg-us" was a corrupted (and syntactically invalid)
# substitution for Flask's render_template; restored throughout this file.
from flask import request, render_template


class Version_HTML():
    """/version page: component version table."""

    endpoints = ["/version", "/version.html"]
    endpoint_name = "page_version_html"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        """Render version.html with a copy of the tracked component versions."""
        # Shallow copy so the template cannot mutate the live versions table
        # (replaces the previous manual key-by-key copy loop).
        version_dict = dict(self.fhdhr.config.internal["versions"])
        return render_template('version.html', request=request, fhdhr=self.fhdhr, version_dict=version_dict, list=list)
|
||||||