first commit
12
Dockerfile
Normal file
@ -0,0 +1,12 @@
FROM python:3.8-slim

RUN apt-get -qq update && \
    apt-get -qq -y install ffmpeg gcc && \
    apt-get autoclean && \
    rm -rf /var/lib/apt/lists/*

COPY ./ /app/
WORKDIR /app
RUN pip3 install -r requirements.txt

ENTRYPOINT ["python3", "/app/main.py", "--config", "/app/config/config.ini"]
13
LICENSE
Normal file
@ -0,0 +1,13 @@
            DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
                    Version 2, December 2004

 Copyright (C) 2020 Sam Zick <Sam@deathbybandaid.net>

 Everyone is permitted to copy and distribute verbatim or modified
 copies of this license document, and changing it is allowed as long
 as the name is changed.

            DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE
   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION

 0. You just DO WHAT THE FUCK YOU WANT TO.
20
README.md
Normal file
@ -0,0 +1,20 @@
<p align="center">fHDHR_PlutoTV <img src="docs/images/logo.ico" alt="Logo"/></p>


Welcome to the world of streaming content as a DVR device! We use some fancy python here to achieve a system of:

**f**un
**H**ome
**D**istribution
**H**iatus
**R**ecreation


Please check the [Docs](docs/README.md) for installation information.

fHDHR is labeled as beta until we reach v1.0.0.

Join us in [#fHDHR](irc://irc.freenode.net/#fHDHR) on Freenode.


Due to multiple issues, I'm dropping official support for Windows.
47
config.all.ini
Normal file
@ -0,0 +1,47 @@
[main]
# uuid =
# cache_dir =
# servicename = PlutoTV
# reponame = fHDHR_PlutoTV

[fhdhr]
# address = 0.0.0.0
# discovery_address = 0.0.0.0
# port = 5004
# stream_type = direct
# tuner_count = 4
# friendlyname = fHDHR-PlutoTV
# reporting_firmware_name = fHDHR_PlutoTV
# reporting_manufacturer = BoronDust
# reporting_model = fHDHR
# reporting_firmware_ver = 20201001
# reporting_tuner_type = Antenna
# device_auth = fHDHR

[epg]
# images = pass
# method = origin
# update_frequency = 43200

[ffmpeg]
# path = ffmpeg
# bytes_per_read = 1152000

[vlc]
# path = cvlc
# bytes_per_read = 1152000

[direct_stream]
# chunksize = 1048576

[logging]
# level = WARNING

[database]
# type = sqlite
# driver = None

[plutotv]
# username = None
# password = None
# force_best = False
8
config.example.ini
Normal file
@ -0,0 +1,8 @@
[plutotv]
# username = None
# password = None

[fhdhr]
# address = 0.0.0.0
# port = 5004
# discovery_address = 0.0.0.0
0
data/cache/PLACEHOLDER
vendored
Normal file
BIN
data/garamond.ttf
Normal file
148
data/internal_config/fhdhr.json
Normal file
@ -0,0 +1,148 @@
{
    "main":{
        "uuid":{
            "value": "none",
            "config_file": true,
            "config_web": false
        },
        "cache_dir":{
            "value": "none",
            "config_file": true,
            "config_web": true
        },
        "thread_method":{
            "value": "multiprocessing",
            "config_file": true,
            "config_web": true
        }
    },
    "fhdhr":{
        "address":{
            "value": "0.0.0.0",
            "config_file": true,
            "config_web": true
        },
        "discovery_address":{
            "value": "none",
            "config_file": true,
            "config_web": true
        },
        "port":{
            "value": 5004,
            "config_file": true,
            "config_web": true
        },
        "reporting_manufacturer":{
            "value": "BoronDust",
            "config_file": true,
            "config_web": true
        },
        "reporting_model":{
            "value": "fHDHR",
            "config_file": true,
            "config_web": true
        },
        "reporting_firmware_ver":{
            "value": "20201001",
            "config_file": true,
            "config_web": true
        },
        "reporting_tuner_type":{
            "value": "Antenna",
            "config_file": true,
            "config_web": true
        },
        "device_auth":{
            "value": "fHDHR",
            "config_file": true,
            "config_web": true
        },
        "require_auth":{
            "value": false,
            "config_file": true,
            "config_web": true
        }
    },
    "epg":{
        "images":{
            "value": "pass",
            "config_file": true,
            "config_web": true
        }
    },
    "ffmpeg":{
        "path":{
            "value": "ffmpeg",
            "config_file": true,
            "config_web": true
        },
        "bytes_per_read":{
            "value": 1152000,
            "config_file": true,
            "config_web": true
        }
    },
    "vlc":{
        "path":{
            "value": "cvlc",
            "config_file": true,
            "config_web": true
        },
        "bytes_per_read":{
            "value": 1152000,
            "config_file": true,
            "config_web": true
        }
    },
    "direct_stream":{
        "chunksize":{
            "value": 1048576,
            "config_file": true,
            "config_web": true
        }
    },
    "logging":{
        "level":{
            "value": "WARNING",
            "config_file": true,
            "config_web": true
        }
    },
    "database":{
        "type":{
            "value": "sqlite",
            "config_file": true,
            "config_web": false
        },
        "driver":{
            "value": "none",
            "config_file": true,
            "config_web": false
        },
        "user":{
            "value": "none",
            "config_file": true,
            "config_web": false
        },
        "pass":{
            "value": "none",
            "config_file": true,
            "config_web": false
        },
        "host":{
            "value": "none",
            "config_file": true,
            "config_web": false
        },
        "port":{
            "value": "none",
            "config_file": true,
            "config_web": false
        },
        "name":{
            "value": "none",
            "config_file": true,
            "config_web": false
        }
    }
}
81
data/internal_config/serviceconf.json
Normal file
@ -0,0 +1,81 @@
{
    "main":{
        "servicename":{
            "value": "PlutoTV",
            "config_file": false,
            "config_web": false
        },
        "dictpopname":{
            "value": "plutotv",
            "config_file": false,
            "config_web": false
        },
        "reponame":{
            "value": "fHDHR_PlutoTV",
            "config_file": false,
            "config_web": false
        },
        "valid_epg_methods":{
            "value": "None,blocks,origin",
            "config_file": false,
            "config_web": false
        },
        "required":{
            "value": "none",
            "config_file": false,
            "config_web": false
        }
    },
    "fhdhr":{
        "friendlyname":{
            "value": "fHDHR-PlutoTV",
            "config_file": true,
            "config_web": true
        },
        "stream_type":{
            "value": "direct",
            "config_file": true,
            "config_web": true
        },
        "tuner_count":{
            "value": 4,
            "config_file": true,
            "config_web": true
        },
        "reporting_firmware_name":{
            "value": "fHDHR_PlutoTV",
            "config_file": true,
            "config_web": true
        }
    },
    "epg":{
        "method":{
            "value": "origin",
            "config_file": true,
            "config_web": true
        },
        "update_frequency":{
            "value": 14400,
            "config_file": true,
            "config_web": true
        }
    },
    "plutotv":{
        "username":{
            "value": "none",
            "config_file": true,
            "config_web": true
        },
        "password":{
            "value": "none",
            "config_file": true,
            "config_web": true,
            "config_web_hidden": true
        },
        "force_best":{
            "value": false,
            "config_file": true,
            "config_web": true
        }
    }
}
BIN
data/www/favicon.ico
Normal file
After Width: | Height: | Size: 15 KiB |
8
data/www/style.css
Normal file
@ -0,0 +1,8 @@
.pull-right { float: right; }

.pull-left { float: left; }

.center {
    margin-left: auto;
    margin-right: auto;
}
56
data/www/templates/base.html
Normal file
@ -0,0 +1,56 @@
<html>
<head>
    <title>{{ fhdhr.config.dict["fhdhr"]["friendlyname"] }}</title>
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <style>
        table, th, td {border: 1px solid black;}
    </style>
    <link href="style.css" rel="stylesheet">
</head>
<body>
<h1 style="text-align: center;">
    <span style="text-decoration: underline;"><strong><em>{{ fhdhr.config.dict["fhdhr"]["friendlyname"] }}</em></strong>
    </span>
    <img class="pull-left" src="/favicon.ico" alt="fHDHR Logo" width="100" height="100">
</h1>
<br><br>
<div>

    <button class="pull-left" onclick="OpenLink('/')">fHDHR</button>
    <button class="pull-left" onclick="OpenLink('/origin')">{{ fhdhr.config.dict["main"]["servicename"] }}</button>
    <button class="pull-left" onclick="OpenLink('/channels')">Channels</button>
    <button class="pull-left" onclick="OpenLink('/guide')">Guide</button>
    <button class="pull-left" onclick="OpenLink('/cluster')">Cluster</button>
    <button class="pull-left" onclick="OpenLink('/streams')">Streams</button>
    <button class="pull-left" onclick="OpenLink('/xmltv')">xmltv</button>
    <button class="pull-left" onclick="OpenLink('/version')">Version</button>
    <button class="pull-left" onclick="OpenLink('/diagnostics')">Diagnostics</button>
    <button class="pull-left" onclick="OpenLink('/settings')">Settings</button>

    <a class="pull-right" style="padding: 5px;" href="/api/xmltv?method=get&source={{ fhdhr.device.epg.def_method }}">xmltv</a>
    <a class="pull-right" style="padding: 5px;" href="/api/m3u?method=get&channel=all">m3u</a>

</div>
<hr align="center" width="100%">

{% set locations = fhdhr.device.cluster.get_cluster_dicts_web() %}
{% if locations %}
<div>
    {% for location in locations %}
    <button class="pull-left" onclick="OpenLink('{{ location["base_url"] }}')">{{ location["name"] }}</button>
    {% endfor %}
</div>
<hr align="center" width="100%">
{% endif %}

{% set retmessage = request.args.get('retmessage', default=None) %}
{% if retmessage %}
<p>{{ retmessage }}</p>
{% endif %}

{% block content %}{% endblock %}
</body>
</html>
<script>
    function OpenLink(NewURL) {window.open(NewURL, "_self");}
</script>
57
data/www/templates/channels.html
Normal file
@ -0,0 +1,57 @@
{% extends "base.html" %}

{% block content %}

<h4 style="text-align: center;">What's On {{ fhdhr.config.dict["fhdhr"]["friendlyname"] }}</h4>

<table class="center" style="width:100%">
  <tr>
    <th>Play</th>
    <th>Channel Name</th>
    <th>Channel CallSign</th>
    <th>Channel Number</th>
    <th>Status</th>
    <th>Update</th>
    <th>Reset</th>
  </tr>

  {% for chan_dict in channelslist %}
  <tr>
    <td>
      {% if chan_dict["enabled"] %}
      <a href="{{ chan_dict["play_url"] }}">Play</a>
      {% endif %}
    </td>

    <form method="post" action="/api/channels?method=update&redirect=%2Fchannels">
      <input type="hidden" name="id" value="{{ chan_dict["id"] }}">
      <td data-th="Channel Name"><input type="text" size="50" name="name" value="{{ chan_dict["name"] }}"></td>
      <td data-th="Channel CallSign"><input type="text" size="50" name="callsign" value="{{ chan_dict["callsign"] }}"></td>
      <td data-th="Channel Number"><input type="text" size="50" name="number" value="{{ chan_dict["number"] }}"></td>
      <td>
        <select name="enabled">
          {% if chan_dict["enabled"] %}
          <option value="True" selected>Enabled</option>
          <option value="False">Disabled</option>
          {% else %}
          <option value="True">Enabled</option>
          <option value="False" selected>Disabled</option>
          {% endif %}
        </select>
      </td>
      <td data-th="Update"><input type="submit" value="Update"></td>
    </form>

    <form method="post" action="/api/channels?method=update&redirect=%2Fchannels">
      <input type="hidden" name="id" value="{{ chan_dict["id"] }}">
      <input type="hidden" name="name" value="{{ chan_dict["origin_name"] }}">
      <input type="hidden" name="callsign" value="{{ chan_dict["origin_callsign"] }}">
      <input type="hidden" name="number" value="{{ chan_dict["origin_number"] }}">
      <input type="hidden" name="enabled" value="True">
      <td data-th="Reset"><input type="submit" value="Reset"></td>
    </form>

  </tr>
  {% endfor %}

{% endblock %}
50
data/www/templates/cluster.html
Normal file
@ -0,0 +1,50 @@
{% extends "base.html" %}

{% block content %}

<h4 style="text-align: center;">Cluster</h4>
{% if not fhdhr.config.dict["fhdhr"]["discovery_address"] %}
<p style="text-align: center;">Discovery Address must be set for SSDP/Cluster</p>
{% else %}

<div style="text-align: center;">
  <button onclick="OpenLink('/api/cluster?method=scan&redirect=%2Fcluster')">Force Scan</button>
  <button onclick="OpenLink('/api/cluster?method=disconnect&redirect=%2Fcluster')">Disconnect</button>
</div>
<br>

<table class="center" style="width:50%">
  <tr>
    <th>Name</th>
    <th>Location</th>
    <th>Joined</th>
    <th>Options</th>
  </tr>

  {% for location in locations_list %}
  <tr>
    <td>{{ location["name"] }}</td>
    <td>{{ location["location"] }}</td>
    <td>{{ location["joined"] }}</td>

    <td>
      <div>

        {% if location["joined"] in ["True", "False"] %}
        <button onclick="OpenLink('{{ location["location"] }}')">Visit</button>
        {% endif %}

        {% if location["joined"] == "True" %}
        <button onclick="OpenLink('/api/cluster?method=del&location={{ location["url_query"] }}&redirect=%2Fcluster')">Remove</button>
        {% elif location["joined"] == "False" %}
        <button onclick="OpenLink('/api/cluster?method=add&location={{ location["url_query"] }}&redirect=%2Fcluster')">Add</button>
        {% endif %}

      </div>
    </td>
  </tr>
  {% endfor %}

{% endif %}

{% endblock %}
11
data/www/templates/diagnostics.html
Normal file
@ -0,0 +1,11 @@
{% extends "base.html" %}

{% block content %}

{% for button_item in button_list %}
<div style="text-align: center;">
  <p><button onclick="OpenLink('{{ button_item[1] }}')">{{ button_item[0] }}</button></p>
</div>
{% endfor %}

{% endblock %}
36
data/www/templates/guide.html
Normal file
@ -0,0 +1,36 @@
{% extends "base.html" %}

{% block content %}

<h4 style="text-align: center;">What's On {{ fhdhr.config.dict["fhdhr"]["friendlyname"] }}</h4>

<p>
{% for epg_method in epg_methods %}
  <button onclick="OpenLink('/guide?source={{ epg_method }}')">{{ epg_method }}</button>
{% endfor %}
</p>

<table class="center" style="width:100%">
  <tr>
    <th>Channel Name</th>
    <th>Channel Number</th>
    <th>Channel Thumbnail</th>
    <th>Content Title</th>
    <th>Content Thumbnail</th>
    <th>Content Description</th>
    <th>Content Remaining Time</th>
  </tr>

  {% for chan_dict in chan_guide_list %}
  <tr>
    <td>{{ chan_dict["name"] }}</td>
    <td>{{ chan_dict["number"] }}</td>
    <td><img src="{{ chan_dict["chan_thumbnail"] }}" alt="{{ chan_dict["name"] }}" width="100" height="100"></td>
    <td>{{ chan_dict["listing_title"] }}</td>
    <td><img src="{{ chan_dict["listing_thumbnail"] }}" alt="{{ chan_dict["listing_title"] }}" width="100" height="100"></td>
    <td>{{ chan_dict["listing_description"] }}</td>
    <td>{{ chan_dict["remaining_time"] }}</td>
  </tr>
  {% endfor %}

{% endblock %}
20
data/www/templates/index.html
Normal file
@ -0,0 +1,20 @@
{% extends "base.html" %}

{% block content %}

<h4 style="text-align: center;">fHDHR Status</h4>

<table class="center" style="width:50%">
  <tr>
    <th></th>
    <th></th>
  </tr>

  {% for key in list(fhdhr_status_dict.keys()) %}
  <tr>
    <td>{{ key }}</td>
    <td>{{ fhdhr_status_dict[key] }}</td>
  </tr>
  {% endfor %}

{% endblock %}
25
data/www/templates/origin.html
Normal file
@ -0,0 +1,25 @@
{% extends "base.html" %}

{% block content %}

<h4 style="text-align: center;">{{ fhdhr.config.dict["main"]["servicename"] }} Status</h4>

<div style="text-align: center;">
  <button onclick="OpenLink('/api/channels?method=scan&redirect=%2Forigin')">Force Channel Update</button>
</div>
<br>

<table class="center" style="width:50%">
  <tr>
    <th></th>
    <th></th>
  </tr>

  {% for key in list(origin_status_dict.keys()) %}
  <tr>
    <td>{{ key }}</td>
    <td>{{ origin_status_dict[key] }}</td>
  </tr>
  {% endfor %}

{% endblock %}
60
data/www/templates/settings.html
Normal file
@ -0,0 +1,60 @@
{% extends "base.html" %}

{% block content %}

<h4 style="text-align: center;">fHDHR Settings</h4>

<h4 style="text-align: center;">Settings will require a manual restart.</h4>

{% for config_section in list(web_settings_dict.keys()) %}

  {% if config_section == "origin" %}
  <h4 style="text-align: center;">{{ fhdhr.config.dict["main"]["dictpopname"] }}</h4>
  {% else %}
  <h4 style="text-align: center;">{{ config_section }}</h4>
  {% endif %}

  <table class="center" style="width:100%">
    <tr>
      <th>Config Name</th>
      <th>Config Default Value</th>
      <th>Config Value</th>
      <th>Update</th>
      <th>Reset</th>
    </tr>

    {% for config_item in list(web_settings_dict[config_section].keys()) %}

    <tr>
      <td data-th="Config Name">{{ config_item }}</td>

      <td data-th="Config Default Value">{{ web_settings_dict[config_section][config_item]["value_default"] }}</td>

      <form method="post" action="/api/settings?method=update&redirect=%2Fsettings">
        <input type="hidden" name="config_section" value="{{ config_section }}">
        <input type="hidden" name="config_name" value="{{ config_item }}">
        <input type="hidden" name="config_default" value="{{ web_settings_dict[config_section][config_item]["value_default"] }}">
        {% if web_settings_dict[config_section][config_item]["hide"] %}
        <td data-th="Config Value"><input type="text" size="50" name="config_value" value="**************"></td>
        {% else %}
        <td data-th="Config Value"><input type="text" size="50" name="config_value" value="{{ web_settings_dict[config_section][config_item]["value"] }}"></td>
        {% endif %}
        <td data-th="Update"><input type="submit" value="Update"></td>
      </form>

      <form method="post" action="/api/settings?method=update&redirect=%2Fsettings">
        <input type="hidden" name="config_section" value="{{ config_section }}">
        <input type="hidden" name="config_name" value="{{ config_item }}">
        <input type="hidden" name="config_value" value="{{ web_settings_dict[config_section][config_item]["value_default"] }}">
        <input type="hidden" name="config_default" value="{{ web_settings_dict[config_section][config_item]["value_default"] }}">
        <td data-th="Reset"><input type="submit" value="Reset"></td>
      </form>

    </tr>

    {% endfor %}
  </table>

{% endfor %}

{% endblock %}
40
data/www/templates/streams.html
Normal file
@ -0,0 +1,40 @@
{% extends "base.html" %}

{% block content %}

<h4 style="text-align: center;">fHDHR Streams</h4>

<table class="center" style="width:100%">
  <tr>
    <th>Tuner</th>
    <th>Status</th>
    <th>Channel</th>
    <th>Method</th>
    <th>Time Active</th>
    <th>Options</th>
  </tr>

  {% for tuner_dict in tuner_list %}
  <tr>
    <td>{{ tuner_dict["number"] }}</td>
    <td>{{ tuner_dict["status"] }}</td>
    {% if tuner_dict["status"] == "Active" %}
    <td>{{ tuner_dict["channel_number"] }}</td>
    <td>{{ tuner_dict["method"] }}</td>
    <td>{{ tuner_dict["play_duration"] }}</td>
    {% else %}
    <td>N/A</td>
    <td>N/A</td>
    <td>N/A</td>
    {% endif %}
    <td>
      <div>
        {% if tuner_dict["status"] in ["Active", "Acquired"] %}
        <button onclick="OpenLink('/api/watch?method=close&tuner={{ tuner_dict["number"] }}&redirect=%2Fstreams')">Close</button>
        {% endif %}
      </div>
    </td>
  </tr>
  {% endfor %}

{% endblock %}
20
data/www/templates/version.html
Normal file
@ -0,0 +1,20 @@
{% extends "base.html" %}

{% block content %}

<h4 style="text-align: center;">fHDHR Version Information</h4>

<table class="center" style="width:50%">
  <tr>
    <th></th>
    <th></th>
  </tr>

  {% for key in list(version_dict.keys()) %}
  <tr>
    <td>{{ key }}</td>
    <td>{{ version_dict[key] }}</td>
  </tr>
  {% endfor %}

{% endblock %}
38
data/www/templates/xmltv.html
Normal file
@ -0,0 +1,38 @@
{% extends "base.html" %}

{% block content %}

<h4 style="text-align: center;">xmltv</h4>

<table class="center" style="width:50%">
  <tr>
    <th>Version</th>
    <th>XMLTV Link</th>
    <th>EPG Link</th>
    <th>Options</th>
  </tr>

  {% for epg_method in fhdhr.config.dict["main"]["valid_epg_methods"] %}
  {% if epg_method not in [None, "None"] %}
  {% set epg_method_name = epg_method %}
  {% if epg_method == "origin" %}
  {% set epg_method_name = fhdhr.config.dict["main"]["dictpopname"] %}
  {% endif %}

  <tr>
    <td>{{ epg_method_name }}</td>
    <td><a href="/api/xmltv?method=get&source={{ epg_method }}">{{ epg_method_name }}</a></td>
    <td><a href="/api/epg?method=get&source={{ epg_method }}">{{ epg_method_name }}</a></td>
    <td>
      <div>
        <button onclick="OpenLink('/api/xmltv?method=update&source={{ epg_method }}&redirect=%2Fxmltv')">Update</button>
        <button onclick="OpenLink('/api/xmltv?method=clearcache&source={{ epg_method }}&redirect=%2Fxmltv')">Clear Cache</button>
      </div>
    </td>
  </tr>

  {% endif %}
  {% endfor %}

{% endblock %}
129
docs/ADV_Config.md
Normal file
@ -0,0 +1,129 @@
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>

---
[Main](README.md) | [Setup and Usage](Usage.md) | [PlutoTV](Origin.md) | [Credits/Related Projects](Related-Projects.md)
---
**f**un
**H**ome
**D**istribution
**H**iatus
**R**ecreation

---

[Basic Configuration](Config.md) | [Advanced Configuration](ADV_Config.md) | [WebUI](WebUI.md)

---

Here, we'll break down all of the configuration options per section.

## Main
Here's the `main` section.
* `uuid` will be created automatically; you need not worry about this.
* `cache_dir` is handy for keeping cached files out of the script directory. This is helpful for reinstalls as well as development.

````
[main]
# uuid =
# cache_dir =
````

## fhdhr

The `fhdhr` section contains all the configuration options for interfacing between this script and your media platform.
* `address` and `port` are what we allow the script to listen on. `0.0.0.0` is the default, and will respond on all interfaces.
* `discovery_address` may be helpful for making SSDP work properly. If `address` is not `0.0.0.0`, we will use that. If this is not set to a real IP, we won't run SSDP. SSDP is only really helpful for discovery in Plex/Emby. It's a wasted resource since you can manually add the `ip:port` of the script to Plex.
* `tuner_count` limits the number of devices able to stream from the script at once.
* `friendlyname` sets the name that Plex sees the script as.
* `stream_type` can be set to `ffmpeg`, `vlc` or `direct`.


````
[fhdhr]
# address = 0.0.0.0
# discovery_address = 0.0.0.0
# port = 5004
# stream_type = direct
# tuner_count = 4
# friendlyname = fHDHR-PlutoTV
# reporting_firmware_name = fHDHR_PlutoTV
# reporting_manufacturer = BoronDust
# reporting_model = fHDHR
# reporting_firmware_ver = 20201001
# reporting_tuner_type = Antenna
# device_auth = fHDHR
````

## EPG
* `images` can be set to `proxy` or `pass`. If you choose `proxy`, images will be reverse proxied through fHDHR.
* `method` defaults to `origin` and will pull the xmltv data from PlutoTV. Other options include `blocks`, which is an hourly schedule with minimal channel information, and `zap2it`, which is another source of EPG information. Channel numbers may need to be manually mapped.
* `update_frequency` determines how often we check for new scheduling information, in seconds.

````
[epg]
# images = pass
# method = origin
# update_frequency = 43200
````

## ffmpeg

The `ffmpeg` section includes:
* `path` is useful if ffmpeg is not in your system's PATH, or you want to specify it manually.
* `bytes_per_read` determines how many bytes of the stream to read before sending the data to your client. Increasing this value may cause longer load times, and lowering it may cause stuttering.

````
[ffmpeg]
# path = ffmpeg
# bytes_per_read = 1152000
````

## vlc

The `vlc` section includes:
* `path` is useful if VLC (`cvlc`) is not in your system's PATH, or you want to specify it manually.
* `bytes_per_read` determines how many bytes of the stream to read before sending the data to your client. Increasing this value may cause longer load times, and lowering it may cause stuttering.

````
[vlc]
# path = cvlc
# bytes_per_read = 1152000
````

## direct_stream

The `direct_stream` section applies when you set `[fhdhr]` `stream_type` to `direct`.
* `chunksize` is how much data to read at a time.

````
[direct_stream]
# chunksize = 1024*1024
````

## Logging
* `level` determines the amount of logging you wish to see in the console, as well as in the logfile (stored in your cache directory).

````
[logging]
# level = WARNING
````

## Database
* Experiment with these settings at your own risk. We use sqlalchemy to provide database options, but we default to sqlite.

TODO: improve documentation here.

````
[database]
# type = sqlite
# driver = None
````
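
As a loose sketch of what pointing fHDHR at an external database might look like: the `user`, `pass`, `host`, `port`, and `name` keys do exist in the internal defaults, but the values below (and the `driver` string) are purely illustrative and depend on your own sqlalchemy setup.

````
[database]
# type = mysql
# driver = pymysql
# user = fhdhr
# pass = changeme
# host = 192.168.1.50
# port = 3306
# name = fhdhr
````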

## PlutoTV
The `plutotv` section holds your PlutoTV sign-in credentials.

````
[plutotv]
# username =
# password =
````
41
docs/Config.md
Normal file
@ -0,0 +1,41 @@
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>

---
[Main](README.md) | [Setup and Usage](Usage.md) | [PlutoTV](Origin.md) | [Credits/Related Projects](Related-Projects.md)
---
**f**un
**H**ome
**D**istribution
**H**iatus
**R**ecreation

---

[Basic Configuration](Config.md) | [Advanced Configuration](ADV_Config.md) | [WebUI](WebUI.md)

---

The example config file contains all of the things that the typical user may need to fill out.

Please see the Advanced Configuration page for more information.

## fHDHR

Under `fhdhr`, you'll find two addresses listed. `0.0.0.0` works great as the listen address; however, SSDP seems to work best if the discovery address is set to the actual IP the service can be reached at.

````
[fhdhr]
# address = 0.0.0.0
# port = 5004
# discovery_address = 0.0.0.0
````

## PlutoTV

PlutoTV requires sign-in credentials, so add those.

````
[plutotv]
# username =
# password =
````
15
docs/Origin.md
Normal file
@ -0,0 +1,15 @@
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>

---
[Main](README.md) | [Setup and Usage](Usage.md) | [PlutoTV](Origin.md) | [Credits/Related Projects](Related-Projects.md)
---
**f**un
**H**ome
**D**istribution
**H**iatus
**R**ecreation

---


This variant of fHDHR connects to [PlutoTV](https://pluto.tv/about-us).
46
docs/README.md
Normal file
@ -0,0 +1,46 @@
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>

---
[Main](README.md) | [Setup and Usage](Usage.md) | [PlutoTV](Origin.md) | [Credits/Related Projects](Related-Projects.md)
---
**f**un
**H**ome
**D**istribution
**H**iatus
**R**ecreation

---

# The Boring Disclaimers (at the top of the docs for a reason)


fHDHR is a Python service to take various sources of video and make them accessible to client software including, but not limited to:

* [Plex](https://www.plex.tv/)
* [Emby](https://emby.media/)
* [Jellyfin](https://jellyfin.org/)
* [Channels](https://getchannels.com/)

fHDHR is not directly affiliated with the above client software, and you will receive NO support for this script via their forums.

fHDHR is able to connect to clients by emulating a piece of hardware called the [HDHomeRun from SiliconDust](https://www.silicondust.com/). fHDHR is in NO way affiliated with SiliconDust, and is NOT an HDHomeRun device. fHDHR simply uses the API structure used by the authentic HDHomeRun to connect to client DVR solutions.

# History

I got the Hauppauge QuadHD and the Mohu Sail as a pandemic project. All was working fine within Plex, but I also have Emby set up as a backup to Plex for when auth is broken.

I thought to myself, "Self, I should look on GitHub for a way to share my TV tuner between the two".

That's when I tried both npvrProxy with NextPVR as well as tvhProxy with TVHeadend. I had to tinker with both to get them working, but I started testing which one I liked more.

Around this same time, I stumbled upon [locast2plex by tgorgdotcom](https://github.com/tgorgdotcom/locast2plex). I wanted to contribute to that project to get it to a point that I could fork it to work for other video stream sources.

The locast2plex code development wasn't going quite fast enough for the feature-creep in my head.

I then proceeded to create the initial iteration of fHDHR, which I originally called "FakeHDHR". I've rewritten the core functionality a few times before landing on the current code structure, which feels 'right'.

I've worked really hard to create a structure that makes it simple for new variants of the core code to work with different 'origin' streams. Combining these works really well with [xTeVe](https://github.com/xteve-project/xTeVe).

One of the variants goes as far as scraping a table from a PDF file to create a channel guide!

I can easily create more variants of the project for other video sources. For paid services, I could potentially accept donations, as I don't want to pay out of pocket to develop for multiple platforms.
26
docs/Related-Projects.md
Normal file
@ -0,0 +1,26 @@
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>

---
[Main](README.md) | [Setup and Usage](Usage.md) | [PlutoTV](Origin.md) | [Credits/Related Projects](Related-Projects.md)
---
**f**un
**H**ome
**D**istribution
**H**iatus
**R**ecreation

---


While the fHDHR repos share very little code with the projects below, they were a source of inspiration:

* [tvhProxy by jkaberg](https://github.com/jkaberg/tvhProxy)
* [locast2plex by tgorgdotcom](https://github.com/tgorgdotcom/locast2plex)


Aside from the above, these other projects are worth a look as well:

* [npvrProxy](https://github.com/rogueosb/npvrProxy)
* [xTeVe](https://xteve.de/)
* [telly](https://github.com/tellytv/telly)
* [dizquetv](https://github.com/vexorian/dizquetv)
129
docs/Usage.md
Normal file
@ -0,0 +1,129 @@
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>

---
[Main](README.md) | [Setup and Usage](Usage.md) | [PlutoTV](Origin.md) | [Credits/Related Projects](Related-Projects.md)
---
**f**un
**H**ome
**D**istribution
**H**iatus
**R**ecreation

---

[Basic Configuration](Config.md) | [Advanced Configuration](ADV_Config.md) | [WebUI](WebUI.md)

---

# Author Notes

* All testing is currently done in a Proxmox LXC, Ubuntu 20.04, Python 3.8


# Prerequisites

* A Linux or Mac "Server". Windows currently does not work. A "Server" is a computer that is typically always online.
* Python 3.7 or later.
* Consult [This Page](Origin.md) for additional setup specific to this variant of fHDHR.


# Optional Prerequisites
* If you intend to use Docker, [This Guide](https://docs.docker.com/get-started/) should help you get started. The author of fHDHR is not a Docker user, but will still try to help.

fHDHR uses direct connections with video sources by default. Alternatively, you can install one of the tools below and update the [config](Config.md) accordingly. You will need to make these available in your system's PATH, or manually set their path via the config file.

* ffmpeg
* vlc


# Installation

## Linux

* Download the zip, or git clone
* Navigate into your script directory and run `pip3 install -r requirements.txt`
* Copy the included `config.example.ini` file to a known location. The script will not run without this. There is no default configuration file location. [Modify the configuration file to suit your needs.](Config.md)

* Run the script with `python3 main.py`, passing the path to your config file via `-c`/`--config`, as shown below.
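
A minimal invocation looks like this (the config path shown is only an example; use wherever you copied your config file):

```
python3 main.py --config /path/to/config.ini
```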


## Docker
This portion of the guide assumes you are using a Linux system with both docker and docker-compose installed. This (or some variation thereof) may work on Mac or Windows, but has not been tested.

* this guide assumes we wish to use the `~/fhdhr` directory for our install (you can use whatever directory you like, just make the appropriate changes elsewhere in this guide) and that we are installing for PlutoTV support
* run the following commands to clone the repo into `~/fhdhr/fHDHR_PlutoTV`
```
cd ~/fhdhr
git clone https://github.com/fHDHR/fHDHR_PlutoTV.git
```
* create your config.ini file (as described earlier in this guide) in the `~/fhdhr/fHDHR_PlutoTV` directory
* while still in the `~/fhdhr` directory, create the following `docker-compose.yml` file
```
version: '3'

services:
  plutotv:
    build: ./fHDHR_PlutoTV
    container_name: plutotv
    network_mode: host
    volumes:
      - ./fHDHR_PlutoTV/config.ini:/app/config/config.ini
```
* run the following command to build and launch the container
```
docker-compose up --build -d plutotv
```

After a short period of time (during which docker will build your new fHDHR container), you should now have a working build of fHDHR running inside a docker container.

As the code changes and new versions / bug fixes are released, at any point you can pull the latest version of the code and rebuild your container with the following commands:
```
cd ~/fhdhr/fHDHR_PlutoTV
git checkout master
git pull
cd ~/fhdhr
docker-compose up --build -d plutotv
```
<hr />

You can also run multiple instances of fHDHR to support additional sources by cloning the appropriate repo into your `~/fhdhr` directory and adding the necessary services to the docker-compose file we created above.

* for example, if you also wanted support for another fHDHR variant, you would clone that variant's repository in the same way (the PlutoTV repo is shown here as a stand-in):
```
cd ~/fhdhr
git clone https://github.com/fHDHR/fHDHR_PlutoTV.git
```
* **NOTE**: if you are running multiple services on the same machine, you must change the port in your config.ini file for each one. For example, if the first instance is using the default port of 5004, a second instance cannot also use that port. You must change the port in its config.ini file to something else (5005, for example).
* add the second instance as a service in your `docker-compose.yml` file (the second service name and paths below are placeholders; point them at whichever variant you cloned)
```
version: '3'

services:
  plutotv:
    build: ./fHDHR_PlutoTV
    container_name: plutotv
    network_mode: host
    volumes:
      - ./fHDHR_PlutoTV/config.ini:/app/config/config.ini

  plutotv2:
    build: ./fHDHR_PlutoTV
    container_name: plutotv2
    network_mode: host
    volumes:
      - ./fHDHR_PlutoTV/config.ini:/app/config/config.ini
```
* run the following command to build and launch the container
```
docker-compose up --build -d plutotv
```

You can repeat these instructions for as many fHDHR containers as your system resources will allow.

# Setup

Now that you have fHDHR running, you can navigate (in a web browser) to the IP:Port from the configuration step above.

If you did not set up a `discovery_address` in your config, SSDP will be disabled. This is not a problem, as clients like Plex can have the IP:Port entered manually!

You can copy the xmltv link from the webUI and use that in your client software to provide Channel Guide information.
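
For reference, the copied xmltv link takes this general form (a hypothetical example assuming the default port of `5004`; substitute your server's IP and whichever EPG source you actually use):

```
http://SERVER_IP:5004/api/xmltv?method=get&source=origin
```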
98
docs/WebUI.md
Normal file
@ -0,0 +1,98 @@
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>

---
[Main](README.md) | [Setup and Usage](Usage.md) | [PlutoTV](Origin.md) | [Credits/Related Projects](Related-Projects.md)
---
**f**un
**H**ome
**D**istribution
**H**iatus
**R**ecreation

---

[Basic Configuration](Config.md) | [Advanced Configuration](ADV_Config.md) | [WebUI](WebUI.md)

---


This page introduces basic handling of the script from the Web Interface provided at IP:Port.

The pages are available via the buttons at the top; links to xmltv and m3u are also provided there for ease of access.


# Main Landing Page

Below is the main landing page with basic information.

<img src="screenshots/webui_main.PNG" alt="Main Page"/>

# PlutoTV

Here you will have access to some basic information about the service we are proxying.

The webUI will still work, even if setup didn't go smoothly.

<img src="screenshots/webui_origin.PNG" alt="Origin Page"/>

# Guide

This page gives you information about what is currently playing on all stations. It will also show the time remaining for each item.

* Note: The Play link in the left-hand column can be copied to play a channel in VLC media player!

<img src="screenshots/webui_guide.PNG" alt="Guide Page"/>


# Cluster

Since SSDP is used for service discovery, I decided to also use it for ease of management.

This tab will not have the below options if SSDP isn't running.

Joining a cluster will provide a second row of buttons for the clustered servers.

Unjoined:

<img src="screenshots/webui_cluster_unjoined.PNG" alt="Cluster Page, UnJoined"/>

Joined:

<img src="screenshots/webui_cluster_joined.PNG" alt="Cluster Page, Joined"/>


# Streams

This page will show all active streams and tuner information. You can also terminate a stream from here.

* Note: Clients will often have an amount buffered, and the connection termination is not immediate from a viewing perspective. However, the connection to the source is indeed cut off.

<img src="screenshots/webui_streams.PNG" alt="Streams Page"/>

# xmltv

This page will give you access to all the xmltv formats provided by this variant.

From here, you can manually update, or even clear the cached epg and then update.

<img src="screenshots/webui_xmltv.PNG" alt="xmltv Page"/>

# Version

This page will give valuable information about the environment the script is being run in.

<img src="screenshots/webui_version.PNG" alt="Version Page"/>

# Diagnostics

This page has various links to the json/xml files that make the magic work, as well as debug and cluster information.

<img src="screenshots/webui_diagnostics.PNG" alt="Diagnostics Page"/>

# Settings

This page allows viewing/changing all possible configuration options.

* Note: This will require a restart of the script to have any effect.

<img src="screenshots/webui_settings.PNG" alt="Settings Page"/>
BIN
docs/images/logo.ico
Normal file
After Width: | Height: | Size: 15 KiB |
BIN
docs/screenshots/webui_cluster_joined.PNG
Normal file
After Width: | Height: | Size: 30 KiB |
BIN
docs/screenshots/webui_cluster_unjoined.PNG
Normal file
After Width: | Height: | Size: 27 KiB |
BIN
docs/screenshots/webui_diagnostics.PNG
Normal file
After Width: | Height: | Size: 23 KiB |
BIN
docs/screenshots/webui_guide.PNG
Normal file
After Width: | Height: | Size: 137 KiB |
BIN
docs/screenshots/webui_main.PNG
Normal file
After Width: | Height: | Size: 23 KiB |
BIN
docs/screenshots/webui_origin.PNG
Normal file
After Width: | Height: | Size: 30 KiB |
BIN
docs/screenshots/webui_settings.PNG
Normal file
After Width: | Height: | Size: 53 KiB |
BIN
docs/screenshots/webui_streams.PNG
Normal file
After Width: | Height: | Size: 39 KiB |
BIN
docs/screenshots/webui_version.PNG
Normal file
After Width: | Height: | Size: 26 KiB |
BIN
docs/screenshots/webui_xmltv.PNG
Normal file
After Width: | Height: | Size: 27 KiB |
34
fHDHR/__init__.py
Normal file
@ -0,0 +1,34 @@
# coding=utf-8

from .origin import OriginServiceWrapper
from .device import fHDHR_Device

import fHDHR.tools

fHDHR_VERSION = "v0.4.0-beta"


class fHDHR_INT_OBJ():

    def __init__(self, settings, logger, db):
        self.version = fHDHR_VERSION
        self.config = settings
        self.logger = logger
        self.db = db

        self.web = fHDHR.tools.WebReq()


class fHDHR_OBJ():

    def __init__(self, settings, logger, db):
        self.fhdhr = fHDHR_INT_OBJ(settings, logger, db)

        self.origin = OriginServiceWrapper(self.fhdhr)

        self.device = fHDHR_Device(self.fhdhr, self.origin)

    def __getattr__(self, name):
        ''' will only get called for undefined attributes '''
        if hasattr(self.fhdhr, name):
            return getattr(self.fhdhr, name)
0
fHDHR/cli/__init__.py
Normal file
114
fHDHR/cli/run.py
Normal file
@ -0,0 +1,114 @@
import os
import sys
import argparse
import time
import multiprocessing
import threading
import platform

from fHDHR import fHDHR_VERSION, fHDHR_OBJ
import fHDHR.exceptions
import fHDHR.config
from fHDHR.http import fHDHR_HTTP_Server
from fHDHR.db import fHDHRdb

ERR_CODE = 1
ERR_CODE_NO_RESTART = 2


if sys.version_info.major == 2 or sys.version_info < (3, 7):
    print('Error: fHDHR requires python 3.7+.')
    sys.exit(1)

opersystem = platform.system()
if opersystem in ["Windows"]:
    print("WARNING: This script may fail on Windows. Try Setting the `thread_method` to `threading`")


def build_args_parser():
    """Build argument parser for fHDHR"""
    parser = argparse.ArgumentParser(description='fHDHR')
    parser.add_argument('-c', '--config', dest='cfg', type=str, required=True, help='configuration file to load.')
    return parser.parse_args()


def get_configuration(args, script_dir):
    if not os.path.isfile(args.cfg):
        raise fHDHR.exceptions.ConfigurationNotFound(filename=args.cfg)
    return fHDHR.config.Config(args.cfg, script_dir)


def run(settings, logger, db):

    fhdhr = fHDHR_OBJ(settings, logger, db)
    fhdhrweb = fHDHR_HTTP_Server(fhdhr)

    try:

        print("HTTP Server Starting")
        if settings.dict["main"]["thread_method"] in ["multiprocessing"]:
            fhdhr_web = multiprocessing.Process(target=fhdhrweb.run)
        elif settings.dict["main"]["thread_method"] in ["threading"]:
            fhdhr_web = threading.Thread(target=fhdhrweb.run)
        if settings.dict["main"]["thread_method"] in ["multiprocessing", "threading"]:
            fhdhr_web.start()

        if settings.dict["fhdhr"]["discovery_address"]:
            print("SSDP Server Starting")
            if settings.dict["main"]["thread_method"] in ["multiprocessing"]:
                fhdhr_ssdp = multiprocessing.Process(target=fhdhr.device.ssdp.run)
            elif settings.dict["main"]["thread_method"] in ["threading"]:
                fhdhr_ssdp = threading.Thread(target=fhdhr.device.ssdp.run)
            if settings.dict["main"]["thread_method"] in ["multiprocessing", "threading"]:
                fhdhr_ssdp.start()

        if settings.dict["epg"]["method"]:
            print("EPG Update Starting")
            if settings.dict["main"]["thread_method"] in ["multiprocessing"]:
                fhdhr_epg = multiprocessing.Process(target=fhdhr.device.epg.run)
            elif settings.dict["main"]["thread_method"] in ["threading"]:
                fhdhr_epg = threading.Thread(target=fhdhr.device.epg.run)
            if settings.dict["main"]["thread_method"] in ["multiprocessing", "threading"]:
                fhdhr_epg.start()

        # wait forever
        while True:
            time.sleep(3600)

    except KeyboardInterrupt:
        return ERR_CODE_NO_RESTART

    return ERR_CODE


def start(args, script_dir):
    """Get Configuration for fHDHR and start"""

    try:
        settings = get_configuration(args, script_dir)
    except fHDHR.exceptions.ConfigurationError as e:
        print(e)
        return ERR_CODE_NO_RESTART

    logger = settings.logging_setup()

    db = fHDHRdb(settings)

    return run(settings, logger, db)


def main(script_dir):
    """fHDHR run script entry point"""

    print("Loading fHDHR " + fHDHR_VERSION)

    try:
        args = build_args_parser()
        return start(args, script_dir)
    except KeyboardInterrupt:
        print("\n\nInterrupted")
        return ERR_CODE


if __name__ == '__main__':
    main()
301
fHDHR/config/__init__.py
Normal file
@ -0,0 +1,301 @@
import os
import sys
import random
import configparser
import pathlib
import logging
import subprocess
import platform
import json

import fHDHR.exceptions
from fHDHR import fHDHR_VERSION
from fHDHR.tools import isint, isfloat, is_arithmetic, is_docker


class Config():

    def __init__(self, filename, script_dir):
        self.internal = {}
        self.conf_default = {}
        self.dict = {}
        self.config_file = filename

        self.initial_load(script_dir)
        self.config_verification()

    def initial_load(self, script_dir):

        data_dir = pathlib.Path(script_dir).joinpath('data')
        www_dir = pathlib.Path(data_dir).joinpath('www')

        self.internal["paths"] = {
            "script_dir": script_dir,
            "data_dir": data_dir,
            "cache_dir": pathlib.Path(data_dir).joinpath('cache'),
            "internal_config": pathlib.Path(data_dir).joinpath('internal_config'),
            "www_dir": www_dir,
            "www_templates_dir": pathlib.Path(www_dir).joinpath('templates'),
            "font": pathlib.Path(data_dir).joinpath('garamond.ttf'),
            }

        for conffile in os.listdir(self.internal["paths"]["internal_config"]):
            conffilepath = os.path.join(self.internal["paths"]["internal_config"], conffile)
            if str(conffilepath).endswith(".json"):
                self.read_json_config(conffilepath)

        print("Loading Configuration File: " + str(self.config_file))
        self.read_ini_config(self.config_file)

        self.load_versions()

    def load_versions(self):

        self.internal["versions"] = {}

        self.internal["versions"]["fHDHR"] = fHDHR_VERSION

        self.internal["versions"]["Python"] = sys.version

        opersystem = platform.system()
        self.internal["versions"]["Operating System"] = opersystem
        if opersystem in ["Linux", "Darwin"]:
            # Linux/Mac
            if os.getuid() == 0 or os.geteuid() == 0:
                print('Warning: Do not run fHDHR with root privileges.')
        elif opersystem in ["Windows"]:
            # Windows
            if os.environ.get("USERNAME") == "Administrator":
                print('Warning: Do not run fHDHR as Administrator.')
        else:
            print("Uncommon Operating System, use at your own risk.")

        isdocker = is_docker()
        self.internal["versions"]["Docker"] = isdocker

        if self.dict["fhdhr"]["stream_type"] == "ffmpeg":
            try:
                ffmpeg_command = [self.dict["ffmpeg"]["path"],
                                  "-version",
                                  "pipe:stdout"
                                  ]

                ffmpeg_proc = subprocess.Popen(ffmpeg_command, stdout=subprocess.PIPE)
                ffmpeg_version = ffmpeg_proc.stdout.read()
                ffmpeg_proc.terminate()
                ffmpeg_proc.communicate()
                ffmpeg_version = ffmpeg_version.decode().split("version ")[1].split(" ")[0]
            except FileNotFoundError:
                ffmpeg_version = "Missing"
                print("Failed to find ffmpeg.")
            self.internal["versions"]["ffmpeg"] = ffmpeg_version

        if self.dict["fhdhr"]["stream_type"] == "vlc":
            try:
                vlc_command = [self.dict["vlc"]["path"],
                               "--version",
                               "pipe:stdout"
                               ]

                vlc_proc = subprocess.Popen(vlc_command, stdout=subprocess.PIPE)
                vlc_version = vlc_proc.stdout.read()
                vlc_proc.terminate()
                vlc_proc.communicate()
                vlc_version = vlc_version.decode().split("version ")[1].split('\n')[0]
            except FileNotFoundError:
                vlc_version = "Missing"
                print("Failed to find vlc.")
            self.internal["versions"]["vlc"] = vlc_version

    def read_json_config(self, conffilepath):
        with open(conffilepath, 'r') as jsonconf:
            confimport = json.load(jsonconf)
        for section in list(confimport.keys()):

            if section not in self.dict.keys():
                self.dict[section] = {}

            if section not in self.conf_default.keys():
                self.conf_default[section] = {}

            for key in list(confimport[section].keys()):

                if key not in list(self.conf_default[section].keys()):
                    self.conf_default[section][key] = {}

                confvalue = confimport[section][key]["value"]
                if isint(confvalue):
                    confvalue = int(confvalue)
                elif isfloat(confvalue):
                    confvalue = float(confvalue)
                elif is_arithmetic(confvalue):
                    confvalue = eval(confvalue)
                elif "," in confvalue:
                    confvalue = confvalue.split(",")
                elif str(confvalue).lower() in ["none"]:
                    confvalue = None
                elif str(confvalue).lower() in ["false"]:
                    confvalue = False
                elif str(confvalue).lower() in ["true"]:
                    confvalue = True

                self.dict[section][key] = confvalue

                self.conf_default[section][key]["value"] = confvalue

                for config_option in ["config_web_hidden", "config_file", "config_web"]:
                    if config_option not in list(confimport[section][key].keys()):
                        config_option_value = False
                    else:
                        config_option_value = confimport[section][key][config_option]
                        if str(config_option_value).lower() in ["none"]:
                            config_option_value = None
                        elif str(config_option_value).lower() in ["false"]:
                            config_option_value = False
                        elif str(config_option_value).lower() in ["true"]:
                            config_option_value = True
                    self.conf_default[section][key][config_option] = config_option_value

    def read_ini_config(self, conffilepath):
        config_handler = configparser.ConfigParser()
        config_handler.read(conffilepath)
        for each_section in config_handler.sections():
            if each_section.lower() not in list(self.dict.keys()):
                self.dict[each_section.lower()] = {}
            for (each_key, each_val) in config_handler.items(each_section):
                if not each_val:
                    each_val = None
                elif each_val.lower() in ["none"]:
                    each_val = None
                elif each_val.lower() in ["false"]:
                    each_val = False
                elif each_val.lower() in ["true"]:
                    each_val = True
                elif isint(each_val):
                    each_val = int(each_val)
                elif isfloat(each_val):
                    each_val = float(each_val)
                elif is_arithmetic(each_val):
                    each_val = eval(each_val)
                elif "," in each_val:
                    each_val = each_val.split(",")

                import_val = True
                if each_section in list(self.conf_default.keys()):
                    if each_key in list(self.conf_default[each_section].keys()):
                        if not self.conf_default[each_section][each_key]["config_file"]:
                            import_val = False

                if import_val:
                    self.dict[each_section.lower()][each_key.lower()] = each_val

    def write(self, section, key, value):
        if section == self.dict["main"]["dictpopname"]:
            self.dict["origin"][key] = value
        else:
            self.dict[section][key] = value

        config_handler = configparser.ConfigParser()
        config_handler.read(self.config_file)

        if not config_handler.has_section(section):
            config_handler.add_section(section)

        config_handler.set(section, key, value)

        with open(self.config_file, 'w') as config_file:
            config_handler.write(config_file)

    def config_verification(self):

        if self.dict["main"]["thread_method"] not in ["threading", "multiprocessing"]:
            raise fHDHR.exceptions.ConfigurationError("Invalid Threading Method. Exiting...")

        if self.dict["main"]["required"]:
            required_missing = []
            if isinstance(self.dict["main"]["required"], str):
                self.dict["main"]["required"] = [self.dict["main"]["required"]]
            if len(self.dict["main"]["required"]):
                for req_item in self.dict["main"]["required"]:
                    req_section = req_item.split("/")[0]
                    req_key = req_item.split("/")[1]
                    if not self.dict[req_section][req_key]:
                        required_missing.append(req_item)
            if len(required_missing):
                raise fHDHR.exceptions.ConfigurationError("Required configuration options missing: " + ", ".join(required_missing))

        self.dict["origin"] = self.dict.pop(self.dict["main"]["dictpopname"])

        if isinstance(self.dict["main"]["valid_epg_methods"], str):
            self.dict["main"]["valid_epg_methods"] = [self.dict["main"]["valid_epg_methods"]]

        if self.dict["epg"]["method"] and self.dict["epg"]["method"] not in ["None"]:
            if isinstance(self.dict["epg"]["method"], str):
                self.dict["epg"]["method"] = [self.dict["epg"]["method"]]
            epg_methods = []
            for epg_method in self.dict["epg"]["method"]:
                if epg_method == self.dict["main"]["dictpopname"] or epg_method == "origin":
                    epg_methods.append("origin")
                elif epg_method in ["None"]:
                    raise fHDHR.exceptions.ConfigurationError("Invalid EPG Method. Exiting...")
                elif epg_method in self.dict["main"]["valid_epg_methods"]:
                    epg_methods.append(epg_method)
                else:
                    raise fHDHR.exceptions.ConfigurationError("Invalid EPG Method. Exiting...")
            self.dict["epg"]["def_method"] = self.dict["epg"]["method"][0]

        if not self.dict["main"]["uuid"]:
            self.dict["main"]["uuid"] = ''.join(random.choice("hijklmnopqrstuvwxyz") for i in range(8))
            self.write('main', 'uuid', self.dict["main"]["uuid"])

        if self.dict["main"]["cache_dir"]:
            if not pathlib.Path(self.dict["main"]["cache_dir"]).is_dir():
                raise fHDHR.exceptions.ConfigurationError("Invalid Cache Directory. Exiting...")
            self.internal["paths"]["cache_dir"] = pathlib.Path(self.dict["main"]["cache_dir"])
        cache_dir = self.internal["paths"]["cache_dir"]

        logs_dir = pathlib.Path(cache_dir).joinpath('logs')
        self.internal["paths"]["logs_dir"] = logs_dir
        if not logs_dir.is_dir():
            logs_dir.mkdir()

        self.dict["database"]["path"] = pathlib.Path(cache_dir).joinpath('fhdhr.db')

        if self.dict["fhdhr"]["stream_type"] not in ["direct", "ffmpeg", "vlc"]:
            raise fHDHR.exceptions.ConfigurationError("Invalid stream type. Exiting...")

        if not self.dict["fhdhr"]["discovery_address"] and self.dict["fhdhr"]["address"] != "0.0.0.0":
            self.dict["fhdhr"]["discovery_address"] = self.dict["fhdhr"]["address"]
        if not self.dict["fhdhr"]["discovery_address"] or self.dict["fhdhr"]["discovery_address"] == "0.0.0.0":
            self.dict["fhdhr"]["discovery_address"] = None

    def logging_setup(self):

        log_level = self.dict["logging"]["level"].upper()

        # Create a custom logger
        logging.basicConfig(format='%(name)s - %(levelname)s - %(message)s', level=log_level)
        logger = logging.getLogger('fHDHR')
        log_file = os.path.join(self.internal["paths"]["logs_dir"], 'fHDHR.log')

        # Create handlers
        # c_handler = logging.StreamHandler()
        f_handler = logging.FileHandler(log_file)
        # c_handler.setLevel(log_level)
        f_handler.setLevel(log_level)

        # Create formatters and add it to handlers
        # c_format = logging.Formatter('%(name)s - %(levelname)s - %(message)s')
        f_format = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        # c_handler.setFormatter(c_format)
        f_handler.setFormatter(f_format)

        # Add handlers to the logger
        # logger.addHandler(c_handler)
        logger.addHandler(f_handler)
        return logger

    def __getattr__(self, name):
        ''' will only get called for undefined attributes '''
        if name in list(self.dict.keys()):
            return self.dict[name]
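As a rough illustration of how this class is consumed elsewhere in the project, the sketch below loads a config file and reads and writes a value through the `dict`/`write` interface defined above. This is not part of the diff: the file paths are hypothetical placeholders, and it assumes the repository's `data/internal_config` JSON files are present so `config_verification()` can succeed.

# Minimal usage sketch (not part of the repository); paths are hypothetical.
from fHDHR.config import Config

settings = Config("/app/config/config.ini", "/app")
port = settings.dict["fhdhr"]["port"]              # INI/JSON values are coerced, so this is an int
settings.write("fhdhr", "tuner_count", "5")        # persists the change back to the ini file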
405
fHDHR/db/__init__.py
Normal file
@ -0,0 +1,405 @@
# coding=utf-8

import json
import os.path
import traceback

from sqlalchemy import Column, create_engine, String, Text
from sqlalchemy.engine.url import URL
from sqlalchemy.exc import OperationalError, SQLAlchemyError
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import scoped_session, sessionmaker


def _deserialize(value):
    if value is None:
        return None
    # sqlite likes to return ints for strings that look like ints, even though
    # the column type is string. That's how you do dynamic typing wrong.
    value = str(value)
    # Just in case someone's mucking with the DB in a way we can't account for,
    # ignore json parsing errors
    try:
        value = json.loads(value)
    except ValueError:
        pass
    return value


BASE = declarative_base()
MYSQL_TABLE_ARGS = {'mysql_engine': 'InnoDB',
                    'mysql_charset': 'utf8mb4',
                    'mysql_collate': 'utf8mb4_unicode_ci'}


class ChannelValues(BASE):
    __tablename__ = 'channel_values'
    __table_args__ = MYSQL_TABLE_ARGS
    channel = Column(String(255), primary_key=True)
    namespace = Column(String(255), primary_key=True)
    key = Column(String(255), primary_key=True)
    value = Column(Text())


class ProgramValues(BASE):
    __tablename__ = 'program_values'
    __table_args__ = MYSQL_TABLE_ARGS
    program = Column(String(255), primary_key=True)
    namespace = Column(String(255), primary_key=True)
    key = Column(String(255), primary_key=True)
    value = Column(Text())


class CacheValues(BASE):
    __tablename__ = 'cache_values'
    __table_args__ = MYSQL_TABLE_ARGS
    cacheitem = Column(String(255), primary_key=True)
    namespace = Column(String(255), primary_key=True)
    key = Column(String(255), primary_key=True)
    value = Column(Text())


class fHDHRValues(BASE):
    __tablename__ = 'fhdhr_values'
    __table_args__ = MYSQL_TABLE_ARGS
    item = Column(String(255), primary_key=True)
    namespace = Column(String(255), primary_key=True)
    key = Column(String(255), primary_key=True)
    value = Column(Text())


class fHDHRdb(object):

    def __init__(self, settings):
        self.config = settings
        # MySQL - mysql://username:password@localhost/db
        # SQLite - sqlite:////cache/path/default.db
        self.type = self.config.dict["database"]["type"]

        # Handle SQLite explicitly as a default
        if self.type == 'sqlite':
            path = self.config.dict["database"]["path"]
            path = os.path.expanduser(path)
            self.filename = path
            self.url = 'sqlite:///%s' % path
        # Otherwise, handle all other database engines
        else:
            query = {}
            if self.type == 'mysql':
                drivername = self.config.dict["database"]["driver"] or 'mysql'
                query = {'charset': 'utf8mb4'}
            elif self.type == 'postgres':
                drivername = self.config.dict["database"]["driver"] or 'postgresql'
            elif self.type == 'oracle':
                drivername = self.config.dict["database"]["driver"] or 'oracle'
            elif self.type == 'mssql':
                drivername = self.config.dict["database"]["driver"] or 'mssql+pymssql'
            elif self.type == 'firebird':
                drivername = self.config.dict["database"]["driver"] or 'firebird+fdb'
            elif self.type == 'sybase':
                drivername = self.config.dict["database"]["driver"] or 'sybase+pysybase'
            else:
                raise Exception('Unknown db_type')

            db_user = self.config.dict["database"]["user"]
            db_pass = self.config.dict["database"]["pass"]
            db_host = self.config.dict["database"]["host"]
            db_port = self.config.dict["database"]["port"]  # Optional
            db_name = self.config.dict["database"]["name"]  # Optional, depending on DB

            # Ensure we have all our variables defined
            if db_user is None or db_pass is None or db_host is None:
                raise Exception('Please make sure the following core '
                                'configuration values are defined: '
                                'db_user, db_pass, db_host')
            self.url = URL(drivername=drivername, username=db_user,
                           password=db_pass, host=db_host, port=db_port,
                           database=db_name, query=query)

        self.engine = create_engine(self.url, pool_recycle=3600)

        # Catch any errors connecting to database
        try:
            self.engine.connect()
        except OperationalError:
            print("OperationalError: Unable to connect to database.")
            raise

        # Create our tables
        BASE.metadata.create_all(self.engine)

        self.ssession = scoped_session(sessionmaker(bind=self.engine))

    def connect(self):
        if self.type != 'sqlite':
            print(
                "Raw connection requested when 'db_type' is not 'sqlite':\n"
                "Consider using 'db.session()' to get a SQLAlchemy session "
                "instead here:\n%s",
                traceback.format_list(traceback.extract_stack()[:-1])[-1][:-1])
        return self.engine.raw_connection()

    def session(self):
        return self.ssession()

    def execute(self, *args, **kwargs):
        return self.engine.execute(*args, **kwargs)

    def get_uri(self):
        return self.url

    # Channel Values

    def set_channel_value(self, channel, key, value, namespace='default'):
        channel = channel.lower()
        value = json.dumps(value, ensure_ascii=False)
        session = self.ssession()
        try:
            result = session.query(ChannelValues) \
                .filter(ChannelValues.channel == channel)\
                .filter(ChannelValues.namespace == namespace)\
                .filter(ChannelValues.key == key) \
                .one_or_none()
            # ChannelValues exists, update
            if result:
                result.value = value
                session.commit()
            # DNE - Insert
            else:
                new_channelvalue = ChannelValues(channel=channel, namespace=namespace, key=key, value=value)
                session.add(new_channelvalue)
                session.commit()
        except SQLAlchemyError:
            session.rollback()
            raise
        finally:
            session.close()

    def get_channel_value(self, channel, key, namespace='default'):
        channel = channel.lower()
        session = self.ssession()
        try:
            result = session.query(ChannelValues) \
                .filter(ChannelValues.channel == channel)\
                .filter(ChannelValues.namespace == namespace)\
                .filter(ChannelValues.key == key) \
                .one_or_none()
            if result is not None:
                result = result.value
            return _deserialize(result)
        except SQLAlchemyError:
            session.rollback()
            raise
        finally:
            session.close()

    def delete_channel_value(self, channel, key, namespace='default'):
        channel = channel.lower()
        session = self.ssession()
        try:
            result = session.query(ChannelValues) \
                .filter(ChannelValues.channel == channel)\
                .filter(ChannelValues.namespace == namespace)\
                .filter(ChannelValues.key == key) \
                .one_or_none()
            # ChannelValues exists, delete
            if result:
                session.delete(result)
                session.commit()
        except SQLAlchemyError:
            session.rollback()
            raise
        finally:
            session.close()

    # Program Values

    def set_program_value(self, program, key, value, namespace='default'):
        program = program.lower()
        value = json.dumps(value, ensure_ascii=False)
        session = self.ssession()
        try:
            result = session.query(ProgramValues) \
                .filter(ProgramValues.program == program)\
                .filter(ProgramValues.namespace == namespace)\
                .filter(ProgramValues.key == key) \
                .one_or_none()
            # ProgramValue exists, update
            if result:
                result.value = value
                session.commit()
            # DNE - Insert
            else:
                new_programvalue = ProgramValues(program=program, namespace=namespace, key=key, value=value)
                session.add(new_programvalue)
                session.commit()
        except SQLAlchemyError:
            session.rollback()
            raise
        finally:
            session.close()

    def get_program_value(self, program, key, namespace='default'):
        program = program.lower()
        session = self.ssession()
        try:
            result = session.query(ProgramValues) \
                .filter(ProgramValues.program == program)\
                .filter(ProgramValues.namespace == namespace)\
                .filter(ProgramValues.key == key) \
                .one_or_none()
            if result is not None:
                result = result.value
            return _deserialize(result)
        except SQLAlchemyError:
            session.rollback()
            raise
        finally:
            session.close()

    def delete_program_value(self, program, key, namespace='default'):
        program = program.lower()
        session = self.ssession()
        try:
            result = session.query(ProgramValues) \
                .filter(ProgramValues.program == program)\
                .filter(ProgramValues.namespace == namespace)\
                .filter(ProgramValues.key == key) \
                .one_or_none()
            # ProgramValue exists, delete
            if result:
                session.delete(result)
                session.commit()
        except SQLAlchemyError:
            session.rollback()
            raise
        finally:
            session.close()

    # Cache Values

    def set_cacheitem_value(self, cacheitem, key, value, namespace='default'):
        cacheitem = cacheitem.lower()
        value = json.dumps(value, ensure_ascii=False)
        session = self.ssession()
        try:
            result = session.query(CacheValues) \
                .filter(CacheValues.cacheitem == cacheitem)\
                .filter(CacheValues.namespace == namespace)\
                .filter(CacheValues.key == key) \
                .one_or_none()
            # ProgramValue exists, update
            if result:
                result.value = value
                session.commit()
            # DNE - Insert
            else:
                new_cacheitemvalue = CacheValues(cacheitem=cacheitem, namespace=namespace, key=key, value=value)
                session.add(new_cacheitemvalue)
                session.commit()
        except SQLAlchemyError:
            session.rollback()
            raise
        finally:
            session.close()

    def get_cacheitem_value(self, cacheitem, key, namespace='default'):
        cacheitem = cacheitem.lower()
        session = self.ssession()
        try:
            result = session.query(CacheValues) \
                .filter(CacheValues.cacheitem == cacheitem)\
                .filter(CacheValues.namespace == namespace)\
                .filter(CacheValues.key == key) \
                .one_or_none()
            if result is not None:
                result = result.value
            return _deserialize(result)
        except SQLAlchemyError:
            session.rollback()
            raise
        finally:
            session.close()

    def delete_cacheitem_value(self, cacheitem, key, namespace='default'):
        cacheitem = cacheitem.lower()
        session = self.ssession()
        try:
            result = session.query(CacheValues) \
                .filter(CacheValues.cacheitem == cacheitem)\
                .filter(CacheValues.namespace == namespace)\
                .filter(CacheValues.key == key) \
                .one_or_none()
            # ProgramValue exists, delete
            if result:
                session.delete(result)
                session.commit()
        except SQLAlchemyError:
            session.rollback()
            raise
        finally:
            session.close()

    # fHDHR Values

    def set_fhdhr_value(self, item, key, value, namespace='default'):
        item = item.lower()
        value = json.dumps(value, ensure_ascii=False)
        session = self.ssession()
        try:
            result = session.query(fHDHRValues) \
                .filter(fHDHRValues.item == item)\
                .filter(fHDHRValues.namespace == namespace)\
                .filter(fHDHRValues.key == key) \
                .one_or_none()
            # ProgramValue exists, update
            if result:
                result.value = value
                session.commit()
            # DNE - Insert
            else:
                new_cacheitemvalue = fHDHRValues(item=item, namespace=namespace, key=key, value=value)
                session.add(new_cacheitemvalue)
                session.commit()
        except SQLAlchemyError:
            session.rollback()
            raise
        finally:
            session.close()

    def get_fhdhr_value(self, item, key, namespace='default'):
        item = item.lower()
        session = self.ssession()
        try:
            result = session.query(fHDHRValues) \
                .filter(fHDHRValues.item == item)\
                .filter(fHDHRValues.namespace == namespace)\
                .filter(fHDHRValues.key == key) \
                .one_or_none()
            if result is not None:
                result = result.value
            return _deserialize(result)
        except SQLAlchemyError:
            session.rollback()
            raise
        finally:
            session.close()

    def delete_fhdhr_value(self, item, key, namespace='default'):
        item = item.lower()
        session = self.ssession()
        try:
            result = session.query(fHDHRValues) \
                .filter(fHDHRValues.item == item)\
                .filter(fHDHRValues.namespace == namespace)\
                .filter(fHDHRValues.key == key) \
                .one_or_none()
            # ProgramValue exists, delete
            if result:
                session.delete(result)
                session.commit()
        except SQLAlchemyError:
            session.rollback()
            raise
        finally:
            session.close()
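A hedged sketch of the key/value round trip these helpers provide. It assumes `settings` is a loaded Config instance with the sqlite default, and the stored value is illustrative only; everything is serialized to JSON text on write and deserialized again on read.

# Sketch only: `settings` is assumed to be a loaded Config with database type 'sqlite'.
db = fHDHRdb(settings)
db.set_fhdhr_value("channels", "list", ["example-id"])        # stored as JSON text
assert db.get_fhdhr_value("channels", "list") == ["example-id"]
db.delete_fhdhr_value("channels", "list")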
26
fHDHR/device/__init__.py
Normal file
@ -0,0 +1,26 @@
from .channels import Channels
from .epg import EPG
from .tuners import Tuners
from .images import imageHandler
from .station_scan import Station_Scan
from .ssdp import SSDPServer
from .cluster import fHDHR_Cluster


class fHDHR_Device():

    def __init__(self, fhdhr, origin):

        self.channels = Channels(fhdhr, origin)

        self.epg = EPG(fhdhr, self.channels, origin)

        self.tuners = Tuners(fhdhr, self.epg, self.channels)

        self.images = imageHandler(fhdhr, self.epg)

        self.station_scan = Station_Scan(fhdhr, self.channels)

        self.ssdp = SSDPServer(fhdhr)

        self.cluster = fHDHR_Cluster(fhdhr, self.ssdp)
81
fHDHR/device/channels/__init__.py
Normal file
@ -0,0 +1,81 @@
import datetime

from fHDHR.tools import hours_between_datetime

from .channel import Channel
from .chan_ident import Channel_IDs


class Channels():

    def __init__(self, fhdhr, origin):
        self.fhdhr = fhdhr

        self.origin = origin

        self.id_system = Channel_IDs(fhdhr)

        self.list = {}
        self.list_update_time = None

        self.get_db_channels()
        self.get_channels()

    def get_channel_obj(self, keyfind, valfind):
        return next(self.list[fhdhr_id] for fhdhr_id in list(self.list.keys()) if self.list[fhdhr_id].dict[keyfind] == valfind)

    def get_channel_list(self, keyfind):
        return [self.list[x].dict[keyfind] for x in list(self.list.keys())]

    def set_channel_status(self, keyfind, valfind, updatedict):
        self.get_channel_obj(keyfind, valfind).set_status(updatedict)

    def get_db_channels(self):
        self.fhdhr.logger.info("Checking for Channel information stored in the database.")
        channel_ids = self.fhdhr.db.get_fhdhr_value("channels", "list") or []
        if len(channel_ids):
            self.fhdhr.logger.info("Found %s existing channels in the database." % str(len(channel_ids)))
        for channel_id in channel_ids:
            channel_obj = Channel(self.fhdhr, self.id_system, channel_id=channel_id)
            channel_id = channel_obj.dict["id"]
            self.list[channel_id] = channel_obj

    def get_channels(self, forceupdate=False):
        """Pull Channels from origin.

        Output a list.

        Don't pull more often than 12 hours.
        """

        updatelist = False
        if not self.list_update_time:
            updatelist = True
        elif hours_between_datetime(self.list_update_time, datetime.datetime.now()) > 12:
            updatelist = True
        elif forceupdate:
            updatelist = True

        if updatelist:
            self.fhdhr.logger.info("Performing Channel Scan.")
            channel_dict_list = self.origin.get_channels()
            for channel_info in channel_dict_list:
                channel_obj = Channel(self.fhdhr, self.id_system, origin_id=channel_info["id"])
                channel_id = channel_obj.dict["id"]
                channel_obj.basics(channel_info)
                self.list[channel_id] = channel_obj

            if not self.list_update_time:
                self.fhdhr.logger.info("Found " + str(len(self.list)) + " channels for " + str(self.fhdhr.config.dict["main"]["servicename"]))
            self.list_update_time = datetime.datetime.now()

        channel_list = []
        for chan_obj in list(self.list.keys()):
            channel_list.append(self.list[chan_obj].dict)
        return channel_list

    def get_channel_stream(self, channel_number):
        return self.origin.get_channel_stream(self.get_channel_dict("number", channel_number))

    def get_channel_dict(self, keyfind, valfind):
        return self.get_channel_obj(keyfind, valfind).dict
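For orientation, a hedged sketch of how a caller resolves a channel through the lookup helpers above; it assumes an already-constructed `channels` instance and a channel number that exists in the list.

# Sketch: `channels` is an instantiated Channels object; "1000.0" is a hypothetical number.
chan_dict = channels.get_channel_dict("number", "1000.0")       # resolve a channel by its number
stream_info = channels.get_channel_stream(chan_dict["number"])  # hand the dict to the origin for a stream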
38
fHDHR/device/channels/chan_ident.py
Normal file
@ -0,0 +1,38 @@
import uuid


class Channel_IDs():
    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def get(self, origin_id):
        existing_ids = self.fhdhr.db.get_fhdhr_value("channels", "list") or []
        existing_channel_info = [self.fhdhr.db.get_channel_value(channel_id, "dict") or {} for channel_id in existing_ids]
        for existing_channel in existing_channel_info:
            if existing_channel["origin_id"] == origin_id:
                return existing_channel["id"]
        return self.assign()

    def assign(self):
        existing_ids = self.fhdhr.db.get_fhdhr_value("channels", "list") or []
        channel_id = None
        while not channel_id:
            unique_id = str(uuid.uuid4())
            if str(unique_id) not in existing_ids:
                channel_id = str(unique_id)
        existing_ids.append(channel_id)
        self.fhdhr.db.set_fhdhr_value("channels", "list", existing_ids)
        return channel_id

    def get_number(self, channel_id):
        existing_ids = self.fhdhr.db.get_fhdhr_value("channels", "list") or []
        existing_channel_info = [self.fhdhr.db.get_channel_value(channel_id, "dict") or {} for channel_id in existing_ids]
        cnumber = [existing_channel["number"] for existing_channel in existing_channel_info if existing_channel["id"] == channel_id] or None
        if cnumber:
            return cnumber

        used_numbers = [existing_channel["number"] for existing_channel in existing_channel_info]
        for i in range(1000, 2000):
            if str(float(i)) not in used_numbers:
                break
        return str(float(i))
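Channel numbers here are stored and compared as stringified floats, so the fallback range of 1000-1999 yields values like "1000.0". A hedged sketch, assuming a running `fhdhr` object and a hypothetical origin id:

# Sketch: `fhdhr` is assumed to exist; "example-origin-id" is hypothetical.
ids = Channel_IDs(fhdhr)
channel_id = ids.get("example-origin-id")   # reuses a stored id or assigns a new uuid4 string
number = ids.get_number(channel_id)         # e.g. "1000.0" when no number is recorded yet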
92
fHDHR/device/channels/channel.py
Normal file
@ -0,0 +1,92 @@
class Channel():

    def __init__(self, fhdhr, id_system, origin_id=None, channel_id=None):
        self.fhdhr = fhdhr

        self.id_system = id_system

        if not channel_id:
            if origin_id:
                channel_id = id_system.get(origin_id)
            else:
                channel_id = id_system.assign()
        self.dict = self.fhdhr.db.get_channel_value(str(channel_id), "dict") or self.create_empty_channel(channel_id)
        self.fhdhr.db.set_channel_value(self.dict["id"], "dict", self.dict)

    def basics(self, channel_info):
        """Some Channel Information is Critical"""

        if "name" not in list(channel_info.keys()):
            channel_info["name"] = self.dict["id"]
        self.dict["origin_name"] = channel_info["name"]
        if not self.dict["name"]:
            self.dict["name"] = self.dict["origin_name"]

        if "id" not in list(channel_info.keys()):
            channel_info["id"] = channel_info["name"]
        self.dict["origin_id"] = channel_info["id"]

        if "callsign" not in list(channel_info.keys()):
            channel_info["callsign"] = channel_info["name"]
        self.dict["origin_callsign"] = channel_info["callsign"]
        if not self.dict["callsign"]:
            self.dict["callsign"] = self.dict["origin_callsign"]

        if "tags" not in list(channel_info.keys()):
            channel_info["tags"] = []
        self.dict["origin_tags"] = channel_info["tags"]
        if not self.dict["tags"]:
            self.dict["tags"] = self.dict["origin_tags"]

        if "number" not in list(channel_info.keys()):
            channel_info["number"] = self.id_system.get_number(channel_info["id"])
        self.dict["origin_number"] = str(float(channel_info["number"]))
        if not self.dict["number"]:
            self.dict["number"] = self.dict["origin_number"]

        self.fhdhr.db.set_channel_value(self.dict["id"], "dict", self.dict)

    def create_empty_channel(self, channel_id):
        return {
            "id": str(channel_id), "origin_id": None,
            "name": None, "origin_name": None,
            "callsign": None, "origin_callsign": None,
            "number": None, "origin_number": None,
            "tags": [], "origin_tags": [],
            "enabled": True
            }

    def destroy(self):
        self.fhdhr.db.delete_channel_value(self.dict["id"], "dict")
        channel_ids = self.fhdhr.db.get_fhdhr_value("channels", "list") or []
        if self.dict["id"] in channel_ids:
            channel_ids.remove(self.dict["id"])
            self.fhdhr.db.set_fhdhr_value("channels", "list", channel_ids)

    def set_status(self, updatedict):
        for key in list(updatedict.keys()):
            self.dict[key] = updatedict[key]
        self.fhdhr.db.set_channel_value(self.dict["id"], "dict", self.dict)

    def lineup_dict(self):
        return {
            'GuideNumber': self.dict['number'],
            'GuideName': self.dict['name'],
            'Tags': ",".join(self.dict['tags']),
            'URL': self.stream_url(),
            }

    def stream_url(self):
        return ('/auto/v%s' % self.dict['number'])

    def play_url(self):
        return ('/api/m3u?method=get&channel=%s' % self.dict['number'])

    def __getattr__(self, name):
        ''' will only get called for undefined attributes '''
        if name in list(self.dict.keys()):
            return self.dict[name]
        else:
            return None
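For reference, a hedged example of the lineup entry produced by `lineup_dict()` for a populated channel; the values below are made up for illustration, only the key names come from the code above.

# Illustrative only: the shape returned by Channel.lineup_dict() for a populated channel.
example_lineup = {
    'GuideNumber': '1000.0',
    'GuideName': 'Example Channel',   # hypothetical name
    'Tags': 'news,live',              # hypothetical tags
    'URL': '/auto/v1000.0',
}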
165
fHDHR/device/cluster.py
Normal file
@ -0,0 +1,165 @@
import urllib.parse
from collections import OrderedDict


class fHDHR_Cluster():

    def __init__(self, fhdhr, ssdp):
        self.fhdhr = fhdhr

        self.ssdp = ssdp

        self.friendlyname = self.fhdhr.config.dict["fhdhr"]["friendlyname"]
        self.location = None
        self.location_url = None

        if fhdhr.config.dict["fhdhr"]["discovery_address"]:
            self.location = ('http://' + fhdhr.config.dict["fhdhr"]["discovery_address"] + ':' +
                             str(fhdhr.config.dict["fhdhr"]["port"]))
            self.location_url = urllib.parse.quote(self.location)

        self.startup_sync()

    def cluster(self):
        return self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()

    def get_cluster_dicts_web(self):
        fhdhr_list = self.cluster()
        locations = []
        for location in list(fhdhr_list.keys()):
            item_dict = {
                "base_url": fhdhr_list[location]["base_url"],
                "name": fhdhr_list[location]["name"]
                }
            if item_dict["base_url"] != self.location:
                locations.append(item_dict)
        if len(locations):
            locations = sorted(locations, key=lambda i: i['name'])
            return locations
        else:
            return None

    def get_list(self):
        cluster = self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        return_dict = {}
        for location in list(cluster.keys()):
            if location != self.location:
                return_dict[location] = {
                    "Joined": True
                    }

        detected_list = self.ssdp.detect_method.get()
        for location in detected_list:
            if location not in list(cluster.keys()):
                return_dict[location] = {
                    "Joined": False
                    }
        return_dict = OrderedDict(sorted(return_dict.items()))
        return return_dict

    def default_cluster(self):
        defdict = {}
        defdict[self.location] = {
            "base_url": self.location,
            "name": self.friendlyname
            }
        return defdict

    def startup_sync(self):
        self.fhdhr.logger.info("Synchronizing with Cluster.")
        cluster = self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        if not len(list(cluster.keys())):
            self.fhdhr.logger.info("No Cluster Found.")
        else:
            self.fhdhr.logger.info("Found %s clustered services." % str(len(list(cluster.keys()))))
            for location in list(cluster.keys()):
                if location != self.location:
                    self.fhdhr.logger.info("Checking Cluster Synchronization information from %s." % location)
                    sync_url = location + "/api/cluster?method=get"
                    try:
                        sync_open = self.fhdhr.web.session.get(sync_url)
                        retrieved_cluster = sync_open.json()
                        if self.location not in list(retrieved_cluster.keys()):
                            return self.leave()
                    except self.fhdhr.web.exceptions.ConnectionError:
                        self.fhdhr.logger.error("Unreachable: " + location)

    def leave(self):
        self.fhdhr.logger.info("Leaving cluster.")
        self.fhdhr.db.set_fhdhr_value("cluster", "dict", self.default_cluster())

    def disconnect(self):
        cluster = self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        for location in list(cluster.keys()):
            if location != self.location:
                self.fhdhr.logger.info("Informing %s that I am departing the Cluster." % location)
                sync_url = location + "/api/cluster?method=del&location=" + self.location
                try:
                    self.fhdhr.web.session.get(sync_url)
                except self.fhdhr.web.exceptions.ConnectionError:
                    self.fhdhr.logger.error("Unreachable: " + location)
        self.leave()

    def sync(self, location):
        sync_url = location + "/api/cluster?method=get"
        try:
            sync_open = self.fhdhr.web.session.get(sync_url)
            self.fhdhr.db.set_fhdhr_value("cluster", "dict", sync_open.json())
        except self.fhdhr.web.exceptions.ConnectionError:
            self.fhdhr.logger.error("Unreachable: " + location)

    def push_sync(self):
        cluster = self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        for location in list(cluster.keys()):
            if location != self.location:
                sync_url = location + "/api/cluster?method=sync&location=" + self.location_url
                try:
                    self.fhdhr.web.session.get(sync_url)
                except self.fhdhr.web.exceptions.ConnectionError:
                    self.fhdhr.logger.error("Unreachable: " + location)

    def add(self, location):
        cluster = self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        if location not in list(cluster.keys()):
            self.fhdhr.logger.info("Adding %s to cluster." % location)
            cluster[location] = {"base_url": location}

            location_info_url = location + "/discover.json"
            try:
                location_info_req = self.fhdhr.web.session.get(location_info_url)
            except self.fhdhr.web.exceptions.ConnectionError:
                self.fhdhr.logger.error("Unreachable: " + location)
                del cluster[location]
                self.fhdhr.db.set_fhdhr_value("cluster", "dict", cluster)
                return
            location_info = location_info_req.json()
            cluster[location]["name"] = location_info["FriendlyName"]

            cluster_info_url = location + "/api/cluster?method=get"
            try:
                cluster_info_req = self.fhdhr.web.session.get(cluster_info_url)
            except self.fhdhr.web.exceptions.ConnectionError:
                self.fhdhr.logger.error("Unreachable: " + location)
                del cluster[location]
                self.fhdhr.db.set_fhdhr_value("cluster", "dict", cluster)
                return
            cluster_info = cluster_info_req.json()
            for cluster_key in list(cluster_info.keys()):
                if cluster_key not in list(cluster.keys()):
                    cluster[cluster_key] = cluster_info[cluster_key]

            self.fhdhr.db.set_fhdhr_value("cluster", "dict", cluster)
            self.push_sync()

    def remove(self, location):
        cluster = self.fhdhr.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
        if location in list(cluster.keys()):
            self.fhdhr.logger.info("Removing %s from cluster." % location)
            del cluster[location]
            sync_url = location + "/api/cluster?method=leave"
            try:
                self.fhdhr.web.session.get(sync_url)
            except self.fhdhr.web.exceptions.ConnectionError:
                self.fhdhr.logger.error("Unreachable: " + location)
            self.push_sync()
            self.fhdhr.db.set_fhdhr_value("cluster", "dict", cluster)
187
fHDHR/device/epg.py
Normal file
@ -0,0 +1,187 @@
import os
import time
import datetime
from collections import OrderedDict

epgtype_list = []
device_dir = os.path.dirname(__file__)
for entry in os.scandir(device_dir + '/epgtypes'):
    if entry.is_file():
        if entry.name[0] != '_':
            epgtype_list.append(str(entry.name[:-3]))
            impstring = f'from .epgtypes import {entry.name}'[:-3]
            exec(impstring)


class EPG():

    def __init__(self, fhdhr, channels, origin):
        self.fhdhr = fhdhr

        self.origin = origin
        self.channels = channels

        self.epgdict = {}

        self.epg_method_selfadd()

        self.epg_methods = self.fhdhr.config.dict["epg"]["method"]
        self.valid_epg_methods = [x for x in self.fhdhr.config.dict["main"]["valid_epg_methods"] if x and x not in [None, "None"]]
        self.def_method = self.fhdhr.config.dict["epg"]["def_method"]
        self.sleeptime = {}
        for epg_method in self.epg_methods:
            if epg_method in list(self.fhdhr.config.dict.keys()):
                if "update_frequency" in list(self.fhdhr.config.dict[epg_method].keys()):
                    self.sleeptime[epg_method] = self.fhdhr.config.dict[epg_method]["update_frequency"]
            if epg_method not in list(self.sleeptime.keys()):
                self.sleeptime[epg_method] = self.fhdhr.config.dict["epg"]["update_frequency"]

    def clear_epg_cache(self, method=None):

        if not method:
            method = self.def_method
        if (method == self.fhdhr.config.dict["main"]["dictpopname"] or
                method not in self.fhdhr.config.dict["main"]["valid_epg_methods"]):
            method = "origin"

        epgtypename = method
        if method in [self.fhdhr.config.dict["main"]["dictpopname"], "origin"]:
            epgtypename = self.fhdhr.config.dict["main"]["dictpopname"]

        self.fhdhr.logger.info("Clearing " + epgtypename + " EPG cache.")

        method_to_call = getattr(self, method)
        if hasattr(method_to_call, 'clear_cache'):
            func_to_call = getattr(method_to_call, 'clear_cache')
            func_to_call()

        if method in list(self.epgdict.keys()):
            del self.epgdict[method]

        self.fhdhr.db.delete_fhdhr_value("epg_dict", method)

    def whats_on_now(self, channel, method=None):
        epgdict = self.get_epg(method)
        listings = epgdict[channel]["listing"]
        for listing in listings:
            nowtime = datetime.datetime.utcnow()
            start_time = datetime.datetime.strptime(listing["time_start"], '%Y%m%d%H%M%S +0000')
            end_time = datetime.datetime.strptime(listing["time_end"], '%Y%m%d%H%M%S +0000')
            if start_time <= nowtime <= end_time:
                epgitem = epgdict[channel].copy()
                epgitem["listing"] = [listing]
                return epgitem
        return None

    def whats_on_allchans(self, method=None):

        if not method:
            method = self.def_method
        if (method == self.fhdhr.config.dict["main"]["dictpopname"] or
                method not in self.fhdhr.config.dict["main"]["valid_epg_methods"]):
            method = "origin"

        channel_guide_list = []
        epgdict = self.get_epg(method)
        channels = list(epgdict.keys())
        for channel in channels:
            whatson = self.whats_on_now(epgdict[channel]["number"], method)
            if whatson:
                channel_guide_list.append(whatson)
        return channel_guide_list

    def get_epg(self, method=None):

        if not method:
            method = self.def_method
        if (method == self.fhdhr.config.dict["main"]["dictpopname"] or
                method not in self.fhdhr.config.dict["main"]["valid_epg_methods"]):
            method = "origin"

        if method not in list(self.epgdict.keys()):

            epgdict = self.fhdhr.db.get_fhdhr_value("epg_dict", method) or None
            if not epgdict:
                self.update(method)
                self.epgdict[method] = self.fhdhr.db.get_fhdhr_value("epg_dict", method) or {}
            else:
                self.epgdict[method] = epgdict
            return self.epgdict[method]
        else:
            return self.epgdict[method]

    def get_thumbnail(self, itemtype, itemid):
        if itemtype == "channel":
            chandict = self.find_channel_dict(itemid)
            return chandict["thumbnail"]
        elif itemtype == "content":
            progdict = self.find_program_dict(itemid)
            return progdict["thumbnail"]
        return None

    def find_channel_dict(self, channel_id):
        epgdict = self.get_epg()
        channel_list = []
        for channel in list(epgdict.keys()):
            channel_list.append(epgdict[channel])
        return next(item for item in channel_list if item["id"] == channel_id)

    def find_program_dict(self, event_id):
        epgdict = self.get_epg()
        event_list = []
        for channel in list(epgdict.keys()):
            event_list.extend(epgdict[channel]["listing"])
        return next(item for item in event_list if item["id"] == event_id)

    def epg_method_selfadd(self):
        self.fhdhr.logger.info("Checking for Optional EPG methods.")
        for method in epgtype_list:
            self.fhdhr.logger.info("Found %s EPG method." % method)
            exec("%s = %s" % ("self." + str(method), str(method) + "." + str(method) + "EPG(self.fhdhr, self.channels)"))

    def update(self, method=None):

        if not method:
            method = self.def_method
        if (method == self.fhdhr.config.dict["main"]["dictpopname"] or
                method not in self.fhdhr.config.dict["main"]["valid_epg_methods"]):
            method = "origin"

        epgtypename = method
        if method in [self.fhdhr.config.dict["main"]["dictpopname"], "origin"]:
            epgtypename = self.fhdhr.config.dict["main"]["dictpopname"]

        self.fhdhr.logger.info("Updating " + epgtypename + " EPG cache.")
        method_to_call = getattr(self, method)
        func_to_call = getattr(method_to_call, 'update_epg')
        if method == 'origin':
            programguide = func_to_call(self.channels)
        else:
            programguide = func_to_call()

        for chan in list(programguide.keys()):
            floatnum = str(float(chan))
            programguide[floatnum] = programguide.pop(chan)
            programguide[floatnum]["number"] = floatnum

        programguide = OrderedDict(sorted(programguide.items()))

        for cnum in programguide:
            programguide[cnum]["listing"] = sorted(programguide[cnum]["listing"], key=lambda i: i['time_start'])

        self.epgdict = programguide
        self.fhdhr.db.set_fhdhr_value("epg_dict", method, programguide)
        self.fhdhr.db.set_fhdhr_value("update_time", method, time.time())
        self.fhdhr.logger.info("Wrote " + epgtypename + " EPG cache.")

    def run(self):
        for epg_method in self.epg_methods:
            self.update(epg_method)
        try:
            while True:
                for epg_method in self.epg_methods:
                    if time.time() >= (self.fhdhr.db.get_fhdhr_value("update_time", epg_method) + self.sleeptime[epg_method]):
                        self.update(epg_method)
                time.sleep(3600)
        except KeyboardInterrupt:
            pass
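The listing timestamps above use the `'%Y%m%d%H%M%S +0000'` layout, so the window check inside `whats_on_now` reduces to a plain datetime comparison. A minimal sketch with made-up timestamp values:

# Sketch of the timestamp comparison used by whats_on_now(); listing values are examples.
import datetime

listing = {"time_start": "20201101120000 +0000", "time_end": "20201101130000 +0000"}
now = datetime.datetime.utcnow()
start = datetime.datetime.strptime(listing["time_start"], '%Y%m%d%H%M%S +0000')
end = datetime.datetime.strptime(listing["time_end"], '%Y%m%d%H%M%S +0000')
on_now = start <= now <= end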
0
fHDHR/device/epgtypes/__init__.py
Normal file
64
fHDHR/device/epgtypes/blocks.py
Normal file
@ -0,0 +1,64 @@
import datetime


class blocksEPG():

    def __init__(self, fhdhr, channels):
        self.fhdhr = fhdhr

        self.channels = channels

    def update_epg(self):
        programguide = {}

        timestamps = []
        todaydate = datetime.date.today()
        for x in range(0, 6):
            xdate = todaydate + datetime.timedelta(days=x)
            xtdate = xdate + datetime.timedelta(days=1)

            for hour in range(0, 24):
                time_start = datetime.datetime.combine(xdate, datetime.time(hour, 0))
                if hour + 1 < 24:
                    time_end = datetime.datetime.combine(xdate, datetime.time(hour + 1, 0))
                else:
                    time_end = datetime.datetime.combine(xtdate, datetime.time(0, 0))
                timestampdict = {
                    "time_start": str(time_start.strftime('%Y%m%d%H%M%S')) + " +0000",
                    "time_end": str(time_end.strftime('%Y%m%d%H%M%S')) + " +0000",
                    }
                timestamps.append(timestampdict)

        for c in self.channels.get_channels():
            if str(c["number"]) not in list(programguide.keys()):
                programguide[str(c["number"])] = {
                    "callsign": c["callsign"],
                    "name": c["name"],
                    "number": c["number"],
                    "id": c["origin_id"],
                    "thumbnail": ("/api/images?method=generate&type=channel&message=%s" % (str(c['number']))),
                    "listing": [],
                    }

            for timestamp in timestamps:
                clean_prog_dict = {
                    "time_start": timestamp['time_start'],
                    "time_end": timestamp['time_end'],
                    "duration_minutes": 60,
                    "thumbnail": ("/api/images?method=generate&type=content&message=%s" % (str(c["origin_id"]) + "_" + str(timestamp['time_start']).split(" ")[0])),
                    "title": "Unavailable",
                    "sub-title": "Unavailable",
                    "description": "Unavailable",
                    "rating": "N/A",
                    "episodetitle": None,
                    "releaseyear": None,
                    "genres": [],
                    "seasonnumber": None,
                    "episodenumber": None,
                    "isnew": False,
                    "id": str(c["origin_id"]) + "_" + str(timestamp['time_start']).split(" ")[0],
                    }

                programguide[str(c["number"])]["listing"].append(clean_prog_dict)

        return programguide
60
fHDHR/device/images.py
Normal file
@ -0,0 +1,60 @@
from io import BytesIO
import PIL.Image
import PIL.ImageDraw
import PIL.ImageFont


class imageHandler():

    def __init__(self, fhdhr, epg):
        self.fhdhr = fhdhr
        self.epg = epg  # keep a handle to the EPG so get_epg_image can resolve thumbnails

    def get_epg_image(self, image_type, content_id):
        imageUri = self.epg.get_thumbnail(image_type, str(content_id))
        if not imageUri:
            return self.generate_image(image_type, str(content_id))

        req = self.fhdhr.web.session.get(imageUri)
        return req.content

    def getSize(self, txt, font):
        testImg = PIL.Image.new('RGB', (1, 1))
        testDraw = PIL.ImageDraw.Draw(testImg)
        return testDraw.textsize(txt, font)

    def generate_image(self, messagetype, message):
        if messagetype == "channel":
            width = 360
            height = 270
            fontsize = 72
        elif messagetype == "content":
            width = 1080
            height = 1440
            fontsize = 100

        colorBackground = "#228822"
        colorText = "#717D7E"
        colorOutline = "#717D7E"
        fontname = str(self.fhdhr.config.internal["paths"]["font"])

        font = PIL.ImageFont.truetype(fontname, fontsize)
        text_width, text_height = self.getSize(message, font)
        img = PIL.Image.new('RGBA', (width+4, height+4), colorBackground)
        d = PIL.ImageDraw.Draw(img)
        d.text(((width-text_width)/2, (height-text_height)/2), message, fill=colorText, font=font)
        d.rectangle((0, 0, width+3, height+3), outline=colorOutline)

        s = BytesIO()
        img.save(s, 'png')
        return s.getvalue()

    def get_image_type(self, image_data):
        header_byte = image_data[0:3].hex().lower()
        if header_byte == '474946':
            return "image/gif"
        elif header_byte == '89504e':
            return "image/png"
        elif header_byte == 'ffd8ff':
            return "image/jpeg"
        else:
            return "image/jpeg"
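A hedged sketch of exercising the placeholder-image generator directly: `generate_image` returns raw PNG bytes, which can be written straight to disk. The `images` instance and the output path are assumptions for illustration.

# Sketch: `images` is an instantiated imageHandler; the output path is hypothetical.
png_bytes = images.generate_image("channel", "1000.0")
with open("/tmp/channel_1000.png", "wb") as f:
    f.write(png_bytes)
print(images.get_image_type(png_bytes))   # "image/png"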
196
fHDHR/device/ssdp.py
Normal file
@ -0,0 +1,196 @@
# Adapted from https://github.com/MoshiBin/ssdpy and https://github.com/ZeWaren/python-upnp-ssdp-example
import socket
import struct


class fHDHR_Detect():

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr
        self.fhdhr.db.delete_fhdhr_value("ssdp_detect", "list")

    def set(self, location):
        detect_list = self.fhdhr.db.get_fhdhr_value("ssdp_detect", "list") or []
        if location not in detect_list:
            detect_list.append(location)
            self.fhdhr.db.set_fhdhr_value("ssdp_detect", "list", detect_list)

    def get(self):
        return self.fhdhr.db.get_fhdhr_value("ssdp_detect", "list") or []


class SSDPServer():

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

        self.detect_method = fHDHR_Detect(fhdhr)

        if fhdhr.config.dict["fhdhr"]["discovery_address"]:

            self.sock = None
            self.proto = "ipv4"
            self.port = 1900
            self.iface = None
            self.address = None
            self.server = 'fHDHR/%s UPnP/1.0' % fhdhr.version

            allowed_protos = ("ipv4", "ipv6")
            if self.proto not in allowed_protos:
                raise ValueError("Invalid proto - expected one of {}".format(allowed_protos))

            self.nt = 'urn:schemas-upnp-org:device:MediaServer:1'
            self.usn = 'uuid:' + fhdhr.config.dict["main"]["uuid"] + '::' + self.nt
            self.location = ('http://' + fhdhr.config.dict["fhdhr"]["discovery_address"] + ':' +
                             str(fhdhr.config.dict["fhdhr"]["port"]) + '/device.xml')
            self.al = self.location
            self.max_age = 1800
            self._iface = None

            if self.proto == "ipv4":
                self._af_type = socket.AF_INET
                self._broadcast_ip = "239.255.255.250"
                self._address = (self._broadcast_ip, self.port)
                self.bind_address = "0.0.0.0"
            elif self.proto == "ipv6":
                self._af_type = socket.AF_INET6
                self._broadcast_ip = "ff02::c"
                self._address = (self._broadcast_ip, self.port, 0, 0)
                self.bind_address = "::"

            self.broadcast_addy = "{}:{}".format(self._broadcast_ip, self.port)

            self.sock = socket.socket(self._af_type, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
            self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

            # Bind to specific interface
            if self.iface is not None:
                self.sock.setsockopt(socket.SOL_SOCKET, getattr(socket, "SO_BINDTODEVICE", 25), self.iface)

            # Subscribe to multicast address
            if self.proto == "ipv4":
                mreq = socket.inet_aton(self._broadcast_ip)
                if self.address is not None:
                    mreq += socket.inet_aton(self.address)
                else:
                    mreq += struct.pack(b"@I", socket.INADDR_ANY)
                self.sock.setsockopt(
                    socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq,
                )
                # Allow multicasts on loopback devices (necessary for testing)
                self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, 1)
            elif self.proto == "ipv6":
                # In IPv6 we use the interface index, not the address when subscribing to the group
                mreq = socket.inet_pton(socket.AF_INET6, self._broadcast_ip)
                if self.iface is not None:
                    iface_index = socket.if_nametoindex(self.iface)
                    # Send outgoing packets from the same interface
                    self.sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_IF, iface_index)
                    mreq += struct.pack(b"@I", iface_index)
                else:
                    mreq += socket.inet_pton(socket.AF_INET6, "::")
                self.sock.setsockopt(
                    socket.IPPROTO_IPV6, socket.IPV6_JOIN_GROUP, mreq,
                )
                self.sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_LOOP, 1)
            self.sock.bind((self.bind_address, self.port))

            self.notify_payload = self.create_notify_payload()
            self.msearch_payload = self.create_msearch_payload()

            self.m_search()

    def on_recv(self, data, address):
        self.fhdhr.logger.debug("Received packet from {}: {}".format(address, data))

        (host, port) = address

        try:
            header, payload = data.decode().split('\r\n\r\n')[:2]
        except ValueError:
            self.fhdhr.logger.error("Error with Received packet from {}: {}".format(address, data))
            return

        lines = header.split('\r\n')
        cmd = lines[0].split(' ')
        lines = map(lambda x: x.replace(': ', ':', 1), lines[1:])
        lines = filter(lambda x: len(x) > 0, lines)

        headers = [x.split(':', 1) for x in lines]
        headers = dict(map(lambda x: (x[0].lower(), x[1]), headers))

        if cmd[0] == 'M-SEARCH' and cmd[1] == '*':
            # SSDP discovery
            self.fhdhr.logger.debug("Received qualifying M-SEARCH from {}".format(address))
            self.fhdhr.logger.debug("M-SEARCH data: {}".format(headers))
            notify = self.notify_payload
            self.fhdhr.logger.debug("Created NOTIFY: {}".format(notify))
            try:
                self.sock.sendto(notify, address)
            except OSError as e:
                # Most commonly: We received a multicast from an IP not in our subnet
                self.fhdhr.logger.debug("Unable to send NOTIFY to {}: {}".format(address, e))
                pass
        elif cmd[0] == 'NOTIFY' and cmd[1] == '*':
            # SSDP presence
            self.fhdhr.logger.debug("NOTIFY data: {}".format(headers))
            try:
                if headers["server"].startswith("fHDHR"):
                    if headers["location"] != self.location:
                        self.detect_method.set(headers["location"].split("/device.xml")[0])
            except KeyError:
                return
        else:
            self.fhdhr.logger.debug('Unknown SSDP command %s %s' % (cmd[0], cmd[1]))

    def m_search(self):
        data = self.msearch_payload
        self.sock.sendto(data, self._address)

    def create_notify_payload(self):
        if self.max_age is not None and not isinstance(self.max_age, int):
            raise ValueError("max_age must be of type: int")
        data = (
            "NOTIFY * HTTP/1.1\r\n"
            "HOST:{}\r\n"
            "NT:{}\r\n"
            "NTS:ssdp:alive\r\n"
            "USN:{}\r\n"
            "SERVER:{}\r\n"
        ).format(
            self._broadcast_ip,
            self.nt,
            self.usn,
            self.server
        )
        if self.location is not None:
            data += "LOCATION:{}\r\n".format(self.location)
        if self.al is not None:
            data += "AL:{}\r\n".format(self.al)
        if self.max_age is not None:
            data += "Cache-Control:max-age={}\r\n".format(self.max_age)
        data += "\r\n"
        return data.encode("utf-8")

    def create_msearch_payload(self):
        data = (
            "M-SEARCH * HTTP/1.1\r\n"
            "HOST:{}\r\n"
            'MAN: "ssdp:discover"\r\n'
            "ST:{}\r\n"
            "MX:{}\r\n"
        ).format(
            self.broadcast_addy,
            "ssdp:all",
            1
        )
        data += "\r\n"
        return data.encode("utf-8")

    def run(self):
        try:
            while True:
                data, address = self.sock.recvfrom(1024)
                self.on_recv(data, address)
        except KeyboardInterrupt:
            self.sock.close()
34
fHDHR/device/station_scan.py
Normal file
@ -0,0 +1,34 @@
from multiprocessing import Process


class Station_Scan():

    def __init__(self, fhdhr, channels):
        self.fhdhr = fhdhr

        self.channels = channels

        self.fhdhr.db.delete_fhdhr_value("station_scan", "scanning")

    def scan(self):
        self.fhdhr.logger.info("Channel Scan Requested by Client.")

        scan_status = self.fhdhr.db.get_fhdhr_value("station_scan", "scanning")
        if not scan_status:
            self.fhdhr.db.set_fhdhr_value("station_scan", "scanning", 1)
            chanscan = Process(target=self.runscan)
            chanscan.start()
        else:
            self.fhdhr.logger.info("Channel Scan Already In Progress!")

    def runscan(self):
        self.channels.get_channels(forceupdate=True)
        self.fhdhr.logger.info("Requested Channel Scan Complete.")
        self.fhdhr.db.delete_fhdhr_value("station_scan", "scanning")

    def scanning(self):
        scan_status = self.fhdhr.db.get_fhdhr_value("station_scan", "scanning")
        if not scan_status:
            return False
        else:
            return True
88
fHDHR/device/tuners/__init__.py
Normal file
@ -0,0 +1,88 @@
from fHDHR.exceptions import TunerError

from .tuner import Tuner


class Tuners():

    def __init__(self, fhdhr, epg, channels):
        self.fhdhr = fhdhr
        self.channels = channels

        self.epg = epg
        self.max_tuners = int(self.fhdhr.config.dict["fhdhr"]["tuner_count"])

        self.tuners = {}

        self.fhdhr.logger.info("Creating %s tuners." % str(self.max_tuners))

        for i in range(1, self.max_tuners + 1):
            self.tuners[i] = Tuner(fhdhr, i, epg)

    def tuner_grab(self, tuner_number):

        if int(tuner_number) not in list(self.tuners.keys()):
            self.fhdhr.logger.error("Tuner %s does not exist." % str(tuner_number))
            raise TunerError("806 - Tune Failed")

        # TunerError will raise if unavailable
        self.tuners[int(tuner_number)].grab()

        return tuner_number

    def first_available(self):

        if not self.available_tuner_count():
            raise TunerError("805 - All Tuners In Use")

        for tunernum in list(self.tuners.keys()):
            try:
                self.tuners[int(tunernum)].grab()
            except TunerError:
                continue
            else:
                return tunernum

        raise TunerError("805 - All Tuners In Use")

    def tuner_close(self, tunernum):
        self.tuners[int(tunernum)].close()

    def status(self):
        all_status = {}
        for tunernum in list(self.tuners.keys()):
            all_status[tunernum] = self.tuners[int(tunernum)].get_status()
        return all_status

    def available_tuner_count(self):
        available_tuners = 0
        for tunernum in list(self.tuners.keys()):
            tuner_status = self.tuners[int(tunernum)].get_status()
            if tuner_status["status"] == "Inactive":
                available_tuners += 1
        return available_tuners

    def inuse_tuner_count(self):
        inuse_tuners = 0
        for tunernum in list(self.tuners.keys()):
            tuner_status = self.tuners[int(tunernum)].get_status()
            if tuner_status["status"] == "Active":
                inuse_tuners += 1
        return inuse_tuners

    def get_stream_info(self, stream_args):

        stream_args["channelUri"] = self.channels.get_channel_stream(str(stream_args["channel"]))
        if not stream_args["channelUri"]:
            raise TunerError("806 - Tune Failed")

        channelUri_headers = self.fhdhr.web.session.head(stream_args["channelUri"]).headers
        stream_args["true_content_type"] = channelUri_headers['Content-Type']

        if stream_args["true_content_type"].startswith(tuple(["application/", "text/"])):
            stream_args["content_type"] = "video/mpeg"
        else:
            stream_args["content_type"] = stream_args["true_content_type"]

        return stream_args
22
fHDHR/device/tuners/stream/__init__.py
Normal file
@ -0,0 +1,22 @@
from .direct_stream import Direct_Stream
from .ffmpeg_stream import FFMPEG_Stream
from .vlc_stream import VLC_Stream


class Stream():

    def __init__(self, fhdhr, stream_args, tuner):
        self.fhdhr = fhdhr
        self.stream_args = stream_args

        if stream_args["method"] == "ffmpeg":
            self.method = FFMPEG_Stream(fhdhr, stream_args, tuner)
        elif stream_args["method"] == "vlc":
            self.method = VLC_Stream(fhdhr, stream_args, tuner)
        elif stream_args["method"] == "direct":
            self.method = Direct_Stream(fhdhr, stream_args, tuner)

    def get(self):
        return self.method.get()
139
fHDHR/device/tuners/stream/direct_stream.py
Normal file
@ -0,0 +1,139 @@
import time
import m3u8

from Crypto.Cipher import AES

# from fHDHR.exceptions import TunerError


class Direct_Stream():

    def __init__(self, fhdhr, stream_args, tuner):
        self.fhdhr = fhdhr
        self.stream_args = stream_args
        self.tuner = tuner

        self.chunksize = int(self.fhdhr.config.dict["direct_stream"]['chunksize'])

    def get(self):

        if not self.stream_args["duration"] == 0:
            self.stream_args["time_end"] = self.stream_args["duration"] + time.time()

        if not self.stream_args["true_content_type"].startswith(tuple(["application/", "text/"])):

            self.fhdhr.logger.info("Direct Stream of %s URL: %s" % (self.stream_args["true_content_type"], self.stream_args["channelUri"]))

            req = self.fhdhr.web.session.get(self.stream_args["channelUri"], stream=True)

            def generate():

                try:

                    chunk_counter = 1

                    while self.tuner.tuner_lock.locked():

                        for chunk in req.iter_content(chunk_size=self.chunksize):

                            if (not self.stream_args["duration"] == 0 and
                               not time.time() < self.stream_args["time_end"]):
                                req.close()
                                self.fhdhr.logger.info("Requested Duration Expired.")
                                self.tuner.close()

                            if not chunk:
                                break
                                # raise TunerError("807 - No Video Data")

                            self.fhdhr.logger.info("Passing Through Chunk #%s with size %s" % (chunk_counter, self.chunksize))
                            yield chunk

                            chunk_counter += 1

                    self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")

                except GeneratorExit:
                    self.fhdhr.logger.info("Connection Closed.")
                except Exception as e:
                    self.fhdhr.logger.info("Connection Closed: " + str(e))
                finally:
                    req.close()
                    self.tuner.close()
                    # raise TunerError("806 - Tune Failed")

        else:

            self.fhdhr.logger.info("Detected stream URL is m3u8: %s" % self.stream_args["true_content_type"])

            channelUri = self.stream_args["channelUri"]
            while True:

                videoUrlM3u = m3u8.load(channelUri)
                if len(videoUrlM3u.playlists):
                    channelUri = videoUrlM3u.playlists[0].absolute_uri
                else:
                    break

            def generate():

                try:

                    played_chunk_urls = []

                    while self.tuner.tuner_lock.locked():

                        playlist = m3u8.load(channelUri)
                        segments = playlist.segments

                        if len(played_chunk_urls):
                            newsegments = 0
                            for segment in segments:
                                if segment.absolute_uri not in played_chunk_urls:
                                    newsegments += 1
                            self.fhdhr.logger.info("Refreshing m3u8, Loaded %s new segments." % str(newsegments))
                        else:
                            self.fhdhr.logger.info("Loaded %s segments." % str(len(segments)))

                        if playlist.keys != [None]:
                            keys = [{"url": key.uri, "method": key.method, "iv": key.iv} for key in playlist.keys if key]
                        else:
                            keys = [None for i in range(0, len(segments))]

                        for segment, key in zip(segments, keys):
                            chunkurl = segment.absolute_uri

                            if chunkurl not in played_chunk_urls:
                                played_chunk_urls.append(chunkurl)

                                if (not self.stream_args["duration"] == 0 and
                                   not time.time() < self.stream_args["time_end"]):
                                    self.fhdhr.logger.info("Requested Duration Expired.")
                                    self.tuner.close()

                                chunk = self.fhdhr.web.session.get(chunkurl).content
                                if not chunk:
                                    break
                                    # raise TunerError("807 - No Video Data")
                                if key:
                                    keyfile = self.fhdhr.web.session.get(key["url"]).content
                                    cryptor = AES.new(keyfile, AES.MODE_CBC, keyfile)
                                    chunk = cryptor.decrypt(chunk)

                                self.fhdhr.logger.info("Passing Through Chunk: %s" % chunkurl)
                                yield chunk

                        if playlist.target_duration:
                            time.sleep(int(playlist.target_duration))

                    self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")

                except GeneratorExit:
                    self.fhdhr.logger.info("Connection Closed.")
                except Exception as e:
                    self.fhdhr.logger.info("Connection Closed: " + str(e))
                finally:
                    self.tuner.close()
                    # raise TunerError("806 - Tune Failed")

        return generate()
127
fHDHR/device/tuners/stream/ffmpeg_stream.py
Normal file
@ -0,0 +1,127 @@
import subprocess

# from fHDHR.exceptions import TunerError


class FFMPEG_Stream():

    def __init__(self, fhdhr, stream_args, tuner):
        self.fhdhr = fhdhr
        self.stream_args = stream_args
        self.tuner = tuner

        self.bytes_per_read = int(self.fhdhr.config.dict["ffmpeg"]["bytes_per_read"])
        self.ffmpeg_command = self.ffmpeg_command_assemble(stream_args)

    def get(self):

        ffmpeg_proc = subprocess.Popen(self.ffmpeg_command, stdout=subprocess.PIPE)

        def generate():
            try:
                while self.tuner.tuner_lock.locked():

                    videoData = ffmpeg_proc.stdout.read(self.bytes_per_read)
                    if not videoData:
                        break
                        # raise TunerError("807 - No Video Data")
                    yield videoData
                self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")

            except GeneratorExit:
                self.fhdhr.logger.info("Connection Closed.")
            except Exception as e:
                self.fhdhr.logger.info("Connection Closed: " + str(e))
            finally:
                ffmpeg_proc.terminate()
                ffmpeg_proc.communicate()
                self.tuner.close()
                # raise TunerError("806 - Tune Failed")

        return generate()

    def ffmpeg_command_assemble(self, stream_args):
        ffmpeg_command = [
            self.fhdhr.config.dict["ffmpeg"]["path"],
            "-i", stream_args["channelUri"],
            ]
        ffmpeg_command.extend(self.ffmpeg_duration(stream_args))
        ffmpeg_command.extend(self.transcode_profiles(stream_args))
        ffmpeg_command.extend(self.ffmpeg_loglevel())
        ffmpeg_command.extend(["pipe:stdout"])
        return ffmpeg_command

    def ffmpeg_duration(self, stream_args):
        ffmpeg_command = []
        if stream_args["duration"]:
            ffmpeg_command.extend(["-t", str(stream_args["duration"])])
        else:
            ffmpeg_command.extend(
                [
                    "-reconnect", "1",
                    "-reconnect_at_eof", "1",
                    "-reconnect_streamed", "1",
                    "-reconnect_delay_max", "2",
                    ]
                )

        return ffmpeg_command

    def ffmpeg_loglevel(self):
        ffmpeg_command = []
        log_level = self.fhdhr.config.dict["logging"]["level"].lower()

        loglevel_dict = {
            "debug": "debug",
            "info": "info",
            "error": "error",
            "warning": "warning",
            "critical": "fatal",
            }
        if log_level not in ["info", "debug"]:
            ffmpeg_command.extend(["-nostats", "-hide_banner"])
        ffmpeg_command.extend(["-loglevel", loglevel_dict[log_level]])
        return ffmpeg_command

    def transcode_profiles(self, stream_args):
        # TODO implement actual profiles here
        """
        • heavy: transcode to AVC with the same resolution, frame-rate, and interlacing as the
          original stream. For example 1080i60 → AVC 1080i60, 720p60 → AVC 720p60.
        • mobile: transcode to AVC progressive not exceeding 1280x720 30fps.
        • internet720: transcode to low bitrate AVC progressive not exceeding 1280x720 30fps.
        • internet480: transcode to low bitrate AVC progressive not exceeding 848x480 30fps for
          16:9 content, not exceeding 640x480 30fps for 4:3 content.
        • internet360: transcode to low bitrate AVC progressive not exceeding 640x360 30fps for
          16:9 content, not exceeding 480x360 30fps for 4:3 content.
        • internet240: transcode to low bitrate AVC progressive not exceeding 432x240 30fps for
          16:9 content, not exceeding 320x240 30fps for 4:3 content
        """

        if stream_args["transcode"]:
            self.fhdhr.logger.info("Client requested a " + stream_args["transcode"] + " transcode for stream.")
            stream_args["transcode"] = None

        ffmpeg_command = []

        if not stream_args["transcode"]:
            ffmpeg_command.extend(
                [
                    "-c", "copy",
                    "-f", "mpegts",
                    ]
                )
        elif stream_args["transcode"] == "heavy":
            ffmpeg_command.extend([])
        elif stream_args["transcode"] == "mobile":
            ffmpeg_command.extend([])
        elif stream_args["transcode"] == "internet720":
            ffmpeg_command.extend([])
        elif stream_args["transcode"] == "internet480":
            ffmpeg_command.extend([])
        elif stream_args["transcode"] == "internet360":
            ffmpeg_command.extend([])
        elif stream_args["transcode"] == "internet240":
            ffmpeg_command.extend([])

        return ffmpeg_command
118
fHDHR/device/tuners/stream/vlc_stream.py
Normal file
@ -0,0 +1,118 @@
import subprocess

# from fHDHR.exceptions import TunerError


class VLC_Stream():

    def __init__(self, fhdhr, stream_args, tuner):
        self.fhdhr = fhdhr
        self.stream_args = stream_args
        self.tuner = tuner

        self.bytes_per_read = int(self.fhdhr.config.dict["vlc"]["bytes_per_read"])
        self.vlc_command = self.vlc_command_assemble(stream_args)

    def get(self):

        vlc_proc = subprocess.Popen(self.vlc_command, stdout=subprocess.PIPE)

        def generate():
            try:

                while self.tuner.tuner_lock.locked():

                    videoData = vlc_proc.stdout.read(self.bytes_per_read)
                    if not videoData:
                        break
                        # raise TunerError("807 - No Video Data")
                    yield videoData
                self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")

            except GeneratorExit:
                self.fhdhr.logger.info("Connection Closed.")
            except Exception as e:
                self.fhdhr.logger.info("Connection Closed: " + str(e))
            finally:
                vlc_proc.terminate()
                vlc_proc.communicate()
                self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")
                self.tuner.close()
                # raise TunerError("806 - Tune Failed")

        return generate()

    def vlc_command_assemble(self, stream_args):
        vlc_command = [
            self.fhdhr.config.dict["vlc"]["path"],
            "-I", "dummy", stream_args["channelUri"],
            ]
        vlc_command.extend(self.vlc_duration(stream_args))
        vlc_command.extend(self.vlc_loglevel())
        vlc_command.extend(["--sout"])
        vlc_command.extend(self.transcode_profiles(stream_args))
        return vlc_command

    def vlc_duration(self, stream_args):
        vlc_command = []
        if stream_args["duration"]:
            vlc_command.extend(["--run-time=%s" % str(stream_args["duration"])])
        return vlc_command

    def vlc_loglevel(self):
        vlc_command = []
        log_level = self.fhdhr.config.dict["logging"]["level"].lower()

        loglevel_dict = {
            "debug": "3",
            "info": "0",
            "error": "1",
            "warning": "2",
            "critical": "1",
            }
        vlc_command.extend(["--log-verbose=", loglevel_dict[log_level]])
        if log_level not in ["info", "debug"]:
            vlc_command.extend(["--quiet"])
        return vlc_command

    def transcode_profiles(self, stream_args):
        # TODO implement actual profiles here
        """
        • heavy: transcode to AVC with the same resolution, frame-rate, and interlacing as the
          original stream. For example 1080i60 → AVC 1080i60, 720p60 → AVC 720p60.
        • mobile: transcode to AVC progressive not exceeding 1280x720 30fps.
        • internet720: transcode to low bitrate AVC progressive not exceeding 1280x720 30fps.
        • internet480: transcode to low bitrate AVC progressive not exceeding 848x480 30fps for
          16:9 content, not exceeding 640x480 30fps for 4:3 content.
        • internet360: transcode to low bitrate AVC progressive not exceeding 640x360 30fps for
          16:9 content, not exceeding 480x360 30fps for 4:3 content.
        • internet240: transcode to low bitrate AVC progressive not exceeding 432x240 30fps for
          16:9 content, not exceeding 320x240 30fps for 4:3 content
        """
        vlc_command = []

        if stream_args["transcode"]:
            self.fhdhr.logger.info("Client requested a " + stream_args["transcode"] + " transcode for stream.")
            stream_args["transcode"] = None

        vlc_transcode_string = "#std{mux=ts,access=file,dst=-}"
        return [vlc_transcode_string]

        '#transcode{vcodec=mp2v,vb=4096,acodec=mp2a,ab=192,scale=1,channels=2,deinterlace}:std{access=file,mux=ts,dst=-"}'

        if not stream_args["transcode"]:
            vlc_command.extend([])
        elif stream_args["transcode"] == "heavy":
            vlc_command.extend([])
        elif stream_args["transcode"] == "mobile":
            vlc_command.extend([])
        elif stream_args["transcode"] == "internet720":
            vlc_command.extend([])
        elif stream_args["transcode"] == "internet480":
            vlc_command.extend([])
        elif stream_args["transcode"] == "internet360":
            vlc_command.extend([])
        elif stream_args["transcode"] == "internet240":
            vlc_command.extend([])

        return vlc_command
59
fHDHR/device/tuners/tuner.py
Normal file
@ -0,0 +1,59 @@
import threading
import datetime

from fHDHR.exceptions import TunerError
from fHDHR.tools import humanized_time

from .stream import Stream


class Tuner():
    def __init__(self, fhdhr, inum, epg):
        self.fhdhr = fhdhr

        self.number = inum
        self.epg = epg

        self.tuner_lock = threading.Lock()
        self.set_off_status()

    def grab(self):
        if self.tuner_lock.locked():
            self.fhdhr.logger.error("Tuner #" + str(self.number) + " is not available.")
            raise TunerError("804 - Tuner In Use")
        self.tuner_lock.acquire()
        self.status["status"] = "Acquired"
        self.fhdhr.logger.info("Tuner #" + str(self.number) + " Acquired.")

    def close(self):
        self.set_off_status()
        if self.tuner_lock.locked():
            self.tuner_lock.release()
            self.fhdhr.logger.info("Tuner #" + str(self.number) + " Released.")

    def get_status(self):
        current_status = self.status.copy()
        if current_status["status"] == "Active":
            current_status["Play Time"] = str(
                humanized_time(
                    int((datetime.datetime.utcnow() - current_status["time_start"]).total_seconds())))
            current_status["time_start"] = str(current_status["time_start"])
            current_status["epg"] = self.epg.whats_on_now(current_status["channel"])
        return current_status

    def set_off_status(self):
        self.status = {"status": "Inactive"}

    def get_stream(self, stream_args, tuner):
        stream = Stream(self.fhdhr, stream_args, tuner)
        return stream.get()

    def set_status(self, stream_args):
        self.status = {
            "status": "Active",
            "method": stream_args["method"],
            "accessed": stream_args["accessed"],
            "channel": stream_args["channel"],
            "proxied_url": stream_args["channelUri"],
            "time_start": datetime.datetime.utcnow(),
            }
40
fHDHR/exceptions/__init__.py
Normal file
@ -0,0 +1,40 @@

class TunerError(Exception):
    def __init__(self, value):
        self.value = value

    def __str__(self):
        return 'TunerError: %s' % self.value


class OriginSetupError(Exception):
    def __init__(self, value):
        self.value = value

    def __str__(self):
        return 'OriginSetupError: %s' % self.value


class EPGSetupError(Exception):
    def __init__(self, value):
        self.value = value

    def __str__(self):
        return 'EPGSetupError: %s' % self.value


class ConfigurationError(Exception):
    def __init__(self, value):
        self.value = value

    def __str__(self):
        return 'ConfigurationError: %s' % self.value


class ConfigurationNotFound(ConfigurationError):
    def __init__(self, filename):
        super(ConfigurationNotFound, self).__init__(None)
        self.filename = filename

    def __str__(self):
        return 'Unable to find the configuration file %s' % self.filename
77
fHDHR/http/__init__.py
Normal file
@ -0,0 +1,77 @@
from gevent.pywsgi import WSGIServer
from flask import Flask

from .pages import fHDHR_Pages
from .files import fHDHR_Files
from .api import fHDHR_API
from .watch import fHDHR_WATCH


class fHDHR_HTTP_Server():
    app = None

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

        self.template_folder = fhdhr.config.internal["paths"]["www_templates_dir"]

        self.fhdhr.logger.info("Loading Flask.")

        self.app = Flask("fHDHR", template_folder=self.template_folder)

        self.fhdhr.logger.info("Loading HTTP Pages Endpoints.")
        self.pages = fHDHR_Pages(fhdhr)
        self.add_endpoints(self.pages, "pages")

        self.fhdhr.logger.info("Loading HTTP Files Endpoints.")
        self.files = fHDHR_Files(fhdhr)
        self.add_endpoints(self.files, "files")

        self.fhdhr.logger.info("Loading HTTP API Endpoints.")
        self.api = fHDHR_API(fhdhr)
        self.add_endpoints(self.api, "api")

        self.fhdhr.logger.info("Loading HTTP Stream Endpoints.")
        self.watch = fHDHR_WATCH(fhdhr)
        self.add_endpoints(self.watch, "watch")

    def add_endpoints(self, index_list, index_name):
        item_list = [x for x in dir(index_list) if self.isapath(x)]
        for item in item_list:
            endpoints = eval("self." + str(index_name) + "." + str(item) + ".endpoints")
            if isinstance(endpoints, str):
                endpoints = [endpoints]
            handler = eval("self." + str(index_name) + "." + str(item))
            endpoint_name = eval("self." + str(index_name) + "." + str(item) + ".endpoint_name")
            try:
                endpoint_methods = eval("self." + str(index_name) + "." + str(item) + ".endpoint_methods")
            except AttributeError:
                endpoint_methods = ['GET']
            self.fhdhr.logger.info("Adding endpoint %s available at %s with %s methods." % (endpoint_name, ",".join(endpoints), ",".join(endpoint_methods)))
            for endpoint in endpoints:
                self.add_endpoint(endpoint=endpoint,
                                  endpoint_name=endpoint_name,
                                  handler=handler,
                                  methods=endpoint_methods)

    def isapath(self, item):
        not_a_page_list = ["fhdhr", "htmlerror", "page_elements"]
        if item in not_a_page_list:
            return False
        elif item.startswith("__") and item.endswith("__"):
            return False
        else:
            return True

    def add_endpoint(self, endpoint=None, endpoint_name=None, handler=None, methods=['GET']):
        self.app.add_url_rule(endpoint, endpoint_name, handler, methods=methods)

    def run(self):
        self.http = WSGIServer((
            self.fhdhr.config.dict["fhdhr"]["address"],
            int(self.fhdhr.config.dict["fhdhr"]["port"])
            ), self.app.wsgi_app)
        try:
            self.http.serve_forever()
        except KeyboardInterrupt:
            self.http.stop()
30
fHDHR/http/api/__init__.py
Normal file
@ -0,0 +1,30 @@

from .cluster import Cluster
from .settings import Settings
from .channels import Channels
from .lineup_post import Lineup_Post
from .xmltv import xmlTV
from .m3u import M3U
from .epg import EPG
from .watch import Watch
from .debug import Debug_JSON

from .images import Images


class fHDHR_API():

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

        self.cluster = Cluster(fhdhr)
        self.settings = Settings(fhdhr)
        self.channels = Channels(fhdhr)
        self.xmltv = xmlTV(fhdhr)
        self.m3u = M3U(fhdhr)
        self.epg = EPG(fhdhr)
        self.watch = Watch(fhdhr)
        self.debug = Debug_JSON(fhdhr)
        self.lineup_post = Lineup_Post(fhdhr)

        self.images = Images(fhdhr)
66
fHDHR/http/api/channels.py
Normal file
@ -0,0 +1,66 @@
from flask import request, redirect, Response
import urllib.parse
import json


class Channels():
    endpoints = ["/api/channels"]
    endpoint_name = "api_channels"
    endpoint_methods = ["GET", "POST"]

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        method = request.args.get('method', default=None, type=str)
        redirect_url = request.args.get('redirect', default=None, type=str)

        if method == "get":
            channels_info = []
            for fhdhr_id in list(self.fhdhr.device.channels.list.keys()):
                channel_obj = self.fhdhr.device.channels.list[fhdhr_id]
                channel_dict = channel_obj.dict.copy()
                channel_dict["play_url"] = channel_obj.play_url()
                channel_dict["stream_url"] = channel_obj.stream_url()
                channels_info.append(channel_dict)
            channels_info_json = json.dumps(channels_info, indent=4)

            return Response(status=200,
                            response=channels_info_json,
                            mimetype='application/json')

        elif method == "update":
            channel_id = request.form.get('id', None)
            updatedict = {}
            for key in list(request.form.keys()):
                if key != "id":
                    if key in ["name", "callsign"]:
                        updatedict[key] = str(request.form.get(key))
                    elif key in ["number"]:
                        updatedict[key] = float(request.form.get(key))
                    elif key in ["enabled"]:
                        confvalue = request.form.get(key)
                        if str(confvalue).lower() in ["false"]:
                            confvalue = False
                        elif str(confvalue).lower() in ["true"]:
                            confvalue = True
                        updatedict[key] = confvalue
            self.fhdhr.device.channels.set_channel_status("id", channel_id, updatedict)

        elif method == "scan":
            self.fhdhr.device.station_scan.scan()

        else:
            return "Invalid Method"

        if redirect_url:
            return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Success" % method))
        else:
            if method == "scan":
                return redirect('/lineup_status.json')
            else:
                return "%s Success" % method
53
fHDHR/http/api/cluster.py
Normal file
@ -0,0 +1,53 @@
from flask import request, redirect, Response
import urllib.parse
import json


class Cluster():
    endpoints = ["/api/cluster"]
    endpoint_name = "api_cluster"
    endpoint_methods = ["GET", "POST"]

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        method = request.args.get('method', default="get", type=str)
        location = request.args.get("location", default=None, type=str)
        redirect_url = request.args.get('redirect', default=None, type=str)

        if method == "get":
            jsoncluster = self.fhdhr.device.cluster.cluster()
            cluster_json = json.dumps(jsoncluster, indent=4)

            return Response(status=200,
                            response=cluster_json,
                            mimetype='application/json')

        elif method == "scan":
            self.fhdhr.device.ssdp.m_search()

        elif method == 'add':
            self.fhdhr.device.cluster.add(location)
        elif method == 'del':
            self.fhdhr.device.cluster.remove(location)

        elif method == 'sync':
            self.fhdhr.device.cluster.sync(location)

        elif method == 'leave':
            self.fhdhr.device.cluster.leave()
        elif method == 'disconnect':
            self.fhdhr.device.cluster.disconnect()

        else:
            return "Invalid Method"

        if redirect_url:
            return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Success" % method))
        else:
            return "%s Success" % method
29
fHDHR/http/api/debug.py
Normal file
@ -0,0 +1,29 @@
from flask import request, Response
import json


class Debug_JSON():
    endpoints = ["/api/debug"]
    endpoint_name = "api_debug"
    endpoint_methods = ["GET", "POST"]

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        base_url = request.url_root[:-1]

        debugjson = {
            "base_url": base_url,
            "total channels": len(self.fhdhr.device.channels.list),
            "tuner status": self.fhdhr.device.tuners.status(),
            }
        cluster_json = json.dumps(debugjson, indent=4)

        return Response(status=200,
                        response=cluster_json,
                        mimetype='application/json')
49
fHDHR/http/api/epg.py
Normal file
@ -0,0 +1,49 @@
from flask import Response, request, redirect
import urllib.parse
import json


class EPG():
    """Methods to retrieve and manage EPG data"""
    endpoints = ["/api/epg"]
    endpoint_name = "api_epg"
    endpoint_methods = ["GET", "POST"]

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        method = request.args.get('method', default="get", type=str)

        source = request.args.get('source', default=self.fhdhr.config.dict["epg"]["def_method"], type=str)
        if source not in self.fhdhr.config.dict["main"]["valid_epg_methods"]:
            return "%s Invalid xmltv method" % source

        redirect_url = request.args.get('redirect', default=None, type=str)

        if method == "get":

            epgdict = self.fhdhr.device.epg.get_epg(source)
            epg_json = json.dumps(epgdict, indent=4)

            return Response(status=200,
                            response=epg_json,
                            mimetype='application/json')

        elif method == "update":
            self.fhdhr.device.epg.update(source)

        elif method == "clearcache":
            self.fhdhr.device.epg.clear_epg_cache(source)

        else:
            return "%s Invalid Method" % method

        if redirect_url:
            return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Success" % method))
        else:
            return "%s Success" % method
44
fHDHR/http/api/images.py
Normal file
@ -0,0 +1,44 @@
from flask import request, Response, abort


class Images():
    endpoints = ["/api/images"]
    endpoint_name = "api_images"
    endpoint_methods = ["GET", "POST"]

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        image = None

        method = request.args.get('method', default="get", type=str)

        if method == "generate":
            image_type = request.args.get('type', default="content", type=str)
            if image_type in ["content", "channel"]:
                message = request.args.get('message', default="Unknown Request", type=str)
                image = self.fhdhr.device.images.generate_image(image_type, message)

        elif method == "get":
            source = request.args.get('source', default=self.fhdhr.config.dict["epg"]["method"], type=str)
            if source in self.fhdhr.config.dict["main"]["valid_epg_methods"]:
                image_type = request.args.get('type', default="content", type=str)
                if image_type in ["content", "channel"]:
                    image_id = request.args.get('id', default=None, type=str)
                    if image_id:
                        image = self.fhdhr.device.images.get_epg_image(image_type, image_id)

        else:
            image = self.fhdhr.device.images.generate_image("content", "Unknown Request")

        if image:
            imagemimetype = self.fhdhr.device.images.get_image_type(image)
            return Response(image, content_type=imagemimetype, direct_passthrough=True)

        else:
            return abort(501, "Not a valid image request")
31
fHDHR/http/api/lineup_post.py
Normal file
@ -0,0 +1,31 @@
from flask import request, abort, Response


class Lineup_Post():
    endpoints = ["/lineup.post"]
    endpoint_name = "api_lineup_post"
    endpoint_methods = ["POST"]

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        if 'scan' in list(request.args.keys()):

            if request.args['scan'] == 'start':
                self.fhdhr.device.station_scan.scan()
                return Response(status=200, mimetype='text/html')

            elif request.args['scan'] == 'abort':
                return Response(status=200, mimetype='text/html')

            else:
                self.fhdhr.logger.warning("Unknown scan command " + request.args['scan'])
                return abort(200, "Not a valid scan command")

        else:
            return abort(501, "Not a valid command")
89
fHDHR/http/api/m3u.py
Normal file
@ -0,0 +1,89 @@
from flask import Response, request, redirect
import urllib.parse
from io import StringIO


class M3U():
    endpoints = ["/api/m3u", "/api/channels.m3u"]
    endpoint_name = "api_m3u"
    endpoint_methods = ["GET", "POST"]

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        base_url = request.url_root[:-1]

        method = request.args.get('method', default="get", type=str)
        channel = request.args.get('channel', default="all", type=str)
        redirect_url = request.args.get('redirect', default=None, type=str)

        if method == "get":

            FORMAT_DESCRIPTOR = "#EXTM3U"
            RECORD_MARKER = "#EXTINF"

            fakefile = StringIO()

            xmltvurl = ('%s/api/xmltv' % base_url)

            fakefile.write(
                "%s\n" % (
                    FORMAT_DESCRIPTOR + " " +
                    "url-tvg=\"" + xmltvurl + "\"" + " " +
                    "x-tvg-url=\"" + xmltvurl + "\"")
                )

            channel_items = []

            if channel == "all":
                fileName = "channels.m3u"
                for fhdhr_id in list(self.fhdhr.device.channels.list.keys()):
                    channel_obj = self.fhdhr.device.channels.list[fhdhr_id]
                    if channel_obj.enabled:
                        channel_items.append(channel_obj)
            elif channel in self.fhdhr.device.channels.get_channel_list("number"):
                channel_obj = self.fhdhr.device.channels.get_channel_obj("number", channel)
                fileName = str(channel_obj.number) + ".m3u"
                if channel_obj.enabled:
                    channel_items.append(channel_obj)
                else:
                    return "Channel Disabled"
            else:
                return "Invalid Channel"

            for channel_obj in channel_items:

                if self.fhdhr.config.dict["epg"]["images"] == "proxy" or not channel_obj.thumbnail:
                    logourl = ('%s/api/images?method=get&type=channel&id=%s' %
                               (base_url, str(channel_obj.dict['origin_id'])))
                else:
                    logourl = channel_obj.thumbnail

                fakefile.write(
                    "%s\n" % (
                        RECORD_MARKER + ":0" + " " +
                        "channelID=\"" + str(channel_obj.dict['origin_id']) + "\" " +
                        "tvg-chno=\"" + str(channel_obj.dict['number']) + "\" " +
                        "tvg-name=\"" + str(channel_obj.dict['name']) + "\" " +
                        "tvg-id=\"" + str(channel_obj.dict['number']) + "\" " +
                        "tvg-logo=\"" + logourl + "\" " +
                        "group-title=\"" + self.fhdhr.config.dict["fhdhr"]["friendlyname"] + "\"," + str(channel_obj.dict['name']))
                    )

                fakefile.write("%s\n" % (base_url + channel_obj.stream_url()))

            channels_m3u = fakefile.getvalue()

            resp = Response(status=200, response=channels_m3u, mimetype='audio/x-mpegurl')
            resp.headers["content-disposition"] = "attachment; filename=" + fileName
            return resp

        if redirect_url:
            return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Success" % method))
        else:
            return "%s Success" % method
40
fHDHR/http/api/settings.py
Normal file
@ -0,0 +1,40 @@
from flask import request, redirect
import urllib.parse


class Settings():
    endpoints = ["/api/settings"]
    endpoint_name = "api_settings"
    endpoint_methods = ["GET", "POST"]

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        method = request.args.get('method', default="get", type=str)
        redirect_url = request.args.get('redirect', default=None, type=str)

        if method == "update":
            config_section = request.form.get('config_section', None)
            config_name = request.form.get('config_name', None)
            config_value = request.form.get('config_value', None)

            if not config_section or not config_name or not config_value:
                if redirect_url:
                    return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Failed" % method))
                else:
                    return "%s Failed" % method

            if config_section == "origin":
                config_section = self.fhdhr.config.dict["main"]["dictpopname"]

            self.fhdhr.config.write(config_section, config_name, config_value)

        if redirect_url:
            return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Success" % method))
        else:
            return "%s Success" % method
116
fHDHR/http/api/watch.py
Normal file
@ -0,0 +1,116 @@
|
|||||||
|
from flask import Response, request, redirect, abort, stream_with_context
|
||||||
|
import urllib.parse
|
||||||
|
|
||||||
|
from fHDHR.exceptions import TunerError
|
||||||
|
|
||||||
|
|
||||||
|
class Watch():
|
||||||
|
"""Methods to create xmltv.xml"""
|
||||||
|
endpoints = ["/api/watch"]
|
||||||
|
endpoint_name = "api_watch"
|
||||||
|
endpoint_methods = ["GET", "POST"]
|
||||||
|
|
||||||
|
def __init__(self, fhdhr):
|
||||||
|
self.fhdhr = fhdhr
|
||||||
|
|
||||||
|
def __call__(self, *args):
|
||||||
|
return self.get(*args)
|
||||||
|
|
||||||
|
def get(self, *args):
|
||||||
|
|
||||||
|
client_address = request.remote_addr
|
||||||
|
|
||||||
|
accessed_url = request.args.get('accessed', default=request.url, type=str)
|
||||||
|
|
||||||
|
method = request.args.get('method', default=self.fhdhr.config.dict["fhdhr"]["stream_type"], type=str)
|
||||||
|
|
||||||
|
tuner_number = request.args.get('tuner', None, type=str)
|
||||||
|
|
||||||
|
redirect_url = request.args.get('redirect', default=None, type=str)
|
||||||
|
|
||||||
|
if method in ["direct", "ffmpeg", "vlc"]:
|
||||||
|
|
||||||
|
channel_number = request.args.get('channel', None, type=str)
|
||||||
|
if not channel_number:
|
||||||
|
return "Missing Channel"
|
||||||
|
|
||||||
|
if channel_number not in self.fhdhr.device.channels.get_channel_list("number"):
|
||||||
|
response = Response("Not Found", status=404)
|
||||||
|
response.headers["X-fHDHR-Error"] = "801 - Unknown Channel"
|
||||||
|
self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
|
||||||
|
                abort(response)

            channel_dict = self.fhdhr.device.channels.get_channel_dict("number", channel_number)

            if not channel_dict["enabled"]:
                response = Response("Service Unavailable", status=503)
                response.headers["X-fHDHR-Error"] = str("806 - Tune Failed")
                self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
                abort(response)

            duration = request.args.get('duration', default=0, type=int)

            transcode = request.args.get('transcode', default=None, type=str)
            valid_transcode_types = [None, "heavy", "mobile", "internet720", "internet480", "internet360", "internet240"]
            if transcode not in valid_transcode_types:
                response = Response("Service Unavailable", status=503)
                response.headers["X-fHDHR-Error"] = "802 - Unknown Transcode Profile"
                self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
                abort(response)

            stream_args = {
                "channel": channel_number,
                "method": method,
                "duration": duration,
                "transcode": transcode,
                "accessed": accessed_url,
                "client": client_address
                }

            try:
                if not tuner_number:
                    tunernum = self.fhdhr.device.tuners.first_available()
                else:
                    tunernum = self.fhdhr.device.tuners.tuner_grab(tuner_number)
            except TunerError as e:
                self.fhdhr.logger.info("A %s stream request for channel %s was rejected due to %s"
                                       % (stream_args["method"], str(stream_args["channel"]), str(e)))
                response = Response("Service Unavailable", status=503)
                response.headers["X-fHDHR-Error"] = str(e)
                self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
                abort(response)
            tuner = self.fhdhr.device.tuners.tuners[int(tunernum)]

            try:
                stream_args = self.fhdhr.device.tuners.get_stream_info(stream_args)
            except TunerError as e:
                self.fhdhr.logger.info("A %s stream request for channel %s was rejected due to %s"
                                       % (stream_args["method"], str(stream_args["channel"]), str(e)))
                response = Response("Service Unavailable", status=503)
                response.headers["X-fHDHR-Error"] = str(e)
                self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
                tuner.close()
                abort(response)

            self.fhdhr.logger.info("Tuner #" + str(tunernum) + " to be used for stream.")
            tuner.set_status(stream_args)

            if stream_args["method"] == "direct":
                return Response(tuner.get_stream(stream_args, tuner), content_type=stream_args["content_type"], direct_passthrough=True)
            elif stream_args["method"] in ["ffmpeg", "vlc"]:
                return Response(stream_with_context(tuner.get_stream(stream_args, tuner)), mimetype=stream_args["content_type"])

        elif method == "close":

            if not tuner_number or int(tuner_number) not in list(self.fhdhr.device.tuners.tuners.keys()):
                return "%s Invalid tuner" % str(tuner_number)

            tuner = self.fhdhr.device.tuners.tuners[int(tuner_number)]
            tuner.close()

        else:
            return "%s Invalid Method" % method

        if redirect_url:
            return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Success" % method))
        else:
            return "%s Success" % method
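The handlers above are what HDHomeRun-style clients hit when they tune a channel. A minimal client-side sketch, not part of this commit; the host, port and channel number are assumptions (5004 is the shipped default port):

import requests

url = "http://127.0.0.1:5004/api/watch"
params = {"method": "direct", "channel": "2"}   # hypothetical channel number
with requests.get(url, params=params, stream=True) as resp, open("sample.ts", "wb") as out:
    resp.raise_for_status()
    for chunk in resp.iter_content(chunk_size=1048576):   # mirrors the direct_stream chunksize default
        out.write(chunk)
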
156
fHDHR/http/api/xmltv.py
Normal file
@ -0,0 +1,156 @@
from flask import Response, request, redirect
import xml.etree.ElementTree
from io import BytesIO
import urllib.parse

from fHDHR.tools import sub_el


class xmlTV():
    """Methods to create xmltv.xml"""
    endpoints = ["/api/xmltv", "/xmltv.xml"]
    endpoint_name = "api_xmltv"
    endpoint_methods = ["GET", "POST"]

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        if self.fhdhr.config.dict["fhdhr"]["require_auth"]:
            DeviceAuth = request.args.get('DeviceAuth', default=None, type=str)
            if DeviceAuth != self.fhdhr.config.dict["fhdhr"]["device_auth"]:
                return "not subscribed"

        base_url = request.url_root[:-1]

        method = request.args.get('method', default="get", type=str)

        source = request.args.get('source', default=self.fhdhr.config.dict["epg"]["def_method"], type=str)
        if source not in self.fhdhr.config.dict["main"]["valid_epg_methods"]:
            return "%s Invalid xmltv method" % source

        redirect_url = request.args.get('redirect', default=None, type=str)

        if method == "get":

            epgdict = self.fhdhr.device.epg.get_epg(source)
            xmltv_xml = self.create_xmltv(base_url, epgdict)

            return Response(status=200,
                            response=xmltv_xml,
                            mimetype='application/xml')

        elif method == "update":
            self.fhdhr.device.epg.update(source)

        elif method == "clearcache":
            self.fhdhr.device.epg.clear_epg_cache(source)

        else:
            return "%s Invalid Method" % method

        if redirect_url:
            return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Success" % method))
        else:
            return "%s Success" % method

    def xmltv_headers(self):
        """This method creates the XML headers for our xmltv"""
        xmltvgen = xml.etree.ElementTree.Element('tv')
        xmltvgen.set('source-info-url', self.fhdhr.config.dict["fhdhr"]["friendlyname"])
        xmltvgen.set('source-info-name', self.fhdhr.config.dict["main"]["servicename"])
        xmltvgen.set('generator-info-name', 'fHDHR')
        xmltvgen.set('generator-info-url', 'fHDHR/' + self.fhdhr.config.dict["main"]["reponame"])
        return xmltvgen

    def xmltv_file(self, xmltvgen):
        """This method is used to close out the xml file"""
        xmltvfile = BytesIO()
        xmltvfile.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
        xmltvfile.write(xml.etree.ElementTree.tostring(xmltvgen, encoding='UTF-8'))
        return xmltvfile.getvalue()

    def xmltv_empty(self):
        """This method is called when creation of a full xmltv is not possible"""
        return self.xmltv_file(self.xmltv_headers())

    def create_xmltv(self, base_url, epgdict):
        if not epgdict:
            return self.xmltv_empty()

        out = self.xmltv_headers()

        for c in list(epgdict.keys()):

            c_out = sub_el(out, 'channel', id=str(epgdict[c]['number']))
            sub_el(c_out, 'display-name',
                   text='%s %s' % (epgdict[c]['number'], epgdict[c]['callsign']))
            sub_el(c_out, 'display-name',
                   text='%s %s %s' % (epgdict[c]['number'], epgdict[c]['callsign'], str(epgdict[c]['id'])))
            sub_el(c_out, 'display-name', text=epgdict[c]['number'])
            sub_el(c_out, 'display-name', text=epgdict[c]['callsign'])
            sub_el(c_out, 'display-name', text=epgdict[c]['name'])

            if epgdict[c]["thumbnail"] is not None:
                if self.fhdhr.config.dict["epg"]["images"] == "proxy":
                    sub_el(c_out, 'icon', src=(str(base_url) + "/api/images?method=get&type=channel&id=" + str(epgdict[c]['id'])))
                else:
                    sub_el(c_out, 'icon', src=(epgdict[c]["thumbnail"]))
            else:
                sub_el(c_out, 'icon', src=(str(base_url) + "/api/images?method=generate&type=channel&message=" + urllib.parse.quote(epgdict[c]['name'])))

        for channelnum in list(epgdict.keys()):

            channel_listing = epgdict[channelnum]['listing']

            for program in channel_listing:

                prog_out = sub_el(out, 'programme',
                                  start=program['time_start'],
                                  stop=program['time_end'],
                                  channel=str(channelnum))

                sub_el(prog_out, 'title', lang='en', text=program['title'])

                sub_el(prog_out, 'desc', lang='en', text=program['description'])

                sub_el(prog_out, 'sub-title', lang='en', text='Movie: ' + program['sub-title'])

                sub_el(prog_out, 'length', units='minutes', text=str(int(program['duration_minutes'])))

                for f in program['genres']:
                    sub_el(prog_out, 'category', lang='en', text=f)
                    sub_el(prog_out, 'genre', lang='en', text=f)

                if program['seasonnumber'] and program['episodenumber']:
                    s_ = int(str(program['seasonnumber']), 10)
                    e_ = int(str(program['episodenumber']), 10)
                    sub_el(prog_out, 'episode-num', system='dd_progid',
                           text=str(program['id']))
                    sub_el(prog_out, 'episode-num', system='common',
                           text='S%02dE%02d' % (s_, e_))
                    sub_el(prog_out, 'episode-num', system='xmltv_ns',
                           text='%d.%d.' % (int(s_)-1, int(e_)-1))
                    sub_el(prog_out, 'episode-num', system='SxxExx">S',
                           text='S%02dE%02d' % (s_, e_))

                if program["thumbnail"]:
                    if self.fhdhr.config.dict["epg"]["images"] == "proxy":
                        sub_el(prog_out, 'icon', src=(str(base_url) + "/api/images?method=get&type=content&id=" + str(program['id'])))
                    else:
                        sub_el(prog_out, 'icon', src=(program["thumbnail"]))
                else:
                    sub_el(prog_out, 'icon', src=(str(base_url) + "/api/images?method=generate&type=content&message=" + urllib.parse.quote(program['title'])))

                if program['rating']:
                    rating_out = sub_el(prog_out, 'rating', system="MPAA")
                    sub_el(rating_out, 'value', text=program['rating'])

                if program['isnew']:
                    sub_el(prog_out, 'new')

        return self.xmltv_file(out)
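A hedged sketch of consuming the endpoint above once the server is running (host and port are assumptions; 5004 is the shipped default):

import xml.etree.ElementTree as ET
import requests

resp = requests.get("http://127.0.0.1:5004/xmltv.xml")
root = ET.fromstring(resp.content)
print(len(root.findall("channel")), "channels,", len(root.findall("programme")), "programmes")
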
27
fHDHR/http/files/__init__.py
Normal file
@ -0,0 +1,27 @@

from .favicon_ico import Favicon_ICO
from .style_css import Style_CSS

from .device_xml import Device_XML
from .lineup_xml import Lineup_XML

from .discover_json import Discover_JSON
from .lineup_json import Lineup_JSON
from .lineup_status_json import Lineup_Status_JSON


class fHDHR_Files():

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

        self.favicon = Favicon_ICO(fhdhr)
        self.style = Style_CSS(fhdhr)

        self.device_xml = Device_XML(fhdhr)
        self.lineup_xml = Lineup_XML(fhdhr)

        self.discover_json = Discover_JSON(fhdhr)
        self.lineup_json = Lineup_JSON(fhdhr)
        self.lineup_status_json = Lineup_Status_JSON(fhdhr)
47
fHDHR/http/files/device_xml.py
Normal file
@ -0,0 +1,47 @@
from flask import Response, request
from io import BytesIO
import xml.etree.ElementTree

from fHDHR.tools import sub_el


class Device_XML():
    endpoints = ["/device.xml"]
    endpoint_name = "file_device_xml"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        base_url = request.url_root[:-1]

        out = xml.etree.ElementTree.Element('root')
        out.set('xmlns', "urn:schemas-upnp-org:device-1-0")

        sub_el(out, 'URLBase', base_url)

        specVersion_out = sub_el(out, 'specVersion')
        sub_el(specVersion_out, 'major', "1")
        sub_el(specVersion_out, 'minor', "0")

        device_out = sub_el(out, 'device')
        sub_el(device_out, 'deviceType', "urn:schemas-upnp-org:device:MediaServer:1")
        sub_el(device_out, 'friendlyName', self.fhdhr.config.dict["fhdhr"]["friendlyname"])
        sub_el(device_out, 'manufacturer', self.fhdhr.config.dict["fhdhr"]["reporting_manufacturer"])
        sub_el(device_out, 'modelName', self.fhdhr.config.dict["fhdhr"]["reporting_model"])
        sub_el(device_out, 'modelNumber', self.fhdhr.config.dict["fhdhr"]["reporting_model"])
        sub_el(device_out, 'serialNumber')
        sub_el(device_out, 'UDN', "uuid:" + self.fhdhr.config.dict["main"]["uuid"])

        fakefile = BytesIO()
        fakefile.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
        fakefile.write(xml.etree.ElementTree.tostring(out, encoding='UTF-8'))
        device_xml = fakefile.getvalue()

        return Response(status=200,
                        response=device_xml,
                        mimetype='application/xml')
35
fHDHR/http/files/discover_json.py
Normal file
@ -0,0 +1,35 @@
from flask import Response, request
import json


class Discover_JSON():
    endpoints = ["/discover.json"]
    endpoint_name = "file_discover_json"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        base_url = request.url_root[:-1]

        jsondiscover = {
            "FriendlyName": self.fhdhr.config.dict["fhdhr"]["friendlyname"],
            "Manufacturer": self.fhdhr.config.dict["fhdhr"]["reporting_manufacturer"],
            "ModelNumber": self.fhdhr.config.dict["fhdhr"]["reporting_model"],
            "FirmwareName": self.fhdhr.config.dict["fhdhr"]["reporting_firmware_name"],
            "TunerCount": self.fhdhr.config.dict["fhdhr"]["tuner_count"],
            "FirmwareVersion": self.fhdhr.config.dict["fhdhr"]["reporting_firmware_ver"],
            "DeviceID": self.fhdhr.config.dict["main"]["uuid"],
            "DeviceAuth": self.fhdhr.config.dict["fhdhr"]["device_auth"],
            "BaseURL": base_url,
            "LineupURL": base_url + "/lineup.json"
            }
        discover_json = json.dumps(jsondiscover, indent=4)

        return Response(status=200,
                        response=discover_json,
                        mimetype='application/json')
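discover.json is the first thing an HDHomeRun-aware client (Plex, Emby, etc.) reads. A quick check against a running instance might look like this sketch (host/port assumed, 5004 is the shipped default):

import requests

info = requests.get("http://127.0.0.1:5004/discover.json").json()
print(info["FriendlyName"], "tuners:", info["TunerCount"], "lineup:", info["LineupURL"])
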
18
fHDHR/http/files/favicon_ico.py
Normal file
@ -0,0 +1,18 @@
from flask import send_from_directory


class Favicon_ICO():
    endpoints = ["/favicon.ico"]
    endpoint_name = "file_favicon_ico"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        return send_from_directory(self.fhdhr.config.internal["paths"]["www_dir"],
                                   'favicon.ico',
                                   mimetype='image/vnd.microsoft.icon')
31
fHDHR/http/files/lineup_json.py
Normal file
@ -0,0 +1,31 @@
from flask import Response, request
import json


class Lineup_JSON():
    endpoints = ["/lineup.json"]
    endpoint_name = "file_lineup_json"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        base_url = request.url_root[:-1]

        jsonlineup = []
        for fhdhr_id in list(self.fhdhr.device.channels.list.keys()):
            channel_obj = self.fhdhr.device.channels.list[fhdhr_id]
            if channel_obj.enabled:
                lineup_dict = channel_obj.lineup_dict()
                lineup_dict["URL"] = base_url + lineup_dict["URL"]
                jsonlineup.append(lineup_dict)

        lineup_json = json.dumps(jsonlineup, indent=4)

        return Response(status=200,
                        response=lineup_json,
                        mimetype='application/json')
45
fHDHR/http/files/lineup_status_json.py
Normal file
@ -0,0 +1,45 @@
from flask import Response
import json


class Lineup_Status_JSON():
    endpoints = ["/lineup_status.json"]
    endpoint_name = "file_lineup_status_json"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        station_scanning = self.fhdhr.device.station_scan.scanning()
        if station_scanning:
            jsonlineup = self.scan_in_progress()
        elif not len(self.fhdhr.device.channels.list):
            jsonlineup = self.scan_in_progress()
        else:
            jsonlineup = self.not_scanning()
        lineup_json = json.dumps(jsonlineup, indent=4)

        return Response(status=200,
                        response=lineup_json,
                        mimetype='application/json')

    def scan_in_progress(self):
        jsonlineup = {
            "ScanInProgress": "true",
            "Progress": 99,
            "Found": len(self.fhdhr.device.channels.list)
            }
        return jsonlineup

    def not_scanning(self):
        jsonlineup = {
            "ScanInProgress": "false",
            "ScanPossible": "true",
            "Source": self.fhdhr.config.dict["fhdhr"]["reporting_tuner_type"],
            "SourceList": [self.fhdhr.config.dict["fhdhr"]["reporting_tuner_type"]],
            }
        return jsonlineup
41
fHDHR/http/files/lineup_xml.py
Normal file
@ -0,0 +1,41 @@
from flask import Response, request
from io import BytesIO
import xml.etree.ElementTree

from fHDHR.tools import sub_el


class Lineup_XML():
    endpoints = ["/lineup.xml"]
    endpoint_name = "file_lineup_xml"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        base_url = request.url_root[:-1]

        out = xml.etree.ElementTree.Element('Lineup')
        for fhdhr_id in list(self.fhdhr.device.channels.list.keys()):
            channel_obj = self.fhdhr.device.channels.list[fhdhr_id]
            if channel_obj.enabled:
                lineup_dict = channel_obj.lineup_dict()
                lineup_dict["URL"] = base_url + lineup_dict["URL"]
                program_out = sub_el(out, 'Program')
                sub_el(program_out, 'GuideNumber', lineup_dict['GuideNumber'])
                sub_el(program_out, 'GuideName', lineup_dict['GuideName'])
                sub_el(program_out, 'Tags', lineup_dict['Tags'])
                sub_el(program_out, 'URL', lineup_dict['URL'])

        fakefile = BytesIO()
        fakefile.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
        fakefile.write(xml.etree.ElementTree.tostring(out, encoding='UTF-8'))
        lineup_xml = fakefile.getvalue()

        return Response(status=200,
                        response=lineup_xml,
                        mimetype='application/xml')
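The sub_el helper imported from fHDHR.tools is not shown in this commit view. A minimal stand-in consistent with how it is called in device.xml, lineup.xml and the xmltv builder (text passed positionally or by keyword, remaining keywords treated as attributes) might look like the sketch below; the real fHDHR.tools implementation may differ.

import xml.etree.ElementTree as ET

def sub_el(parent, name, text=None, **kwargs):
    # hypothetical stand-in: create a child element, set optional text, extra keywords become attributes
    el = ET.SubElement(parent, name, {k: str(v) for k, v in kwargs.items()})
    if text is not None:
        el.text = str(text)
    return el
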
17
fHDHR/http/files/style_css.py
Normal file
@ -0,0 +1,17 @@
from flask import send_from_directory


class Style_CSS():
    endpoints = ["/style.css"]
    endpoint_name = "file_style_css"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        return send_from_directory(self.fhdhr.config.internal["paths"]["www_dir"],
                                   'style.css')
29
fHDHR/http/pages/__init__.py
Normal file
@ -0,0 +1,29 @@

from .index_html import Index_HTML
from .origin_html import Origin_HTML
from .channels_html import Channels_HTML
from .guide_html import Guide_HTML
from .cluster_html import Cluster_HTML
from .streams_html import Streams_HTML
from .xmltv_html import xmlTV_HTML
from .version_html import Version_HTML
from .diagnostics_html import Diagnostics_HTML
from .settings_html import Settings_HTML


class fHDHR_Pages():

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

        self.index_html = Index_HTML(fhdhr)
        self.origin_html = Origin_HTML(fhdhr)
        self.channels_html = Channels_HTML(fhdhr)
        self.guide_html = Guide_HTML(fhdhr)
        self.cluster_html = Cluster_HTML(fhdhr)
        self.streams_html = Streams_HTML(fhdhr)
        self.xmltv_html = xmlTV_HTML(fhdhr)
        self.version_html = Version_HTML(fhdhr)
        self.diagnostics_html = Diagnostics_HTML(fhdhr)
        self.settings_html = Settings_HTML(fhdhr)
23
fHDHR/http/pages/channels_html.py
Normal file
@ -0,0 +1,23 @@
from flask import request, render_template


class Channels_HTML():
    endpoints = ["/channels", "/channels.html"]
    endpoint_name = "page_channels_html"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        channelslist = []
        for fhdhr_id in list(self.fhdhr.device.channels.list.keys()):
            channel_obj = self.fhdhr.device.channels.list[fhdhr_id]
            channel_dict = channel_obj.dict.copy()
            channel_dict["play_url"] = channel_obj.play_url()
            channelslist.append(channel_dict)

        return render_template('channels.html', request=request, fhdhr=self.fhdhr, channelslist=channelslist)
50
fHDHR/http/pages/cluster_html.py
Normal file
@ -0,0 +1,50 @@
from flask import request, render_template
import urllib.parse


class Cluster_HTML():
    endpoints = ["/cluster", "/cluster.html"]
    endpoint_name = "page_cluster_html"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr
        self.location_dict = {
            "name": self.fhdhr.config.dict["fhdhr"]["friendlyname"],
            "location": self.fhdhr.device.cluster.location,
            "joined": "N/A",
            "url_query": self.fhdhr.device.cluster.location_url
            }

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        locations_list = []

        if self.fhdhr.config.dict["fhdhr"]["discovery_address"]:

            locations_list.append(self.location_dict)

            fhdhr_list = self.fhdhr.device.cluster.get_list()
            for location in list(fhdhr_list.keys()):

                if location in list(self.fhdhr.device.cluster.cluster().keys()):
                    location_name = self.fhdhr.device.cluster.cluster()[location]["name"]
                else:
                    try:
                        location_info_url = location + "/discover.json"
                        location_info = self.fhdhr.web.session.get(location_info_url).json()
                        location_name = location_info["FriendlyName"]
                    except self.fhdhr.web.exceptions.ConnectionError:
                        self.fhdhr.logger.error("Unreachable: " + location)
                        location_name = location  # fall back to the address so the entry below still renders
                location_dict = {
                    "name": location_name,
                    "location": location,
                    "joined": str(fhdhr_list[location]["Joined"]),
                    "url_query": urllib.parse.quote(location)
                    }
                locations_list.append(location_dict)

        return render_template('cluster.html', request=request, fhdhr=self.fhdhr, locations_list=locations_list)
27
fHDHR/http/pages/diagnostics_html.py
Normal file
@ -0,0 +1,27 @@
from flask import request, render_template


class Diagnostics_HTML():
    endpoints = ["/diagnostics", "/diagnostics.html"]
    endpoint_name = "page_diagnostics_html"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        # a list of [label, url] pairs containing button information
        button_list = [
            ["debug.json", "/api/debug"],
            ["device.xml", "device.xml"],
            ["discover.json", "discover.json"],
            ["lineup.json", "lineup.json"],
            ["lineup.xml", "lineup.xml"],
            ["lineup_status.json", "lineup_status.json"],
            ["cluster.json", "/api/cluster?method=get"]
            ]

        return render_template('diagnostics.html', request=request, fhdhr=self.fhdhr, button_list=button_list)
43
fHDHR/http/pages/guide_html.py
Normal file
@ -0,0 +1,43 @@
from flask import request, render_template
import datetime

from fHDHR.tools import humanized_time


class Guide_HTML():
    endpoints = ["/guide", "/guide.html"]
    endpoint_name = "page_guide_html"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        nowtime = datetime.datetime.utcnow()

        chan_guide_list = []

        source = request.args.get('source', default=self.fhdhr.device.epg.def_method, type=str)
        epg_methods = self.fhdhr.device.epg.valid_epg_methods
        if source not in epg_methods:
            source = self.fhdhr.device.epg.def_method

        for channel in self.fhdhr.device.epg.whats_on_allchans(source):
            end_time = datetime.datetime.strptime(channel["listing"][0]["time_end"], '%Y%m%d%H%M%S +0000')
            remaining_time = humanized_time(int((end_time - nowtime).total_seconds()))

            chan_dict = {
                "name": channel["name"],
                "number": channel["number"],
                "chan_thumbnail": channel["thumbnail"],
                "listing_title": channel["listing"][0]["title"],
                "listing_thumbnail": channel["listing"][0]["thumbnail"],
                "listing_description": channel["listing"][0]["description"],
                "remaining_time": str(remaining_time)
                }
            chan_guide_list.append(chan_dict)

        return render_template('guide.html', request=request, fhdhr=self.fhdhr, chan_guide_list=chan_guide_list, epg_methods=epg_methods)
27
fHDHR/http/pages/index_html.py
Normal file
@ -0,0 +1,27 @@
from flask import request, render_template


class Index_HTML():
    endpoints = ["/", "/index", "/index.html"]
    endpoint_name = "page_root_html"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        tuners_in_use = self.fhdhr.device.tuners.inuse_tuner_count()
        max_tuners = self.fhdhr.device.tuners.max_tuners

        fhdhr_status_dict = {
            "Script Directory": str(self.fhdhr.config.internal["paths"]["script_dir"]),
            "Config File": str(self.fhdhr.config.config_file),
            "Cache Path": str(self.fhdhr.config.internal["paths"]["cache_dir"]),
            "Total Channels": len(self.fhdhr.device.channels.list),
            "Tuner Usage": ("%s/%s" % (str(tuners_in_use), str(max_tuners))),
            }

        return render_template('index.html', request=request, fhdhr=self.fhdhr, fhdhr_status_dict=fhdhr_status_dict, list=list)
18
fHDHR/http/pages/origin_html.py
Normal file
@ -0,0 +1,18 @@
from flask import request, render_template


class Origin_HTML():
    endpoints = ["/origin", "/origin.html"]
    endpoint_name = "page_origin_html"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        origin_status_dict = self.fhdhr.origin.get_status_dict()
        origin_status_dict["Total Channels"] = len(self.fhdhr.device.channels.list)
        return render_template('origin.html', request=request, fhdhr=self.fhdhr, origin_status_dict=origin_status_dict, list=list)
33
fHDHR/http/pages/settings_html.py
Normal file
@ -0,0 +1,33 @@
from flask import request, render_template


class Settings_HTML():
    endpoints = ["/settings", "/settings.html"]
    endpoint_name = "page_settings_html"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        web_settings_dict = {}
        for config_section in list(self.fhdhr.config.conf_default.keys()):
            web_settings_dict[config_section] = {}

            for config_item in list(self.fhdhr.config.conf_default[config_section].keys()):
                if self.fhdhr.config.conf_default[config_section][config_item]["config_web"]:
                    real_config_section = config_section
                    if config_section == self.fhdhr.config.dict["main"]["dictpopname"]:
                        real_config_section = "origin"
                    web_settings_dict[config_section][config_item] = {
                        "value": self.fhdhr.config.dict[real_config_section][config_item],
                        "value_default": self.fhdhr.config.conf_default[config_section][config_item]["value"],
                        "hide": self.fhdhr.config.conf_default[config_section][config_item]["config_web_hidden"]
                        }
            if not len(web_settings_dict[config_section].keys()):
                del web_settings_dict[config_section]

        return render_template('settings.html', request=request, fhdhr=self.fhdhr, web_settings_dict=web_settings_dict, list=list)
30
fHDHR/http/pages/streams_html.py
Normal file
@ -0,0 +1,30 @@
from flask import request, render_template


class Streams_HTML():
    endpoints = ["/streams", "/streams.html"]
    endpoint_name = "page_streams_html"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        tuner_list = []
        tuner_status = self.fhdhr.device.tuners.status()
        for tuner in list(tuner_status.keys()):
            tuner_dict = {
                "number": str(tuner),
                "status": str(tuner_status[tuner]["status"]),
                }
            if tuner_status[tuner]["status"] == "Active":
                tuner_dict["channel_number"] = tuner_status[tuner]["channel"]
                tuner_dict["method"] = tuner_status[tuner]["method"]
                tuner_dict["play_duration"] = str(tuner_status[tuner]["Play Time"])

            tuner_list.append(tuner_dict)

        return render_template('streams.html', request=request, fhdhr=self.fhdhr, tuner_list=tuner_list)
18
fHDHR/http/pages/version_html.py
Normal file
@ -0,0 +1,18 @@
from flask import request, render_template


class Version_HTML():
    endpoints = ["/version", "/version.html"]
    endpoint_name = "page_version_html"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):
        version_dict = {}
        for key in list(self.fhdhr.config.internal["versions"].keys()):
            version_dict[key] = self.fhdhr.config.internal["versions"][key]
        return render_template('version.html', request=request, fhdhr=self.fhdhr, version_dict=version_dict, list=list)
16
fHDHR/http/pages/xmltv_html.py
Normal file
@ -0,0 +1,16 @@
from flask import request, render_template


class xmlTV_HTML():
    endpoints = ["/xmltv", "/xmltv.html"]
    endpoint_name = "page_xmltv_html"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, *args):
        return self.get(*args)

    def get(self, *args):

        return render_template('xmltv.html', request=request, fhdhr=self.fhdhr)
12
fHDHR/http/watch/__init__.py
Normal file
@ -0,0 +1,12 @@

from .auto import Auto
from .tuner import Tuner


class fHDHR_WATCH():

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

        self.auto = Auto(fhdhr)
        self.tuner = Tuner(fhdhr)
45
fHDHR/http/watch/auto.py
Normal file
@ -0,0 +1,45 @@
from flask import request, abort, redirect
import urllib.parse


class Auto():
    endpoints = ['/auto/<channel>']
    endpoint_name = "watch_auto"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, channel, *args):
        return self.get(channel, *args)

    def get(self, channel, *args):

        method = request.args.get('method', default=self.fhdhr.config.dict["fhdhr"]["stream_type"], type=str)

        redirect_url = "/api/watch?method=%s" % (method)

        if channel.startswith("v"):
            channel_number = channel.replace('v', '')
        elif channel.startswith("ch"):
            channel_freq = channel.replace('ch', '').split("-")[0]
            subchannel = 0
            if "-" in channel:
                subchannel = channel.replace('ch', '').split("-")[1]
            self.fhdhr.logger.error("Not Implemented %s-%s" % (str(channel_freq), str(subchannel)))
            abort(501, "Not Implemented %s-%s" % (str(channel_freq), str(subchannel)))
        else:
            channel_number = channel

        redirect_url += "&channel=%s" % str(channel_number)

        duration = request.args.get('duration', default=0, type=int)
        if duration:
            redirect_url += "&duration=%s" % str(duration)

        transcode = request.args.get('transcode', default=None, type=str)
        if transcode:
            redirect_url += "&transcode=%s" % str(transcode)

        redirect_url += "&accessed=%s" % urllib.parse.quote(request.url)

        return redirect(redirect_url)
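A standalone illustration (made-up paths) of the channel parsing used by /auto/<channel> above and /tuner<tuner_number>/<channel> below: a leading "v" is stripped, bare numbers pass through, and "ch" frequency paths are rejected with a 501.

for path in ["v2", "7", "ch33-1"]:
    if path.startswith("v"):
        print(path, "-> channel", path.replace("v", ""))
    elif path.startswith("ch"):
        print(path, "-> 501 Not Implemented")
    else:
        print(path, "-> channel", path)
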
47
fHDHR/http/watch/tuner.py
Normal file
@ -0,0 +1,47 @@
from flask import request, abort, redirect
import urllib.parse


class Tuner():
    endpoints = ['/tuner<tuner_number>/<channel>']
    endpoint_name = "watch_tuner"

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

    def __call__(self, tuner_number, channel, *args):
        return self.get(tuner_number, channel, *args)

    def get(self, tuner_number, channel, *args):

        method = request.args.get('method', default=self.fhdhr.config.dict["fhdhr"]["stream_type"], type=str)

        redirect_url = "/api/watch?method=%s" % (method)

        redirect_url += "&tuner=%s" % str(tuner_number)

        if channel.startswith("v"):
            channel_number = channel.replace('v', '')
        elif channel.startswith("ch"):
            channel_freq = channel.replace('ch', '').split("-")[0]
            subchannel = 0
            if "-" in channel:
                subchannel = channel.replace('ch', '').split("-")[1]
            self.fhdhr.logger.error("Not Implemented %s-%s" % (str(channel_freq), str(subchannel)))
            abort(501, "Not Implemented %s-%s" % (str(channel_freq), str(subchannel)))
        else:
            channel_number = channel

        redirect_url += "&channel=%s" % str(channel_number)

        duration = request.args.get('duration', default=0, type=int)
        if duration:
            redirect_url += "&duration=%s" % str(duration)

        transcode = request.args.get('transcode', default=None, type=str)
        if transcode:
            redirect_url += "&transcode=%s" % str(transcode)

        redirect_url += "&accessed=%s" % urllib.parse.quote(request.url)

        return redirect(redirect_url)
93
fHDHR/origin/__init__.py
Normal file
@ -0,0 +1,93 @@
from .origin_service import OriginService
from .origin_channels import OriginChannels
from .origin_epg import OriginEPG

import fHDHR.exceptions


class OriginEPG_StandIN():
    def __init__(self):
        pass

    def update_epg(self, channels):
        return {}


class OriginChannels_StandIN():
    def __init__(self):
        pass

    def get_channels(self):
        return []

    def get_channel_stream(self, chandict):
        return None


class OriginServiceWrapper():

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

        self.servicename = fhdhr.config.dict["main"]["servicename"]

        self.setup_success = None
        self.setup()

    def setup(self):

        try:
            self.origin = OriginService(self.fhdhr)
            self.setup_success = True
            self.fhdhr.logger.info("%s Setup Success" % self.servicename)
        except fHDHR.exceptions.OriginSetupError as e:
            self.fhdhr.logger.error(e)
            self.setup_success = False

        if self.setup_success:
            self.channels = OriginChannels(self.fhdhr, self.origin)
            self.epg = OriginEPG(self.fhdhr)
        else:
            self.channels = OriginChannels_StandIN()
            self.epg = OriginEPG_StandIN()

    def get_channels(self):
        return self.channels.get_channels()

    def get_channel_stream(self, chandict):
        return self.channels.get_channel_stream(chandict)

    def update_epg(self, channels):
        return self.epg.update_epg(channels)

    def get_status_dict(self):

        if self.setup_success:
            status_dict = {
                "Setup": "Success",
                }

            try:
                full_status_dict = self.origin.get_status_dict()
                for status_key in list(full_status_dict.keys()):
                    status_dict[status_key] = full_status_dict[status_key]
                return status_dict
            except AttributeError:
                return status_dict
        else:
            return {
                "Setup": "Failed",
                }

    def __getattr__(self, name):
        ''' will only get called for undefined attributes '''
        if hasattr(self.fhdhr, name):
            return eval("self.fhdhr." + name)
        if hasattr(self.origin, name):
            return eval("self.origin." + name)
        elif hasattr(self.channels, name):
            return eval("self.channels." + name)
        elif hasattr(self.epg, name):
            return eval("self.epg." + name)
        else:
            raise AttributeError(name)
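The Stand-IN classes above exist so the rest of fHDHR keeps working when PlutoTV setup fails. A hedged usage sketch, where fhdhr stands for the already-initialized core object:

wrapper = OriginServiceWrapper(fhdhr)
print(wrapper.get_status_dict())   # {"Setup": "Failed"} if OriginSetupError was raised
print(wrapper.get_channels())      # [] from OriginChannels_StandIN in that case
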
90
fHDHR/origin/origin_channels.py
Normal file
@ -0,0 +1,90 @@
import urllib.parse
import m3u8
import time


class OriginChannels():

    def __init__(self, fhdhr, origin):
        self.fhdhr = fhdhr
        self.origin = origin

        self.base_api_url = 'https://api.pluto.tv'

    def get_channels(self):

        url = self.base_api_url + "/v2/channels.json"
        urlopn = self.fhdhr.web.session.get(url)
        pluto_chan_list = urlopn.json()

        channel_list = []
        for channel_dict in pluto_chan_list:

            if (channel_dict["isStitched"]
               and channel_dict["visibility"] in ["everyone"]
               and not channel_dict['onDemand']
               and channel_dict["name"] != "Announcement"):

                clean_station_item = {
                    "name": channel_dict["name"],
                    "callsign": channel_dict["name"],
                    "number": str(channel_dict["number"]),
                    "id": str(channel_dict["_id"]),
                    }
                channel_list.append(clean_station_item)
        return channel_list

    def get_channel_stream(self, chandict):
        url = self.base_api_url + "/v2/channels.json"
        urlopn = self.fhdhr.web.session.get(url)
        pluto_chan_list = urlopn.json()
        pluto_chandict = self.get_channel_dict_pluto(pluto_chan_list, "_id", chandict["origin_id"])

        streamurl = pluto_chandict["stitched"]["urls"][0]["url"]
        streamurl = self.channel_stream_url_cleanup(streamurl)
        if self.fhdhr.config.dict["origin"]["force_best"]:
            streamurl = self.m3u8_beststream(streamurl)
        return streamurl

    def get_channel_dict_pluto(self, chanlist, keyfind, valfind):
        return next(item for item in chanlist if item[keyfind] == valfind)

    def channel_stream_url_cleanup(self, streamurl):

        streamurl = streamurl.replace("\\u0026", "&")
        streamurl_base = streamurl.split("?")[0]
        streamurl_params = streamurl.split("?")[1].split("&")

        paramdict = {}

        for param in streamurl_params:
            paramkey = param.split("=")[0]
            paramval = param.split("=")[1]
            paramdict[paramkey] = paramval

        paramdict["deviceMake"] = "Chrome"
        paramdict["deviceType"] = "web"
        paramdict["deviceModel"] = "Chrome"
        paramdict["sid"] = self.fhdhr.config.dict["main"]["uuid"] + str(time.time())
        paramdict["userId"] = self.origin.userid or ''

        paramdict["serverSideAds"] = "true"

        return streamurl_base + "?" + urllib.parse.urlencode(paramdict)

    def m3u8_beststream(self, m3u8_url):
        bestStream = None
        videoUrlM3u = m3u8.load(m3u8_url)
        if not videoUrlM3u.is_variant:
            return m3u8_url

        for videoStream in videoUrlM3u.playlists:
            if not bestStream:
                bestStream = videoStream
            elif videoStream.stream_info.bandwidth > bestStream.stream_info.bandwidth:
                bestStream = videoStream

        if bestStream:  # condition was inverted; calling .absolute_uri on None would raise
            return bestStream.absolute_uri
        else:
            return m3u8_url
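What channel_stream_url_cleanup does, shown in isolation with a made-up URL (not a real PlutoTV address): split off the query string, overwrite the device/session parameters, and re-encode.

import urllib.parse

raw = "https://example.invalid/stitch/hls/channel/123/master.m3u8?deviceType=&sid="
base, query = raw.split("?")
params = dict(p.split("=") for p in query.split("&"))
params.update({"deviceMake": "Chrome", "deviceType": "web", "deviceModel": "Chrome", "serverSideAds": "true"})
print(base + "?" + urllib.parse.urlencode(params))
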
167
fHDHR/origin/origin_epg.py
Normal file
@ -0,0 +1,167 @@
import datetime

import fHDHR.tools


class OriginEPG():

    def __init__(self, fhdhr):
        self.fhdhr = fhdhr

        self.base_api_url = 'https://api.pluto.tv'

    def xmltimestamp_pluto(self, inputtime):
        xmltime = inputtime.replace('Z', '+00:00')
        xmltime = datetime.datetime.fromisoformat(xmltime)
        xmltime = xmltime.strftime('%Y%m%d%H%M%S %z')
        return xmltime

    def duration_pluto_minutes(self, induration):
        return ((int(induration))/1000/60)

    def pluto_calculate_duration(self, start_time, end_time):
        start_time = start_time.replace('Z', '+00:00')
        start_time = datetime.datetime.fromisoformat(start_time)

        end_time = end_time.replace('Z', '+00:00')
        end_time = datetime.datetime.fromisoformat(end_time)

        duration = (end_time - start_time).total_seconds() / 60
        return duration

    def update_epg(self, fhdhr_channels):
        programguide = {}

        todaydate = datetime.datetime.utcnow().date()
        self.remove_stale_cache(todaydate)

        time_list = []
        xtimestart = datetime.datetime.today().replace(hour=0, minute=0, second=0, microsecond=0)
        xtime = xtimestart
        xtimeend = datetime.datetime.utcnow() + datetime.timedelta(hours=6)
        while xtime <= xtimeend:
            guide_time = {"start": str(xtime.strftime('%Y-%m-%dT%H:00:00'))}
            if (xtime + datetime.timedelta(hours=6)) <= xtimeend:
                guide_time["end"] = str((xtime + datetime.timedelta(hours=6)).strftime('%Y-%m-%dT%H:00:00'))
            else:
                guide_time["end"] = str(xtimeend.strftime('%Y-%m-%dT%H:00:00'))
            xtime = xtime + datetime.timedelta(hours=6)
            time_list.append(guide_time)

        cached_items = self.get_cached(time_list)

        for result in cached_items:

            for c in result:

                if (c["isStitched"]
                   and c["visibility"] in ["everyone"]
                   and not c['onDemand']
                   and c["name"] != "Announcement"):

                    cdict = fHDHR.tools.xmldictmaker(c, ["name", "number", "_id", "timelines", "colorLogoPNG"], list_items=["timelines"])

                    chandict = fhdhr_channels.get_channel_dict("origin_id", cdict["_id"])

                    if str(chandict['number']) not in list(programguide.keys()):

                        programguide[str(chandict['number'])] = {
                            "callsign": chandict["callsign"],
                            "name": chandict["name"] or chandict["callsign"],
                            "number": chandict["number"],
                            "id": str(chandict["origin_id"]),
                            "thumbnail": None,
                            "listing": [],
                            }

                        try:
                            thumbnail = cdict["colorLogoPNG"]["path"].split("?")[0]
                        except TypeError:
                            thumbnail = None
                        programguide[str(chandict['number'])]["thumbnail"] = thumbnail

                    for program_item in cdict["timelines"]:

                        progdict = fHDHR.tools.xmldictmaker(program_item, ['_id', 'start', 'stop', 'title', 'episode'])
                        episodedict = fHDHR.tools.xmldictmaker(program_item['episode'], ['duration', 'poster', '_id', 'rating', 'description', 'genre', 'subGenre', 'name'])

                        if not episodedict["duration"]:
                            episodedict["duration"] = self.pluto_calculate_duration(progdict["start"], progdict["stop"])
                        else:
                            episodedict["duration"] = self.duration_pluto_minutes(episodedict["duration"])

                        clean_prog_dict = {
                            "time_start": self.xmltimestamp_pluto(progdict["start"]),
                            "time_end": self.xmltimestamp_pluto(progdict["stop"]),
                            "duration_minutes": episodedict["duration"],
                            "thumbnail": None,
                            "title": progdict['title'] or "Unavailable",
                            "sub-title": episodedict['name'] or "Unavailable",
                            "description": episodedict['description'] or "Unavailable",
                            "rating": episodedict['rating'] or "N/A",
                            "episodetitle": None,
                            "releaseyear": None,
                            "genres": [],
                            "seasonnumber": None,
                            "episodenumber": None,
                            "isnew": False,
                            "id": episodedict['_id'] or self.xmltimestamp_pluto(progdict["start"]),
                            }
                        try:
                            thumbnail = episodedict["poster"]["path"].split("?")[0]
                        except TypeError:
                            thumbnail = None
                        clean_prog_dict["thumbnail"] = thumbnail

                        clean_prog_dict["genres"].extend(episodedict["genre"].split(" \\u0026 "))
                        clean_prog_dict["genres"].append(episodedict["subGenre"])

                        programguide[str(chandict["number"])]["listing"].append(clean_prog_dict)

        return programguide

    def get_cached(self, time_list):
        for times in time_list:
            url = self.base_api_url + '/v2/channels?start=%s.000Z&stop=%s.000Z' % (times["start"], times["end"])
            self.get_cached_item(times["start"], url)
        cache_list = self.fhdhr.db.get_cacheitem_value("cache_list", "offline_cache", "origin") or []
        return [self.fhdhr.db.get_cacheitem_value(x, "offline_cache", "origin") for x in cache_list]

    def get_cached_item(self, cache_key, url):
        cache_key = datetime.datetime.strptime(cache_key, '%Y-%m-%dT%H:%M:%S').timestamp()
        cacheitem = self.fhdhr.db.get_cacheitem_value(str(cache_key), "offline_cache", "origin")
        if cacheitem:
            self.fhdhr.logger.info('FROM CACHE: ' + str(cache_key))
            return cacheitem
        else:
            self.fhdhr.logger.info('Fetching: ' + url)
            try:
                resp = self.fhdhr.web.session.get(url)
            except self.fhdhr.web.exceptions.HTTPError:
                self.fhdhr.logger.info('Got an error! Ignoring it.')
                return
            result = resp.json()

            self.fhdhr.db.set_cacheitem_value(str(cache_key), "offline_cache", result, "origin")
            cache_list = self.fhdhr.db.get_cacheitem_value("cache_list", "offline_cache", "origin") or []
            cache_list.append(str(cache_key))
            self.fhdhr.db.set_cacheitem_value("cache_list", "offline_cache", cache_list, "origin")

    def remove_stale_cache(self, todaydate):
        cache_clear_time = todaydate.strftime('%Y-%m-%dT%H:00:00')
        cache_clear_time = datetime.datetime.strptime(cache_clear_time, '%Y-%m-%dT%H:%M:%S').timestamp()
        cache_list = self.fhdhr.db.get_cacheitem_value("cache_list", "offline_cache", "origin") or []
        cache_to_kill = []
        for cacheitem in cache_list:
            if float(cacheitem) < cache_clear_time:
                cache_to_kill.append(cacheitem)
                self.fhdhr.db.delete_cacheitem_value(str(cacheitem), "offline_cache", "origin")
                self.fhdhr.logger.info('Removing stale cache: ' + str(cacheitem))
        self.fhdhr.db.set_cacheitem_value("cache_list", "offline_cache", [x for x in cache_list if x not in cache_to_kill], "origin")

    def clear_cache(self):
        cache_list = self.fhdhr.db.get_cacheitem_value("cache_list", "offline_cache", "origin") or []
        for cacheitem in cache_list:
            self.fhdhr.db.delete_cacheitem_value(cacheitem, "offline_cache", "origin")
            self.fhdhr.logger.info('Removing cache: ' + str(cacheitem))
        self.fhdhr.db.delete_cacheitem_value("cache_list", "offline_cache", "origin")
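A quick check of the two time helpers above, using sample timestamps rather than real guide data: Pluto's ISO-8601 strings become XMLTV-style stamps, and durations come out in minutes.

import datetime

start, stop = "2020-10-01T12:00:00.000Z", "2020-10-01T12:30:00.000Z"
s = datetime.datetime.fromisoformat(start.replace("Z", "+00:00"))
e = datetime.datetime.fromisoformat(stop.replace("Z", "+00:00"))
print(s.strftime('%Y%m%d%H%M%S %z'))    # 20201001120000 +0000
print((e - s).total_seconds() / 60)     # 30.0
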