mirror of https://github.com/fHDHR/fHDHR_NextPVR.git synced 2025-12-06 10:26:57 -05:00

Compare commits


292 Commits
v0.4.0 ... main

Author SHA1 Message Date
Deathbybandaid
e40543552a
Merge pull request #160 from deathbybandaid/dev
Update Readme.md with Deprecation Warning
2021-02-01 09:39:09 -05:00
deathbybandaid
c376cfcaa9 Update Readme.md with Deprecation Warning 2021-02-01 09:37:28 -05:00
Deathbybandaid
6ecf54a8d0
Merge pull request #159 from deathbybandaid/dev
Improve Config for Zero values
2021-02-01 08:03:39 -05:00
deathbybandaid
0d185e6704 Improve Config for Zero values 2021-02-01 08:01:52 -05:00
Deathbybandaid
09ddda62fc
Merge pull request #158 from deathbybandaid/dev
Channel Editor Web Fixings
2021-01-31 16:55:02 -05:00
deathbybandaid
b1d4e13c31 Channel Editor Web Fixings 2021-01-31 16:54:07 -05:00
Deathbybandaid
eaf26457f0
Merge pull request #157 from deathbybandaid/dev
Fix Handling of missing Versions for ffmpeg/vlc
2021-01-31 16:00:50 -05:00
deathbybandaid
b92247c810 Fix Handling of missing Versions for ffmpeg/vlc 2021-01-31 15:59:33 -05:00
Deathbybandaid
0fb454016d
Merge pull request #156 from deathbybandaid/dev
Dev
2021-01-31 15:31:01 -05:00
deathbybandaid
ab51ea02a1 Improve Detection of ffmpeg/vlc application paths 2021-01-31 15:24:19 -05:00
deathbybandaid
884d4b6e27 Improve Tuner Methods per Origin 2021-01-31 14:21:54 -05:00
Deathbybandaid
9c72f30a99
Merge pull request #155 from deathbybandaid/dev
Repair Tuner Scanning
2021-01-30 11:42:54 -05:00
deathbybandaid
72627510aa Repair Tuner Scanning 2021-01-30 11:41:57 -05:00
Deathbybandaid
9caeac2f41
Merge pull request #154 from deathbybandaid/dev
Dev
2021-01-30 11:11:36 -05:00
deathbybandaid
d03e575f0b Repair m3u/w3u 2021-01-30 11:10:16 -05:00
deathbybandaid
54e1e72104 Repair Stream Plugins 2021-01-30 11:00:58 -05:00
Deathbybandaid
b16fcf3b51
Merge pull request #153 from deathbybandaid/dev
Bugfixes
2021-01-30 10:42:48 -05:00
deathbybandaid
e86290e9fe Bugfixes 2021-01-30 10:41:46 -05:00
Deathbybandaid
0de184c242
Merge pull request #152 from deathbybandaid/dev
repair hdhr
2021-01-29 16:22:20 -05:00
deathbybandaid
5f4092bdc8 repair hdhr 2021-01-29 16:21:02 -05:00
Deathbybandaid
d9cda8b1d4
Merge pull request #151 from deathbybandaid/dev
Rebuild Plugin System and seperate core functionality into Plugins
2021-01-29 15:37:45 -05:00
deathbybandaid
a7c854bcd4 Rebuild Plugin System and seperate core functionality into Plugins 2021-01-29 15:29:05 -05:00
Deathbybandaid
13faf0845e
Merge pull request #150 from deathbybandaid/dev
Strip invalid Config Options
2021-01-23 18:52:16 -05:00
deathbybandaid
1cf2a7acce Strip invalid Config Options 2021-01-23 18:50:51 -05:00
Deathbybandaid
36712e7ba0
Merge pull request #149 from deathbybandaid/dev
Update CSS styling
2021-01-23 18:31:48 -05:00
deathbybandaid
02e825978b Update CSS styling 2021-01-23 18:30:24 -05:00
Deathbybandaid
9642feecae
Merge pull request #148 from deathbybandaid/dev
Make alt_stream methods into plugins and enhance Config system
2021-01-23 17:59:41 -05:00
deathbybandaid
b8ce4f4e8a Make alt_stream methods into plugins and enhance Config system 2021-01-23 17:50:14 -05:00
Deathbybandaid
22955ce11f
Merge pull request #147 from deathbybandaid/dev
Implement Plugin based Installation
2021-01-22 15:49:38 -05:00
deathbybandaid
1aa35b66f0 Implement Plugin based Installation 2021-01-22 15:35:04 -05:00
Deathbybandaid
cbe8deb965
Merge pull request #146 from deathbybandaid/dev
Improve Channel Creation Logic
2021-01-22 11:40:33 -05:00
deathbybandaid
d118ef7807 Improve Channel Creation Logic 2021-01-22 11:38:35 -05:00
Deathbybandaid
c9b20743fd
Merge pull request #145 from deathbybandaid/dev
Improve future modularity
2021-01-22 08:52:33 -05:00
deathbybandaid
1b13aedc5e Improve future modularity 2021-01-22 08:49:16 -05:00
Deathbybandaid
ddcb04892b
Merge pull request #144 from deathbybandaid/dev
Make alt_epg loading more abstract
2021-01-21 15:29:44 -05:00
deathbybandaid
6076011d1c Make alt_epg loading more abstract 2021-01-21 15:20:28 -05:00
Deathbybandaid
751eaebee9
Merge pull request #143 from deathbybandaid/dev
Update Streaming Config Docs
2021-01-21 12:23:49 -05:00
deathbybandaid
53dc0e127d Update Streaming Config Docs 2021-01-21 12:17:39 -05:00
Deathbybandaid
acf72ad109
Merge pull request #142 from deathbybandaid/dev
Implement Transcode Profiles for ffmpeg/vlc
2021-01-21 11:49:23 -05:00
deathbybandaid
630b8dbf2b Implement Transcode Profiles for ffmpeg/vlc 2021-01-21 11:46:44 -05:00
Deathbybandaid
e8aa5bd3f4
Merge pull request #141 from deathbybandaid/dev
Dev
2021-01-21 10:24:12 -05:00
deathbybandaid
03927ec495 Logging that Direct Method cannot transcode 2021-01-21 09:43:33 -05:00
deathbybandaid
6a924cbca2 Allow ffmpeg/vlc to select quality when origin_quality is set to None 2021-01-21 09:33:26 -05:00
Deathbybandaid
4bd2ff971e
Merge pull request #140 from deathbybandaid/dev
Improve Streams with Headers and Quality Options
2021-01-20 16:09:51 -05:00
deathbybandaid
63685f4a0e Improve Streams with Headers and Quality Options 2021-01-20 16:01:32 -05:00
Deathbybandaid
70fe2f3814
Merge pull request #139 from deathbybandaid/dev
Repair web Logging and Add Version Patch Number
2021-01-20 12:27:40 -05:00
deathbybandaid
c0ff51a6db Repair web Logging and Add Version Patch Number 2021-01-20 12:24:44 -05:00
Deathbybandaid
8a3f8d919d
Merge pull request #138 from deathbybandaid/dev
Add EPG API Method
2021-01-20 11:25:01 -05:00
deathbybandaid
e09bb5d83b Add EPG API Method 2021-01-20 11:21:58 -05:00
Deathbybandaid
b72a26c96c
Merge pull request #137 from deathbybandaid/dev
Update Docs and move Stream Method Setting
2021-01-20 11:01:49 -05:00
deathbybandaid
35e6bb707d Update Docs and move Stream Method Setting 2021-01-20 10:53:20 -05:00
Deathbybandaid
fc0708d888
Merge pull request #136 from deathbybandaid/dev
Move Logger
2021-01-18 09:03:48 -05:00
deathbybandaid
9d25b63f99 Move Logger 2021-01-18 08:54:21 -05:00
Deathbybandaid
af9fbe97b9
Merge pull request #135 from deathbybandaid/dev
Catch last channel entries on page
2021-01-17 11:42:26 -05:00
deathbybandaid
a36b9e8143 Catch last channel entries on page 2021-01-17 11:40:35 -05:00
Deathbybandaid
ef82a3b5e7
Merge pull request #134 from deathbybandaid/dev
Enable EPG Config Settings
2021-01-16 09:02:12 -05:00
deathbybandaid
6bf4319316 Enable EPG Config Settings 2021-01-16 08:59:33 -05:00
Deathbybandaid
9ad3f00482
Merge pull request #133 from deathbybandaid/dev
Repair Tuner Page Table
2021-01-15 08:54:45 -05:00
deathbybandaid
31f7213006 Repair Tuner Page Table 2021-01-15 08:41:43 -05:00
Deathbybandaid
ccd99a7008
Merge pull request #132 from deathbybandaid/dev
Enhance Channel Editor
2021-01-14 09:36:22 -05:00
deathbybandaid
56d7a74ee4 Enhance Channel Editor 2021-01-14 09:33:36 -05:00
Deathbybandaid
e3d8f64c5c
Merge pull request #131 from deathbybandaid/dev
Make Universal Brython Functions
2021-01-13 11:15:35 -05:00
deathbybandaid
28383d89ec Make Universal Brython Functions 2021-01-13 11:12:20 -05:00
Deathbybandaid
d915e4cbed
Merge pull request #130 from deathbybandaid/dev
Repair Tab/Space issues in Templates
2021-01-13 10:15:20 -05:00
deathbybandaid
51b9a85597 Repair Tab/Space issues in Templates 2021-01-13 10:10:10 -05:00
Deathbybandaid
051fb87add
Merge pull request #129 from deathbybandaid/dev
Move API Notifications
2021-01-12 16:24:44 -05:00
deathbybandaid
796a5e9eec Move API Notifications 2021-01-12 16:21:32 -05:00
Deathbybandaid
7348101eea
Merge pull request #128 from deathbybandaid/dev
Dev
2021-01-12 12:05:21 -05:00
deathbybandaid
fb1f5f2324 use session id as stream client id 2021-01-12 12:03:17 -05:00
deathbybandaid
887b60b80d Implement w3u standard 2021-01-12 11:56:17 -05:00
Deathbybandaid
698e407c38
Merge pull request #127 from deathbybandaid/dev
Add Additional Channel Creation Handling
2021-01-11 12:57:10 -05:00
deathbybandaid
8c80c51c2a Add Additional Channel Creation Handling 2021-01-11 12:53:22 -05:00
Deathbybandaid
1eca980ae5
Merge pull request #126 from deathbybandaid/dev
Add Additional Error Handling for Streams
2021-01-11 10:19:10 -05:00
deathbybandaid
308c56da09 Add Additional Error Handling for Streams 2021-01-11 10:16:11 -05:00
Deathbybandaid
404f44c22d
Merge pull request #125 from deathbybandaid/dev
More String Formatting
2021-01-11 09:24:51 -05:00
deathbybandaid
cd24c5a4fe More String Formatting 2021-01-11 08:58:48 -05:00
Deathbybandaid
db49d28de5
Merge pull request #124 from deathbybandaid/dev
Reduce CPU Usage
2021-01-09 15:52:22 -05:00
deathbybadaid
1f23425be5 Reduce CPU Usage 2021-01-09 15:44:59 -05:00
Deathbybandaid
4efdded7e1
Merge pull request #123 from deathbybandaid/dev
Update String Formatting for Alternative_EPG
2021-01-08 15:58:48 -05:00
deathbybadaid
b4d8ed6e4d Update String Formatting for Alternative_EPG 2021-01-08 15:57:50 -05:00
Deathbybandaid
cdbe545df6
Merge pull request #122 from deathbybandaid/dev
Update String Formatting
2021-01-08 15:38:25 -05:00
deathbybadaid
d1038ab46a Update String Formatting 2021-01-08 15:35:55 -05:00
Deathbybandaid
8f9208c2cb
Merge pull request #121 from deathbybandaid/dev
Update Origin_web
2021-01-08 12:38:09 -05:00
deathbybadaid
93e07fd771 Update Origin_web 2021-01-08 12:35:18 -05:00
Deathbybandaid
f5967e718a
Merge pull request #120 from deathbybandaid/dev
Enhance M3U and Stream URLs
2021-01-08 11:13:57 -05:00
deathbybadaid
32252e34c9 Enhance M3U and Stream URLs 2021-01-08 11:11:43 -05:00
Deathbybandaid
7e2accd2d2
Merge pull request #119 from deathbybandaid/dev
Enhance Diagnostics Page
2021-01-08 10:29:38 -05:00
deathbybadaid
244472792e Enhance Diagnostics Page 2021-01-08 10:25:35 -05:00
Deathbybandaid
418b23e96b
Merge pull request #118 from deathbybandaid/dev
Web Enhancements
2021-01-08 09:36:47 -05:00
deathbybadaid
d9e0cc13dd Web Enhancements 2021-01-08 09:29:15 -05:00
Deathbybandaid
c444d3123c
Merge pull request #117 from deathbybandaid/dev
Add missing comma to m3u
2021-01-07 17:28:26 -05:00
deathbybandaid
e003d502c2 Add missing comma to m3u 2021-01-07 17:25:47 -05:00
Deathbybandaid
a1e9e28e64
Merge pull request #116 from deathbybandaid/dev
Implement access_levels and route_list
2021-01-07 15:00:32 -05:00
deathbybandaid
4093a8c135 Implement access_levels and route_list 2021-01-07 14:56:02 -05:00
Deathbybandaid
e7282522b5
Merge pull request #115 from deathbybandaid/dev
Add Restart Button to Web Interface
2021-01-07 11:56:06 -05:00
deathbybandaid
602e74f565 Add Restart Button to Web Interface 2021-01-07 11:54:02 -05:00
Deathbybandaid
46a6043e62
Merge pull request #114 from deathbybandaid/dev
Repair Tuner Channel Scan URL
2021-01-07 09:35:39 -05:00
deathbybandaid
d87ef97494 Repair Tuner Channel Scan URL 2021-01-07 09:32:52 -05:00
Deathbybandaid
038bd03b42
Merge pull request #113 from deathbybandaid/dev
Dev
2021-01-07 08:49:58 -05:00
deathbybandaid
cd47fa0a3f Add Better EPG Logging 2021-01-07 08:40:25 -05:00
deathbybandaid
cf64aecf7b Streaming Enhancements 2021-01-06 14:25:45 -05:00
Deathbybandaid
b6ef8b13ae
Merge pull request #112 from deathbybandaid/dev
Update Version Numbers for Next Release
2021-01-06 09:51:39 -05:00
deathbybandaid
90fb90a92e Update Version Numbers for Next Release 2021-01-06 09:49:21 -05:00
Deathbybandaid
0441e731d4
Merge pull request #111 from deathbybandaid/dev
Missing __init__.py
2021-01-06 09:14:39 -05:00
deathbybandaid
e87af73215 Missing __init__.py 2021-01-06 09:06:48 -05:00
Deathbybandaid
f1dab94210
Merge pull request #110 from deathbybandaid/dev
Patch EPG timeout Error
2021-01-06 09:00:45 -05:00
deathbybadaid
75a8492dbe Patch EPG timeout Error 2021-01-06 08:58:00 -05:00
Deathbybandaid
c7c5efdc4e
Merge pull request #109 from DanAustinGH/dev-ui
Core UI changes
2021-01-06 08:18:00 -05:00
DanAustinGH
51d9728d2a Core UI changes 2021-01-05 16:26:10 -07:00
Deathbybandaid
27a8045fc1
Merge pull request #108 from deathbybandaid/dev
Improve Internal API System
2021-01-05 14:07:22 -05:00
deathbybandaid
8a475f154a Improve Internal API System 2021-01-05 14:01:19 -05:00
Deathbybandaid
64a6e4a635
Merge pull request #107 from deathbybandaid/dev
Dev
2021-01-05 12:05:26 -05:00
deathbybadaid
e9e0e40d78 Fix Streaming 2021-01-05 12:04:41 -05:00
deathbybadaid
c0acaa736e Bugfix 2021-01-05 11:49:01 -05:00
Deathbybandaid
f733b8c9df
Merge pull request #106 from deathbybandaid/dev
Strip Multiprocessing and Re-add Windows Support
2021-01-05 11:33:09 -05:00
deathbybadaid
3f314f3863 Strip Multiprocessing and Re-add Windows Support 2021-01-05 11:28:40 -05:00
Deathbybandaid
0c09e1dca0
Merge pull request #105 from deathbybandaid/dev
Dev
2021-01-05 11:08:37 -05:00
deathbybadaid
5c66f2594e Improve Tuner Grabbing and closing 2021-01-05 11:06:14 -05:00
deathbybadaid
1199489cc3 Fix EPG Update Typos 2021-01-05 10:55:56 -05:00
Deathbybandaid
1ab992fa76
Merge pull request #104 from deathbybandaid/dev
Add Detection of Internal API calls
2021-01-05 09:51:41 -05:00
deathbybadaid
2cf0f3d891 Add Detection of Internal API calls 2021-01-05 09:47:34 -05:00
Deathbybandaid
fb44ee3bde
Merge pull request #103 from deathbybandaid/dev
Update Stream Methods
2021-01-04 10:21:58 -05:00
deathbybandaid
e7e4ddcade Update Stream Methods 2021-01-04 10:18:52 -05:00
Deathbybandaid
dace8fa650
Merge pull request #102 from deathbybandaid/dev
SSDP Enhancements
2021-01-02 17:41:02 -05:00
deathbybandaid
8fc69ba973 SSDP Enhancements 2021-01-02 17:27:31 -05:00
Deathbybandaid
87633c356e
Merge pull request #101 from deathbybandaid/dev
Patch User-Agent detection
2021-01-02 15:53:13 -05:00
deathbybandaid
341d905ea2 Patch User-Agent detection 2021-01-02 15:44:31 -05:00
Deathbybandaid
c05ae6ac71
Merge pull request #100 from deathbybandaid/dev
Fix Lineup Tab issue
2021-01-02 15:03:41 -05:00
deathbybandaid
aec09eade1 Fix Lineup Tab issue 2021-01-02 15:00:12 -05:00
Deathbybandaid
28a9886960
Merge pull request #99 from deathbybandaid/dev
Improve Channel Sorting Globally
2021-01-02 12:36:26 -05:00
deathbybandaid
adae4d77c7 Improve Channel Sorting Globally 2021-01-02 12:33:11 -05:00
Deathbybandaid
cebce2f1ba
Merge pull request #98 from deathbybandaid/dev
Dev
2021-01-01 18:25:55 -05:00
deathbybandaid
2cf4f4249b Fix Blocks EPG 2021-01-01 18:22:41 -05:00
deathbybandaid
fa6e3bdd50 Fix EPG 2021-01-01 18:18:23 -05:00
Deathbybandaid
a981d8d845
Merge pull request #97 from deathbybandaid/dev
Enhance Channel Loading
2021-01-01 18:08:49 -05:00
deathbybandaid
39649a11f8 Enhance Channel Loading 2021-01-01 18:05:36 -05:00
Deathbybandaid
2858016ad7
Merge pull request #96 from deathbybandaid/dev
Ensure database values are preserved when loaded into the UI.
2020-12-29 13:41:56 -05:00
deathbybandaid
77f941c08a Ensure database values are preserved when loaded into the UI. 2020-12-29 13:38:43 -05:00
Deathbybandaid
9f37dfa6b3
Merge pull request #95 from deathbybandaid/dev
Dev
2020-12-20 11:29:35 -05:00
deathbybandaid
e3a264f24e Use session to store the DeviceAuth 2020-12-20 11:22:46 -05:00
deathbybandaid
2cd30a38ca Use session to detect if PlexMediaServer 2020-12-20 11:13:00 -05:00
deathbybandaid
b12996b8bb Create Flask Session system and mobile device detection 2020-12-20 10:52:51 -05:00
deathbybandaid
7e4eea1d76 Cleanup obsolete code from first webUI iteration 2020-12-20 10:26:20 -05:00
Deathbybandaid
b8be38db68
Merge pull request #94 from deathbybandaid/dev
Add Mass Scale Channel Edit API Method
2020-12-18 10:29:21 -05:00
deathbybandaid
5b1c9b303b Add Mass Scale Channel Edit API Method 2020-12-18 10:08:19 -05:00
Deathbybandaid
0b8a5104a9
Merge pull request #93 from deathbybandaid/dev
Dev
2020-12-18 09:02:18 -05:00
deathbybandaid
0aea878ebe More EPG Enhancements 2020-12-18 08:54:07 -05:00
deathbybandaid
86c14c2d9b Reformat xmltv Date 2020-12-18 07:59:39 -05:00
Deathbybandaid
eaf5e89113
Merge pull request #92 from deathbybandaid/dev
Dev
2020-12-17 14:12:24 -05:00
deathbybandaid
ca96d4de34 Add More Thumbnail Handling 2020-12-17 14:06:36 -05:00
deathbybandaid
9c481d4103 Add Hidden Tools Page 2020-12-16 16:02:25 -05:00
Deathbybandaid
1d595d8261
Merge pull request #91 from deathbybandaid/dev
Implement internal client system
2020-12-16 10:40:39 -05:00
deathbybandaid
845efb0719 Implement internal client system 2020-12-16 09:42:12 -05:00
Deathbybandaid
91926a2dcf
Merge pull request #90 from deathbybandaid/dev
Dev
2020-12-16 09:38:21 -05:00
deathbybandaid
0cc13306ab Sleep to make sure Flask is ready for startup tasks 2020-12-16 08:39:42 -05:00
deathbybandaid
d47ecee009 EPG Update Bugfix 2020-12-16 08:25:46 -05:00
Deathbybandaid
60455b6a84
Merge pull request #89 from deathbybandaid/dev
Dev
2020-12-15 13:35:18 -05:00
deathbybandaid
886b257228 Add Tuner API Status Method 2020-12-15 13:30:59 -05:00
deathbybandaid
3f8ff15e97 Move Startup Tasks to API 2020-12-15 12:59:53 -05:00
Deathbybandaid
baf4cf461c
Merge pull request #88 from deathbybandaid/dev
Dev
2020-12-15 11:50:05 -05:00
deathbybandaid
73c5f23ed7 Add Tweak for future UDP variants 2020-12-15 11:43:34 -05:00
deathbybandaid
61779991ca Change Default Logging Level to INFO 2020-12-15 11:33:51 -05:00
deathbybandaid
1143b158da Add Channel Import Logging 2020-12-15 11:27:41 -05:00
Deathbybandaid
40971388ce
Merge pull request #87 from deathbybandaid/dev
Dev
2020-12-14 13:49:26 -05:00
deathbybandaid
7f6d80fd3e EPG Enhancements 2020-12-14 11:03:45 -05:00
deathbybandaid
55ca03b389 Improve Missing Channel EPG System 2020-12-10 16:01:49 -05:00
Deathbybandaid
d101717e8f
Merge pull request #86 from deathbybandaid/dev
Dev
2020-12-10 15:18:52 -05:00
deathbybandaid
a8972371c2 Enhance Channel Numbering system 2020-12-10 14:27:08 -05:00
deathbybandaid
d5c7a1ea47 Allow Cluster Bar Hiding 2020-12-10 12:19:56 -05:00
deathbybandaid
afed209051 Add Advanced Button 2020-12-10 12:06:24 -05:00
deathbybandaid
328320192a Move Web Conf 2020-12-10 11:59:21 -05:00
deathbybandaid
71da17fa45 Enhance Config Variable Validation 2020-12-10 11:35:26 -05:00
Deathbybandaid
295a7259ac
Merge pull request #85 from deathbybandaid/dev
Dev
2020-12-10 10:29:27 -05:00
deathbybandaid
e00cec1ed6 Move Origin Web Page and Create API framework 2020-12-10 09:41:45 -05:00
deathbybandaid
8b870430ad Update EPG Directory Structure 2020-12-09 16:26:43 -05:00
deathbybandaid
01c7b7fd99 Enable tvtv 2020-12-09 16:01:59 -05:00
deathbybandaid
6786f10812 Add tvtv EPG method 2020-12-09 16:00:14 -05:00
deathbybandaid
5ff0cdf93c Bugfix for Origin EPG 2020-12-09 15:22:33 -05:00
deathbybandaid
105e1a34a5 Update Zap2it 2020-12-09 15:16:18 -05:00
deathbybandaid
eb55403bee EPG/XMLTV Tweaks 2020-12-09 15:11:39 -05:00
Deathbybandaid
142b9fd3ab
Merge pull request #84 from deathbybandaid/dev
Bugfix
2020-12-09 12:16:29 -05:00
deathbybandaid
bfdbeded3a Bugfix 2020-12-09 12:13:01 -05:00
Deathbybandaid
295599ac55
Merge pull request #83 from deathbybandaid/dev
Update Conf
2020-12-08 15:36:07 -05:00
deathbybandaid
b75d4284d0 Update Conf 2020-12-08 15:16:35 -05:00
Deathbybandaid
f9da9d8a72
Merge pull request #82 from deathbybandaid/dev
Update Guide Page with Conditional Play Link
2020-12-08 14:55:44 -05:00
deathbybandaid
10311a767e Update Guide Page with Conditional Play Link 2020-12-08 14:53:45 -05:00
Deathbybandaid
bf312bc009
Merge pull request #81 from deathbybandaid/dev
Dev
2020-12-08 13:26:33 -05:00
deathbybandaid
f8429883a6 Add Web Version 2020-12-08 13:19:50 -05:00
deathbybandaid
0304bdb5e5 Enhancements 2020-12-08 12:17:55 -05:00
deathbybandaid
5d4373f503 Update main Loader tor reflect prior commits 2020-12-08 09:45:32 -05:00
deathbybandaid
0bf33c3209 Move Web related data to fHDHR_web 2020-12-08 09:42:41 -05:00
deathbybandaid
c389801a48 Seperate fHDHR_web 2020-12-08 09:27:51 -05:00
deathbybandaid
835f98db43 Update Versions 2020-12-08 09:10:35 -05:00
deathbybandaid
0afe2339a3 Increase Channel Creation Logging 2020-12-08 08:43:46 -05:00
deathbybandaid
35c85b840c Channel Creation timestamp and allow Origin to set Enablement 2020-12-08 08:19:20 -05:00
Deathbybandaid
b3ccc3df43
Create stale.yml 2020-12-08 07:52:17 -05:00
Deathbybandaid
83b16f1460
Merge pull request #80 from deathbybandaid/dev
Dev
2020-12-07 16:39:36 -05:00
deathbybandaid
87f7c8f74d Bugfix and More Enhancements 2020-12-07 16:36:00 -05:00
deathbybandaid
396a826d98 More Enhancements 2020-12-07 16:20:08 -05:00
Deathbybandaid
e5dc890064
Merge pull request #79 from deathbybandaid/dev
Dev
2020-12-07 15:01:26 -05:00
deathbybandaid
b8b39915dc Bugfix 2020-12-07 14:58:11 -05:00
deathbybandaid
e15b8c2257 Add More Logging 2020-12-07 14:49:46 -05:00
Deathbybandaid
1b7e5cd2ba
Merge pull request #78 from deathbybandaid/dev
Dev
2020-12-07 14:27:48 -05:00
deathbybandaid
5640ce26a8 Improve XMLTV 2020-12-07 14:22:53 -05:00
deathbybandaid
daa5ac92cb Code Restructure, Channel/EPG Enhancements 2020-12-07 13:21:53 -05:00
Deathbybandaid
e3646e7b9d
Merge pull request #77 from deathbybandaid/dev
Implement API and Interface Enablement for ALL channels
2020-12-04 15:27:12 -05:00
deathbybandaid
f8cee0867e Implement API and Interface Enablement for ALL channels 2020-12-04 15:25:55 -05:00
Deathbybandaid
05877993f4
Merge pull request #76 from deathbybandaid/dev
Implement SSDP Alive Refresh
2020-12-04 10:12:37 -05:00
deathbybandaid
7ffd33ae51 Implement SSDP Alive Refresh 2020-12-04 10:08:05 -05:00
Deathbybandaid
5aa38ebd17
Merge pull request #75 from deathbybandaid/dev
Adjust SSDP Cluster Info Grabbing
2020-12-04 08:56:40 -05:00
deathbybandaid
33dd833cde Adjust SSDP Cluster Info Grabbing 2020-12-04 08:53:22 -05:00
Deathbybandaid
3c944c726a
Merge pull request #74 from deathbybandaid/dev
Implement Plex Remote Media Grabber
2020-12-04 08:42:16 -05:00
deathbybandaid
34ca98881f Implement Plex Remote Media Grabber 2020-12-04 08:25:48 -05:00
Deathbybandaid
373b5c61a1
Merge pull request #73 from deathbybandaid/dev
Use logger for Flask
2020-12-02 11:28:59 -05:00
deathbybandaid
d4dacc5f3b Use logger for Flask 2020-12-02 11:25:02 -05:00
Deathbybandaid
53fd528f38
Merge pull request #72 from deathbybandaid/dev
Allow Cross Origin CSS Theming via cache
2020-12-02 10:01:53 -05:00
deathbybandaid
8661c77089 Allow Cross Origin CSS Theming via cache 2020-12-02 09:47:33 -05:00
Deathbybandaid
5eb4be8cd1
Merge pull request #71 from deathbybandaid/dev
Dev
2020-12-01 15:37:16 -05:00
deathbybandaid
4fa82037b8 Allow CSS Theming 2020-12-01 15:21:24 -05:00
deathbybandaid
3e6bc72c47 Adjust Page Layout 2020-12-01 15:07:10 -05:00
deathbybandaid
5fd5d7f0ff Missing t 2020-12-01 15:06:25 -05:00
Deathbybandaid
07beae12d8
Merge pull request #70 from deathbybandaid/dev
Update Version to v0.4.5-beta
2020-12-01 09:49:47 -05:00
deathbybandaid
a1bd113afa Update Version to v0.4.5-beta 2020-12-01 09:46:25 -05:00
Deathbybandaid
1c1e392430
Merge pull request #69 from deathbybandaid/dev
Dev
2020-12-01 09:40:44 -05:00
deathbybandaid
e54005247a Enable Thumbnail Updating 2020-12-01 09:40:03 -05:00
deathbybandaid
85d3099f96 Create seperate Channels Editor page 2020-12-01 08:55:33 -05:00
Deathbybandaid
e763874a62
Merge pull request #68 from deathbybandaid/dev
Tuner Numbers should start at 0
2020-11-30 14:58:46 -05:00
deathbybandaid
30986a6d3f Tuner Numbers should start at 0 2020-11-30 14:53:44 -05:00
Deathbybandaid
48db9dcbd1
Merge pull request #67 from deathbybandaid/dev
Dev
2020-11-30 14:18:47 -05:00
deathbybandaid
e9843e530b Improve Direct streams 2020-11-30 13:18:55 -05:00
deathbybandaid
cc8e8a8715 Create Client IDs for Tuners 2020-11-30 12:31:10 -05:00
Deathbybandaid
2c9fd12d6a
Merge pull request #66 from deathbybandaid/dev
Dev
2020-11-30 11:13:50 -05:00
deathbybandaid
c61df94ae4 Add Downloaded Size Parameter to Tuner Status 2020-11-30 10:56:14 -05:00
deathbybandaid
cd88725dd9 Add Information to Channels Page 2020-11-30 08:49:25 -05:00
deathbybandaid
f45fdf7719 Improve Channel Update from Web Interface 2020-11-30 08:38:44 -05:00
deathbybandaid
8186e687b2 Move Channel Update Button to Channels Page 2020-11-30 08:22:06 -05:00
Deathbybandaid
4c83b93a5d
Merge pull request #65 from deathbybandaid/dev
Further Channel Enhancements
2020-11-29 19:06:13 -05:00
deathbybandaid
27917cc818 Further Channel Enhancements 2020-11-29 19:04:22 -05:00
Deathbybandaid
ea3d0dff96
Merge pull request #64 from deathbybandaid/dev
Dev
2020-11-29 13:12:18 -05:00
deathbybandaid
e8672c39bb Add Configuration of Channel Thumbnail 2020-11-29 13:10:28 -05:00
deathbybandaid
908ea889a4 Adjust table sizing 2020-11-29 12:58:51 -05:00
Deathbybandaid
6deaabe56a
Merge pull request #63 from deathbybandaid/dev
Dev
2020-11-29 12:01:54 -05:00
deathbybandaid
3d7f76234a Verify how Channel Numbers are stored from remapping 2020-11-29 11:41:01 -05:00
deathbybandaid
993873769b chanscan_on_start True 2020-11-29 11:20:49 -05:00
deathbybandaid
283aee262f Add Channel Thumbnail Attribute 2020-11-28 19:03:54 -05:00
deathbybandaid
3b766f380e Improve direct stream chunk key url methods 2020-11-28 12:50:32 -05:00
deathbybandaid
b5e425e6d1 Enhance Blocks EPG Method 2020-11-28 12:46:36 -05:00
deathbybandaid
8a9f2b1361 Update Channel Page 2020-11-28 12:08:08 -05:00
deathbybandaid
32c40251c7 Skip Channel Scan on startup after initial run, unless configured 2020-11-28 11:28:35 -05:00
deathbybandaid
a564e039df Use absolute_uri for m3u8 keys 2020-11-28 11:25:48 -05:00
Deathbybandaid
5a6deed551
Merge pull request #62 from deathbybandaid/dev
Patch Channel ID
2020-11-25 10:51:32 -05:00
deathbybandaid
87dc737688 Patch Channel ID 2020-11-25 10:47:08 -05:00
Deathbybandaid
f01d012ddd
Merge pull request #61 from deathbybandaid/dev
Enhance Logging
2020-11-25 10:23:42 -05:00
deathbybandaid
9d103f8309 Enhance Logging 2020-11-25 10:17:29 -05:00
Deathbybandaid
795b43d1cf
Merge pull request #60 from deathbybandaid/dev
Enhance Channels System
2020-11-23 14:04:01 -05:00
deathbybandaid
1a007abec0 Enhance Channels System 2020-11-23 12:38:21 -05:00
Deathbybandaid
be631e18b2
Merge pull request #59 from deathbybandaid/dev
Upgrade Channels System
2020-11-20 16:26:46 -05:00
deathbybandaid
84735271a8 Upgrade Channels System 2020-11-20 15:40:12 -05:00
Deathbybandaid
87227bb020
Merge pull request #58 from deathbybandaid/dev
Dev
2020-11-20 09:03:55 -05:00
deathbybandaid
9afc544711 Add Improved Docs 2020-11-20 09:02:51 -05:00
deathbybandaid
aa01bff67e Minor enhancements 2020-11-20 08:00:33 -05:00
Deathbybandaid
bca32cd72f
Merge pull request #57 from deathbybandaid/dev
Add Settings Page, and overhaul Config system
2020-11-19 14:38:44 -05:00
deathbybandaid
38e98d7000 Add Settings Page, and overhaul Config system 2020-11-19 14:09:22 -05:00
Deathbybandaid
017d2dea42
Merge pull request #56 from deathbybandaid/dev2
Dev2
2020-11-19 09:04:20 -05:00
deathbybandaid
e98c53526d Upgrade all pages to Templates 2020-11-19 08:59:07 -05:00
deathbybandaid
873d58fa3a Patch M3U response to audio/x-mpegurl 2020-11-19 08:37:45 -05:00
Deathbybandaid
0c58ba0604
Merge pull request #55 from deathbybandaid/dev
Missing Quotation from M3U
2020-11-16 14:19:26 -05:00
deathbybandaid
af6d76df89 Missing Quotation from M3U 2020-11-16 14:15:45 -05:00
Deathbybandaid
e5ff8fb71b
Merge pull request #54 from deathbybandaid/dev
Dev
2020-11-16 09:28:25 -05:00
deathbybandaid
9c59328fca Add experimental threading method option for Windows Users 2020-11-16 09:10:09 -05:00
deathbybandaid
08e663113b replace pycrypto with pycryptodome 2020-11-16 08:42:12 -05:00
Deathbybandaid
de5018d084
Merge pull request #53 from crackers8199/patch-1
Update Dockerfile
2020-11-13 16:13:18 -05:00
Matt Greco
eba5e7d897
Update Dockerfile
addition of pycrypto as a requirement needs gcc in order for the container to install it
2020-11-13 13:11:23 -08:00
Deathbybandaid
f3ff2c37f6
Merge pull request #52 from deathbybandaid/dev
Require py3.7+
2020-11-13 15:51:00 -05:00
deathbybandaid
0df7c5f8f1 Require py3.7+ 2020-11-13 15:49:51 -05:00
Deathbybandaid
261bf042f3
Merge pull request #51 from deathbybandaid/dev
Drop Support For Windows
2020-11-13 15:17:58 -05:00
deathbybandaid
0721ddbdd9 Drop Support For Windows 2020-11-13 15:13:52 -05:00
Deathbybandaid
9fd7d8f205
Merge pull request #50 from deathbybandaid/dev
Dev
2020-11-13 14:18:35 -05:00
deathbybandaid
701f8a99c2 Patch Missing Line 2020-11-13 14:15:10 -05:00
deathbybandaid
f9883ab064 Improve Direct Streaming 2020-11-13 14:08:55 -05:00
Deathbybandaid
6bb31873b2
Merge pull request #49 from deathbybandaid/dev
EPG Streams Page Patch
2020-11-13 08:26:54 -05:00
deathbybandaid
9847f72e6c EPG Streams Page Patch 2020-11-13 08:23:35 -05:00
Deathbybandaid
8015c6e539
Merge pull request #48 from deathbybandaid/dev
Improve Tuner Handling
2020-11-12 14:51:02 -05:00
deathbybandaid
890cf7c3dd Improve Tuner Handling 2020-11-12 14:46:47 -05:00
Deathbybandaid
43493258e8
Merge pull request #47 from deathbybandaid/dev
EPG and VLC and Tuner Improvements
2020-11-11 12:47:33 -05:00
deathbybandaid
3f3fec7bf4 EPG and VLC and Tuner Improvements 2020-11-11 12:35:28 -05:00
Deathbybandaid
183bc46320
Merge pull request #46 from deathbybandaid/dev
More SSDP Error handling
2020-11-09 15:44:21 -05:00
deathbybandaid
407a2ef6f2 More SSDP Error handling 2020-11-09 15:43:36 -05:00
Deathbybandaid
931dc6bd52
Merge pull request #45 from deathbybandaid/dev
Dev
2020-11-09 14:34:53 -05:00
deathbybandaid
b0e5de9d43 Ensure spawn on Windows 2020-11-09 14:31:05 -05:00
deathbybandaid
50f724489e Add Version Information and Warnings 2020-11-09 14:10:35 -05:00
deathbybandaid
54ca7f3b13 Handle SSDP data error 2020-11-09 13:49:43 -05:00
210 changed files with 23146 additions and 3109 deletions

.github/stale.yml (new file, 17 lines)

@ -0,0 +1,17 @@
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 60
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 7
# Issues with these labels will never be considered stale
exemptLabels:
- pinned
- security
# Label to use when marking an issue as stale
staleLabel: wontfix
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
This issue has been automatically marked as stale because it has not had
recent activity. It will be closed if no further activity occurs. Thank you
for your contributions.
# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false

View File

@ -1,7 +1,7 @@
FROM python:3.8-slim
RUN apt-get -qq update && \
apt-get -qq -y install ffmpeg && \
apt-get -qq -y install ffmpeg gcc && \
apt-get autoclean && \
rm -rf /var/lib/apt/lists/*
@ -9,4 +9,4 @@ COPY ./ /app/
WORKDIR /app
RUN pip3 install -r requirements.txt
ENTRYPOINT ["python3", "/app/main.py", "--c", "/app/config/config.ini"]
ENTRYPOINT ["python3", "/app/main.py", "--config", "/app/config/config.ini"]

View File

@ -1,30 +1,22 @@
# fHDHR_NextPVR
<p align="center">fHDHR_NextPVR <img src="docs/images/logo.ico" alt="Logo"/></p>
Welcome to the world of streaming to Plex! We use some fancy python here to achieve a system of:
Welcome to the world of streaming content as a DVR device! We use some fancy python here to achieve a system of:
**f**un
**H**ome
**D**istribution
**H**iatus &
**H**iatus
**R**ecreation
(based off of original code from
Please Check the [Docs](docs/README.md) for Installation information.
* [tvhProxy by jkaberg](https://github.com/jkaberg/tvhProxy)
* [locast2plex by tgorgdotcom](https://github.com/tgorgdotcom/locast2plex)
* myself coding for locast2plex
)
PRs welcome for:
* Docker support
Please Check the repository wiki for Installation information.
Officially marking this Fork as Beta.
fHDHR is labeled as beta until we reach v1.0.0
Join us in `#fHDHR <irc://irc.freenode.net/#fHDHR>`_ on Freenode.
# !!NOTICE!!
To reduce code duplication between variants, I am moving to a plugin system.
The normal variant repos will stay active during the transition.

View File

@ -1,59 +0,0 @@
[main]
# uuid =
# cache_dir =
# servicename = NextPVR
# reponame = fHDHR_NextPVR
[fhdhr]
# address = 0.0.0.0
# discovery_address = 0.0.0.0
# port = 5004
# stream_type = direct
# tuner_count = 4
# friendlyname = fHDHR-NextPVR
# reporting_firmware_name = fHDHR_NextPVR
# reporting_manufacturer = BoronDust
# reporting_model = fHDHR
# reporting_firmware_ver = 20201001
# reporting_tuner_type = Antenna
# device_auth = fHDHR
[epg]
# images = pass
# method = origin
# update_frequency = 43200
[ffmpeg]
# ffmpeg_path = ffmpeg
# bytes_per_read = 1152000
[direct_stream]
# chunksize = 1048576
[logging]
# level = WARNING
[database]
# type = sqlite
# driver = None
[nextpvr]
# address = localhost
# port = 8866
# ssl =
# pin =
# weight = 300
[zap2it]
# delay = 5
# postalcode = None
# affiliate_id = gapzap
# country = USA
# device = -
# headendid = lineupId
# isoverride = True
# languagecode = en
# pref =
# timespan = 6
# timezone =
# userid = -

View File

@ -0,0 +1,39 @@
{
"database":{
"type":{
"value": "sqlite",
"config_file": true,
"config_web": false
},
"driver":{
"value": "none",
"config_file": true,
"config_web": false
},
"user":{
"value": "none",
"config_file": true,
"config_web": false
},
"pass":{
"value": "none",
"config_file": true,
"config_web": false
},
"host":{
"value": "none",
"config_file": true,
"config_web": false
},
"port":{
"value": "none",
"config_file": true,
"config_web": false
},
"name":{
"value": "none",
"config_file": true,
"config_web": false
}
}
}

View File

@ -0,0 +1,40 @@
{
"epg":{
"images":{
"value": "pass",
"config_file": true,
"config_web": true
},
"method":{
"value": "none",
"config_file": true,
"config_web": true
},
"update_frequency":{
"value": 43200,
"config_file": true,
"config_web": true
},
"reverse_days": {
"value": -1,
"config_file": true,
"config_web": true
},
"forward_days": {
"value": 7,
"config_file": true,
"config_web": true
},
"block_size": {
"value": 1800,
"config_file": true,
"config_web": true
}
,
"xmltv_offset": {
"value": "+0000",
"config_file": true,
"config_web": true
}
}
}

View File

@ -1,31 +0,0 @@
[main]
uuid =
cache_dir =
[fhdhr]
address = 0.0.0.0
discovery_address = 0.0.0.0
port = 5004
reporting_manufacturer = BoronDust
reporting_model = fHDHR
reporting_firmware_ver = 20201001
reporting_tuner_type = Antenna
device_auth = fHDHR
require_auth = False
[epg]
images = pass
[ffmpeg]
ffmpeg_path = ffmpeg
bytes_per_read = 1152000
[direct_stream]
chunksize = 1048576
[logging]
level = WARNING
[database]
type = sqlite
driver = None

View File

@ -0,0 +1,39 @@
{
"fhdhr":{
"address":{
"value": "0.0.0.0",
"config_file": true,
"config_web": true
},
"discovery_address":{
"value": "none",
"config_file": true,
"config_web": true
},
"port":{
"value": 5004,
"config_file": true,
"config_web": true
},
"device_auth":{
"value": "fHDHR",
"config_file": true,
"config_web": true
},
"require_auth":{
"value": false,
"config_file": true,
"config_web": true
},
"chanscan_on_start":{
"value": true,
"config_file": true,
"config_web": true
},
"friendlyname":{
"value": "fHDHR",
"config_file": true,
"config_web": true
}
}
}

View File

@ -0,0 +1,9 @@
{
"logging":{
"level":{
"value": "INFO",
"config_file": true,
"config_web": true
}
}
}

View File

@ -0,0 +1,24 @@
{
"main":{
"uuid":{
"value": "none",
"config_file": true,
"config_web": false
},
"cache_dir":{
"value": "none",
"config_file": true,
"config_web": true
},
"servicename":{
"value": "fHDHR",
"config_file": false,
"config_web": false
},
"reponame":{
"value": "fHDHR",
"config_file": false,
"config_web": false
}
}
}

View File

@ -1,25 +0,0 @@
[main]
servicename = NextPVR
dictpopname = nextpvr
reponame = fHDHR_NextPVR
required = nextpvr/pin
valid_epg_methods = None,blocks,origin,zap2it
[fhdhr]
friendlyname = fHDHR-NextPVR
stream_type = direct
tuner_count = 4
reporting_firmware_name = fHDHR_NextPVR
[epg]
method = origin
update_frequency = 43200
[nextpvr]
address = localhost
port = 8866
ssl = False
pin =
weight = 300
epg_update_frequency = 43200
sid =

View File

@ -0,0 +1,29 @@
{
"ssdp":{
"enabled":{
"value": true,
"config_file": true,
"config_web": false
},
"max_age":{
"value": 1800,
"config_file": true,
"config_web": false
},
"proto":{
"value": "ipv4",
"config_file": true,
"config_web": false
},
"iface":{
"value": "none",
"config_file": true,
"config_web": false
},
"multicast_address":{
"value": "none",
"config_file": true,
"config_web": false
}
}
}

View File

@ -0,0 +1,24 @@
{
"streaming":{
"bytes_per_read": {
"value": 1152000,
"config_file": true,
"config_web": true
},
"origin_quality": {
"value": "none",
"config_file": true,
"config_web": true
},
"transcode_quality": {
"value": "none",
"config_file": true,
"config_web": true
},
"method": {
"value": "direct",
"config_file": true,
"config_web": true
}
}
}

View File

@ -1,13 +0,0 @@
[zap2it]
delay = 5
postalcode =
affiliate_id = gapzap
country = USA
device = -
headendid = lineupId
isoverride = True
languagecode = en
pref =
timespan = 6
timezone =
userid = -

View File

@ -1,8 +0,0 @@
.pull-right { float: right; }
.pull-lef { float: left; }
.center {
margin-left: auto;
margin-right: auto;
}

docs/ADV_Config.md (new file, 191 lines)

@ -0,0 +1,191 @@
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>
---
[Main](README.md) | [Setup and Usage](Usage.md) | [NextPVR](Origin.md) | [Credits/Related Projects](Related-Projects.md)
---
**f**un
**H**ome
**D**istribution
**H**iatus
**R**ecreation
---
[Basic Configuration](Config.md) | [Advanced Configuration](ADV_Config.md) | [WebUI](WebUI.md)
---
Here, we'll break down all of the configuration options per section.
## Main
Here's the `main` section.
* `uuid` will be created automatically; you need not worry about it.
* `cache_dir` is handy for keeping cached files out of the script directory. This is helpful for reinstalls as well as development.
````
[main]
# uuid =
# cache_dir =
````
## streaming
* `method` can be set to `ffmpeg`, `vlc` or `direct`.
* `bytes_per_read` determines how many bytes of the stream to read before sending the data to your client. Increasing this value may cause longer load times, and lowering it may cause stuttering.
* `origin_quality` can be set to high, medium, or low for most variants. Variants that make use of m3u8 will automatically select high quality for the direct method if this is not set. ffmpeg/vlc will determine the best stream on their own. Some variants allow alternative values.
* `transcode_quality` works with ffmpeg/vlc to have fHDHR handle quality instead of the origin. Valid settings include: heavy, mobile, internet720, internet480, internet360, internet240
````
[streaming]
# method = direct
# bytes_per_read = 1152000
# origin_quality = None
# transcode_quality = None
````
## fhdhr
The `fhdhr` section contains all the configuration options for interfacing between this script and your media platform.
* `address` and `port` set what the script listens on. `0.0.0.0` is the default, and will respond on all interfaces.
* `discovery_address` may be helpful for making SSDP work properly. If `address` is not `0.0.0.0`, we will use that. If this is not set to a real IP, we won't run SSDP. SSDP is only really helpful for discovery in Plex/Emby. It's a wasted resource since you can manually add the `ip:port` of the script to Plex.
* `tuner_count` is a limit on the number of devices able to stream from the script. The default is 3, as per Locast's documentation. A 4th is possible, but is not recommended.
* `friendlyname` sets the name that Plex sees the script as.
* `reporting_*` are settings that control how the script presents itself as a hardware device.
* `device_auth` and `require_auth` are for an unimplemented Authentication feature.
* `chanscan_on_start` scans the origin for new channels at startup.
````
[fhdhr]
# address = 0.0.0.0
# discovery_address = 0.0.0.0
# port = 5004
# tuner_count = 4
# friendlyname = fHDHR-Locast
# reporting_firmware_name = fHDHR_Locast
# reporting_manufacturer = BoronDust
# reporting_model = fHDHR
# reporting_firmware_ver = 20201001
# reporting_tuner_type = Antenna
# device_auth = fHDHR
# require_auth = False
# chanscan_on_start = True
````
## EPG
* `images` can be set to `proxy` or `pass`. If you choose `proxy`, images will be reverse proxied through fHDHR.
* `method` defaults to `origin` and will pull the xmltv data from the origin service. Another option is `blocks`, which is an hourly schedule with minimal channel information. A third option is `zap2it`, an external source of EPG information; channel numbers may need to be manually mapped.
* `update_frequency` determines how often we check for new scheduling information, in seconds.
* `reverse_days` allows blocks of EPG data to be created prior to the start of the EPG source data.
* `forward_days` allows blocks of EPG data to be created after the end of the EPG source data.
* `block_size`, in seconds, sets the default block size for data before, after, and missing from the EPG source timeslots.
* `xmltv_offset` allows the final xmltv file to have an offset for users with timezone issues.
````
[epg]
# images = pass
# method = origin
# update_frequency = 43200
# reverse_days = -1
# forward_days = 7
# block_size = 1800
# xmltv_offset = +0000
````
## ffmpeg
The `ffmpeg` section includes:
* `path` is useful if ffmpeg is not in your system's PATH, or if you want to specify it manually.
````
[ffmpeg]
# path = ffmpeg
````
## vlc
The `vlc` section includes:
* `path` is useful if vlc is not in your system's PATH, or if you want to specify it manually.
````
[vlc]
# path = cvlc
````
## Logging
* `level` determines the amount of logging you wish to see in the console, as well as to the logfile (stored in your cache directory).
````
[logging]
# level = WARNING
````
## Database
* Experiment with these settings at your own risk. We use SQLAlchemy to provide database options, but we default to sqlite (see the sketch after the example below).
TODO: improve documentation here.
````
[database]
# type = sqlite
# driver = None
user = None
pass = None
host = None
port = None
name = None
````
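As a rough illustration of how the keys above could be used, here is a minimal sketch assuming SQLAlchemy is installed. The key names come from the configuration reference in this repository; the URL assembly, dialect/driver combination, and file path are illustrative assumptions, not fHDHR's actual implementation.
````
# Hypothetical sketch only (not fHDHR's actual code): how the [database]
# settings above might be assembled into a SQLAlchemy connection URL.
from sqlalchemy import create_engine

settings = {"type": "sqlite", "driver": None, "user": None, "pass": None,
            "host": None, "port": None, "name": None}

if settings["type"] == "sqlite":
    # Default case: a local sqlite file (the path is a placeholder).
    engine = create_engine("sqlite:///fhdhr.db")
else:
    # e.g. type=mysql, driver=pymysql -> "mysql+pymysql://user:pass@host:port/name"
    dialect = settings["type"]
    if settings["driver"]:
        dialect = "%s+%s" % (dialect, settings["driver"])
    engine = create_engine("%s://%s:%s@%s:%s/%s" % (
        dialect, settings["user"], settings["pass"],
        settings["host"], settings["port"], settings["name"]))
````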
## RMG
````
# enabled = True
````
## SSDP
````
# enabled = True
# max_age = 1800
# proto = ipv6
# iface = None
# multicast_address = None
````
## NextPVR
The `nextpvr` section includes:
* `address` is where to contact NextPVR.
* `port` is the port NextPVR listens on.
* `ssl` sets whether NextPVR uses SSL.
* `pin` is a required credential.
````
[nextpvr]
address = localhost
port = 8866
ssl =
pin =
````
## zap2it
`zap2it` contains a ton of configuration options, and defaults to options that in my experience don't need to be adjusted.
* `postalcode` is the one value of real importance. If it is not set, the script will attempt to retrieve your postal code automatically.
````
[zap2it]
# delay = 5
# postalcode = None
# affiliate_id = gapzap
# country = USA
# device = -
# headendid = lineupId
# isoverride = True
# languagecode = en
# pref =
# timespan = 6
# timezone =
# userid = -
````

docs/Config.md (new file, 43 lines)

@ -0,0 +1,43 @@
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>
---
[Main](README.md) | [Setup and Usage](Usage.md) | [NextPVR](Origin.md) | [Credits/Related Projects](Related-Projects.md)
---
**f**un
**H**ome
**D**istribution
**H**iatus
**R**ecreation
---
[Basic Configuration](Config.md) | [Advanced Configuration](ADV_Config.md) | [WebUI](WebUI.md)
---
The example config file contains all of the things that the typical user may need to fill out.
Please see the Advanced Configuration page for more information.
## fHDHR
Under `fhdhr`, you'll find 2 addresses listed. `0.0.0.0` works great for a listen address; however, SSDP seems to work best if the discovery address is set to the actual IP the service is reachable at.
````
[fhdhr]
# address = 0.0.0.0
# port = 5004
# discovery_address = 0.0.0.0
````
## NextPVR
NextPVR requires a sign-in PIN, so add that here.
````
[nextpvr]
address = localhost
port = 8866
pin =
````

docs/Origin.md (new file, 15 lines)

@ -0,0 +1,15 @@
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>
---
[Main](README.md) | [Setup and Usage](Usage.md) | [NextPVR](Origin.md) | [Credits/Related Projects](Related-Projects.md)
---
**f**un
**H**ome
**D**istribution
**H**iatus
**R**ecreation
---
This variant of fHDHR connects to a local NextPVR instance.

docs/README.md (new file, 46 lines)

@ -0,0 +1,46 @@
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>
---
[Main](README.md) | [Setup and Usage](Usage.md) | [NextPVR](Origin.md) | [Credits/Related Projects](Related-Projects.md)
---
**f**un
**H**ome
**D**istribution
**H**iatus
**R**ecreation
---
# The Boring Disclaimers (at the top of the docs for a reason)
fHDHR is a Python service to take various sources of video and make them accessible to client software including, but not limited to:
* [Plex](https://www.plex.tv/)
* [Emby](https://emby.media/)
* [Jellyfin](https://jellyfin.org/)
* [Channels](https://getchannels.com/)
fHDHR is not directly affiliated with the above client software, and you will receive NO support for this script via their forums.
fHDHR is able to connect to clients by emulating a piece of hardware called the [HDHomeRun from SiliconDust](https://www.silicondust.com/). fHDHR is in NO way affiliated with SiliconDust, and is NOT an HDHomeRun device. fHDHR simply uses the API structure used by the authentic HDHomeRun to connect to client DVR solutions.
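To illustrate what emulating an HDHomeRun means in practice, here is a minimal hypothetical sketch of the kind of discovery endpoint such an emulator serves. The endpoint and field names follow the HDHomeRun `discover.json` format as commonly implemented by emulators; the values are placeholders taken from the example configuration in these docs, and this is not fHDHR's actual code.
```
# Hypothetical illustration only; not fHDHR's actual implementation.
# An HDHomeRun emulator answers discovery requests such as /discover.json so
# that clients like Plex can add it as a networked tuner at IP:Port.
from flask import Flask, jsonify

app = Flask(__name__)

@app.route("/discover.json")
def discover():
    # Values are placeholders drawn from the example config in these docs.
    return jsonify({
        "FriendlyName": "fHDHR-NextPVR",
        "ModelNumber": "fHDHR",
        "FirmwareName": "fHDHR_NextPVR",
        "FirmwareVersion": "20201001",
        "TunerCount": 4,
        "DeviceAuth": "fHDHR",
        "BaseURL": "http://127.0.0.1:5004",
        "LineupURL": "http://127.0.0.1:5004/lineup.json",
    })

if __name__ == "__main__":
    app.run(host="0.0.0.0", port=5004)
```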
# History
I got the Hauppauge QuadHD and the Mohu Sail as a pandemic project. Everything worked fine within Plex, but I also have Emby set up as a backup to Plex for when auth is broken.
I thought to myself, "Self, I should look on github for a way to share my tv tuner between the two".
That's when I tried both npvrProxy with NextPVR as well as tvhProxy with TVHeadend. I had to tinker with both to get them working, but I started testing which one I liked more.
Around this same time, I stumbled upon [locast2plex by tgorgdotcom](https://github.com/tgorgdotcom/locast2plex). I wanted to contribute to that project to get it to a point that I could fork it to work for other video stream sources.
The locast2plex code development wasn't going quite fast enough for the feature-creep in my head.
I then proceeded to create the initial iteration of fHDHR, which I originally called "FakeHDHR". I've rewritten the core functionality a few times before landing on the current code structure, which feels 'right'.
I've worked really hard to create a structure that simplifies new variants of the core code to work with different 'origin' streams. Combining these works really well with [xTeVe](https://github.com/xteve-project/xTeVe).
One of the variants goes as far as scraping a table from a PDF file for creating a channel guide!
I can easily create more variants of the project for other video sources. For paid sources, I could potentially accept donations, as I don't want to pay to develop for multiple platforms.

docs/Related-Projects.md (new file, 26 lines)

@ -0,0 +1,26 @@
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>
---
[Main](README.md) | [Setup and Usage](Usage.md) | [NextPVR](Origin.md) | [Credits/Related Projects](Related-Projects.md)
---
**f**un
**H**ome
**D**istribution
**H**iatus
**R**ecreation
---
While the fHDHR repos share very little code with the projects below, they were a source of inspiration:
* [tvhProxy by jkaberg](https://github.com/jkaberg/tvhProxy)
* [locast2plex by tgorgdotcom](https://github.com/tgorgdotcom/locast2plex)
Aside from the above, these other projects are worth a look as well:
* [npvrProxy](https://github.com/rogueosb/npvrProxy)
* [xTeVe](https://xteve.de/)
* [telly](https://github.com/tellytv/telly)
* [dizquetv](https://github.com/vexorian/dizquetv)

docs/Usage.md (new file, 129 lines)

@ -0,0 +1,129 @@
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>
---
[Main](README.md) | [Setup and Usage](Usage.md) | [NextPVR](Origin.md) | [Credits/Related Projects](Related-Projects.md)
---
**f**un
**H**ome
**D**istribution
**H**iatus
**R**ecreation
---
[Basic Configuration](Config.md) | [Advanced Configuration](ADV_Config.md) | [WebUI](WebUI.md)
---
# Author Notes
* All Testing is currently done in Proxmox LXC, Ubuntu 20.04, Python 3.8
# Prerequisites
* A Linux or Mac "Server". Windows currently does not work. A "Server" is a computer that is typically always online.
* Python 3.7 or later.
* Consult [This Page](Origin.md) for additional setup specific to this variant of fHDHR.
# Optional Prerequisites
* If you intend to use Docker, [This Guide](https://docs.docker.com/get-started/) should help you get started. The author of fHDHR is not a docker user, but will still try to help.
fHDHR uses direct connections with video sources by default. Alternatively, you can install one of the tools below and update the [config](Config.md) accordingly. You will need to make them available on your system's PATH, or manually set their paths via the config file (a sample config snippet follows this list).
* ffmpeg
* vlc
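For instance, a minimal sketch of the relevant config changes, based on the [Advanced Configuration](ADV_Config.md) reference in these docs (the paths shown are placeholders; adjust them for your system):
```
[streaming]
method = ffmpeg

[ffmpeg]
path = /usr/bin/ffmpeg

[vlc]
path = /usr/bin/cvlc
```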
# Installation
## Linux
* Download the zip, or git clone
* Navigate into your script directory and run `pip3 install -r requirements.txt`
* Copy the included `config.example.ini` file to a known location. The script will not run without this. There is no default configuration file location. [Modify the configuration file to suit your needs.](Config.md)
* Run with `python3 main.py -c=` followed by the path to the config file (see the example below).
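For illustration, a typical invocation might look like the following; the flag mirrors the Dockerfile's ENTRYPOINT, and the paths are placeholders:
```
cd /path/to/fHDHR_NextPVR
python3 main.py --config /path/to/config.ini
```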
## Docker
This portion of the guide assumes you are using a Linux system with both docker and docker-compose installed. This (or some variation thereof) may work on Mac or Windows, but has not been tested.
* this guide assumes we wish to use the `~/fhdhr` directory for our install (you can use whatever directory you like, just make the appropriate changes elsewhere in this guide) and that we are installing for NextPVR support
* run the following commands to clone the repo into `~/fhdhr/fHDHR_NextPVR`
```
cd ~/fhdhr
git clone https://github.com/fHDHR/fHDHR_NextPVR.git
```
* create your config.ini file (as described earlier in this guide) in the `~/fhdhr/fHDHR_NextPVR` directory
* while still in the `~/fhdhr` directory, create the following `docker-compose.yml` file
```
version: '3'
services:
nextpvr:
build: ./fHDHR_NextPVR
container_name: nextpvr
network_mode: host
volumes:
- ./fHDHR_NextPVR/config.ini:/app/config/config.ini
```
* run the following command to build and launch the container
```
docker-compose up --build -d nextpvr
```
After a short period of time (during which docker will build your new fHDHR container), you should now have a working build of fHDHR running inside a docker container.
As the code changes and new versions / bug fixes are released, at any point you can pull the latest version of the code and rebuild your container with the following commands:
```
cd ~/fhdhr/fHDHR_NextPVR
git checkout master
git pull
cd ~/fhdhr
docker-compose up --build -d nextpvr
```
<hr />
You can also run multiple instances of fHDHR to support additional sources by cloning the appropriate repo into your `~/fhdhr` directory and adding the necessary services to the docker-compose file we created above.
* for example, if we also wanted PlutoTV support, you would clone the PlutoTV repository:
```
cd ~/fhdhr
git clone https://github.com/fHDHR/fHDHR_PlutoTV.git
```
* **NOTE**: if you are running multiple services on the same machine, you must change the port in your config.ini file for each one. For example, if NextPVR was using the default port of 5004, PlutoTV cannot also use that port. You must change the port in your PlutoTV config.ini file to something else (5005, for example).
* add plutotv as a service in your `docker-compose.yml` file
```
version: '3'
services:
nextpvr:
build: ./fHDHR_NextPVR
container_name: nextpvr
network_mode: host
volumes:
- ./fHDHR_NextPVR/config.ini:/app/config/config.ini
plutotv:
build: ./fHDHR_PlutoTV
container_name: plutotv
network_mode: host
volumes:
- ./fHDHR_PlutoTV/config.ini:/app/config/config.ini
```
* run the following command to build and launch the container
```
docker-compose up --build -d plutotv
```
You can repeat these instructions for as many fHDHR containers as your system resources will allow.
# Setup
Now that you have fHDHR running, you can navigate (in a web browser) to the IP:Port from the configuration step above.
If you did not set up a `discovery_address` in your config, SSDP will be disabled. This is not a problem, as clients like Plex can have the IP:Port entered manually!
You can copy the xmltv link from the webUI and use that in your client software to provide Channel Guide information.

docs/WebUI.md (new file, 98 lines)

@ -0,0 +1,98 @@
<p align="center">fHDHR <img src="images/logo.ico" alt="Logo"/></p>
---
[Main](README.md) | [Setup and Usage](Usage.md) | [NextPVR](Origin.md) | [Credits/Related Projects](Related-Projects.md)
---
**f**un
**H**ome
**D**istribution
**H**iatus
**R**ecreation
---
[Basic Configuration](Config.md) | [Advanced Configuration](ADV_Config.md) | [WebUI](WebUI.md)
---
This page introduces basic handling of the script from the web interface provided at IP:Port.
The pages are available from the buttons at the top; links to xmltv and m3u are also provided at the top for ease of access.
# Main Landing Page
Below is the main landing page with basic information.
<img src="screenshots/webui_main.PNG" alt="Main Page"/>
# NextPVR
Here you will have access to some basic information about the service we are proxying.
The webUI will still work, even if setup didn't go smoothly.
<img src="screenshots/webui_origin.PNG" alt="Origin Page"/>
# Guide
This page gives you information about what is currently playing on all stations. It will also show the time remaining for each item.
* Note: The Play link in the left hand column can be copied to play a channel in VLC media player!
<img src="screenshots/webui_guide.PNG" alt="Guide Page"/>
# Cluster
Since SSDP is used for service discovery, I decided to also use it for ease of management.
This tab will not have the below options if SSDP isn't running.
Joining a cluster will provide a second row of buttons for the clustered servers.
Unjoined:
<img src="screenshots/webui_cluster_unjoined.PNG" alt="Cluster Page, UnJoined"/>
Joined:
<img src="screenshots/webui_cluster_joined.PNG" alt="Cluster Page, Joined"/>
# Streams
This page shows all active streams and tuner information. You can also terminate a stream from here.
* Note: Clients will often have an amount buffered, and the connection termination is not immediate from a viewing perspective. However, the connection to the source is indeed cut off.
<img src="screenshots/webui_streams.PNG" alt="Streams Page"/>
# xmltv
This page gives you access to all the xmltv formats provided by this variant.
From here, you can manually update, or even clear the cached EPG and then update.
<img src="screenshots/webui_xmltv.PNG" alt="xmltv Page"/>
# Version
This page will give valuable information about the environment the script is being run in.
<img src="screenshots/webui_version.PNG" alt="Version Page"/>
# Diagnostics
This page has various links to json/xml files that make the magic work, as well as debug and cluster information.
<img src="screenshots/webui_diagnostics.PNG" alt="Diagnostics Page"/>
# Settings
This page allows viewing/changing all possible configuration options.
* Note: This will require a restart of the script to have any effect.
<img src="screenshots/webui_diagnostics.PNG" alt="Diagnostics Page"/>

Binary image files not shown: one image modified and ten added (15 KiB to 137 KiB each), most likely the logo and webUI screenshots referenced in the docs above.

View File

@ -1,23 +1,42 @@
# coding=utf-8
from .origin import OriginServiceWrapper
from .device import fHDHR_Device
from .api import fHDHR_API_URLs
import fHDHR.tools
fHDHR_VERSION = "v0.4.0-beta"
fHDHR_VERSION = "v0.6.0-beta"
class fHDHR_OBJ():
class fHDHR_INT_OBJ():
def __init__(self, settings, logger, db):
def __init__(self, settings, logger, db, plugins):
self.version = fHDHR_VERSION
self.config = settings
self.logger = logger
self.db = db
self.plugins = plugins
self.web = fHDHR.tools.WebReq()
for plugin_name in list(self.plugins.plugins.keys()):
self.plugins.plugins[plugin_name].plugin_utils.web = self.web
self.origin = OriginServiceWrapper(settings, logger, self.web, db)
self.api = fHDHR_API_URLs(settings, self.web)
for plugin_name in list(self.plugins.plugins.keys()):
self.plugins.plugins[plugin_name].plugin_utils.api = self.api
self.device = fHDHR_Device(settings, self.version, self.origin, logger, self.web, db)
self.threads = {}
class fHDHR_OBJ():
def __init__(self, settings, logger, db, plugins):
self.fhdhr = fHDHR_INT_OBJ(settings, logger, db, plugins)
self.fhdhr.origins = fHDHR.origins.Origins(self.fhdhr)
self.device = fHDHR_Device(self.fhdhr, self.fhdhr.origins)
def __getattr__(self, name):
''' will only get called for undefined attributes '''
if hasattr(self.fhdhr, name):
return eval("self.fhdhr.%s" % name)

fHDHR/api/__init__.py (new file, 82 lines)

@ -0,0 +1,82 @@
import urllib.parse
class Fillin_Client():
def __init__(self, settings, web):
self.config = settings
self.web = web
def __getattr__(self, name):
''' will only get called for undefined attributes '''
if hasattr(self.web.session, name):
return eval("self.web.session.%s" % name)
class fHDHR_API_URLs():
def __init__(self, settings, web):
self.config = settings
self.web = web
self.headers = {'User-Agent': "fHDHR/%s" % self.config.internal["versions"]["fHDHR"]}
# Replaced later
self.client = Fillin_Client(settings, web)
self.address = self.config.dict["fhdhr"]["address"]
self.discovery_address = self.config.dict["fhdhr"]["discovery_address"]
self.port = self.config.dict["fhdhr"]["port"]
def get(self, url, *args):
req_method = type(self.client).__name__
if not url.startswith("http"):
if not url.startswith("/"):
url = "/%s" % url
url = "%s%s" % (self.base, url)
if req_method == "FlaskClient":
self.client.get(url, headers=self.headers, *args)
else:
self.client.get(url, headers=self.headers, *args)
def post(self, url, *args):
req_method = type(self.client).__name__
if not url.startswith("http"):
if not url.startswith("/"):
url = "/%s" % url
url = "%s%s" % (self.base, url)
if req_method == "FlaskClient":
self.client.post(url, headers=self.headers, *args)
else:
self.client.post(url, headers=self.headers, *args)
@property
def base(self):
if self.discovery_address:
return ('http://%s:%s' % self.discovery_address_tuple)
elif self.address == "0.0.0.0":
return ('http://%s:%s' % self.address_tuple)
else:
return ('http://%s:%s' % self.address_tuple)
@property
def base_quoted(self):
return urllib.parse.quote(self.base)
@property
def discovery_address_tuple(self):
return (self.discovery_address, int(self.port))
@property
def localhost_address_tuple(self):
return ("127.0.0.1", int(self.port))
@property
def address_tuple(self):
return (self.address, int(self.port))
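The API helper above builds its base URL from the configured discovery address (falling back to the bind address) and prefixes relative paths before issuing requests. A rough, standalone sketch of that resolution step, with placeholder address and port values:

```python
# Rough sketch of how the API helper resolves a relative path against its
# base URL, following the logic of fHDHR_API_URLs.base and .get().
# The address/port values below are placeholders, not real configuration.
def base_url(address, discovery_address, port):
    # Prefer the discovery address when one is configured.
    if discovery_address:
        return "http://%s:%s" % (discovery_address, int(port))
    return "http://%s:%s" % (address, int(port))

def resolve(url, address="0.0.0.0", discovery_address="192.168.1.50", port=5004):
    if not url.startswith("http"):
        if not url.startswith("/"):
            url = "/%s" % url
        url = "%s%s" % (base_url(address, discovery_address, port), url)
    return url

if __name__ == "__main__":
    print(resolve("api/startup_tasks"))  # -> http://192.168.1.50:5004/api/startup_tasks
```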

View File

@ -2,20 +2,21 @@ import os
import sys
import argparse
import time
import multiprocessing
from fHDHR import fHDHR_VERSION, fHDHR_OBJ
import fHDHR.exceptions
import fHDHR.config
from fHDHR.http import fHDHR_HTTP_Server
import fHDHR.logger
import fHDHR.plugins
import fHDHR.origins
from fHDHR.db import fHDHRdb
ERR_CODE = 1
ERR_CODE_NO_RESTART = 2
if sys.version_info.major == 2 or sys.version_info < (3, 3):
print('Error: fHDHR requires python 3.3+.')
if sys.version_info.major == 2 or sys.version_info < (3, 7):
print('Error: fHDHR requires python 3.7+.')
sys.exit(1)
@ -26,36 +27,38 @@ def build_args_parser():
return parser.parse_args()
def get_configuration(args, script_dir):
def get_configuration(args, script_dir, fHDHR_web):
if not os.path.isfile(args.cfg):
raise fHDHR.exceptions.ConfigurationNotFound(filename=args.cfg)
return fHDHR.config.Config(args.cfg, script_dir)
return fHDHR.config.Config(args.cfg, script_dir, fHDHR_web)
def run(settings, logger, db):
def run(settings, logger, db, script_dir, fHDHR_web, plugins):
fhdhr = fHDHR_OBJ(settings, logger, db)
fhdhrweb = fHDHR_HTTP_Server(fhdhr)
fhdhr = fHDHR_OBJ(settings, logger, db, plugins)
fhdhrweb = fHDHR_web.fHDHR_HTTP_Server(fhdhr)
try:
print("HTTP Server Starting")
fhdhr_web = multiprocessing.Process(target=fhdhrweb.run)
fhdhr_web.start()
# Start Flask Thread
fhdhrweb.start()
# Start SSDP Thread
if settings.dict["fhdhr"]["discovery_address"]:
print("SSDP Server Starting")
fhdhr_ssdp = multiprocessing.Process(target=fhdhr.device.ssdp.run)
fhdhr_ssdp.start()
fhdhr.device.ssdp.start()
# Start EPG Thread
if settings.dict["epg"]["method"]:
print("EPG Update Starting")
fhdhr_epg = multiprocessing.Process(target=fhdhr.device.epg.run)
fhdhr_epg.start()
fhdhr.device.epg.start()
# Perform some actions now that HTTP Server is running
fhdhr.api.get("/api/startup_tasks")
# wait forever
while True:
time.sleep(3600)
restart_code = "restart"
while fhdhr.threads["flask"].is_alive():
time.sleep(1)
return restart_code
except KeyboardInterrupt:
return ERR_CODE_NO_RESTART
@ -63,30 +66,52 @@ def run(settings, logger, db):
return ERR_CODE
def start(args, script_dir):
def start(args, script_dir, fHDHR_web):
"""Get Configuration for fHDHR and start"""
try:
settings = get_configuration(args, script_dir)
settings = get_configuration(args, script_dir, fHDHR_web)
except fHDHR.exceptions.ConfigurationError as e:
print(e)
return ERR_CODE_NO_RESTART
logger = settings.logging_setup()
# Find Plugins and import their default configs
plugins = fHDHR.plugins.PluginsHandler(settings)
# Apply User Configuration
settings.user_config()
settings.config_verification()
# Setup Logging
logger = fHDHR.logger.Logger(settings)
# Setup Database
db = fHDHRdb(settings)
return run(settings, logger, db)
# Setup Plugins
plugins.load_plugins(logger, db)
plugins.setup()
settings.config_verification_plugins()
if not len([x for x in list(plugins.plugins.keys()) if plugins.plugins[x].type == "origin"]):
print("No Origin Plugins found.")
return ERR_CODE
return run(settings, logger, db, script_dir, fHDHR_web, plugins)
def main(script_dir):
def main(script_dir, fHDHR_web):
"""fHDHR run script entry point"""
print("Loading fHDHR " + fHDHR_VERSION)
print("Loading fHDHR %s" % fHDHR_VERSION)
print("Loading fHDHR_web %s" % fHDHR_web.fHDHR_web_VERSION)
try:
args = build_args_parser()
return start(args, script_dir)
while True:
returned_code = start(args, script_dir, fHDHR_web)
if returned_code not in ["restart"]:
return returned_code
except KeyboardInterrupt:
print("\n\nInterrupted")
return ERR_CODE
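The entry point now loops, calling start() again whenever it returns the "restart" code and exiting on anything else. A condensed, runnable sketch of that supervisor loop, with a stand-in start() in place of the real configuration/plugin bootstrap:

```python
# Condensed sketch of the restart loop in main(): keep re-running start()
# until it returns something other than "restart".
ERR_CODE = 1
_state = {"starts": 0}

def start():
    # Stand-in for the real bootstrap: load config, discover plugins,
    # set up logging and the database, then run() until it returns.
    _state["starts"] += 1
    return "restart" if _state["starts"] < 2 else 0

def main():
    try:
        while True:
            returned_code = start()
            if returned_code not in ["restart"]:
                return returned_code
    except KeyboardInterrupt:
        print("\n\nInterrupted")
        return ERR_CODE

if __name__ == "__main__":
    raise SystemExit(main())
```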

View File

@ -1,79 +1,281 @@
import os
import sys
import random
import configparser
import pathlib
import logging
import subprocess
import platform
import json
import fHDHR.exceptions
from fHDHR.tools import isint, isfloat, is_arithmetic
from fHDHR import fHDHR_VERSION
from fHDHR.tools import isint, isfloat, is_arithmetic, is_docker
class Config():
def __init__(self, filename, script_dir):
def __init__(self, filename, script_dir, fHDHR_web):
self.fHDHR_web = fHDHR_web
self.internal = {}
self.conf_default = {}
self.dict = {}
self.internal["versions"] = {}
self.config_file = filename
self.parser = configparser.RawConfigParser(allow_no_value=True)
self.load_defaults(script_dir)
self.core_setup(script_dir)
print("Loading Configuration File: " + str(self.config_file))
self.read_config(self.config_file)
self.config_verification()
def load_defaults(self, script_dir):
def core_setup(self, script_dir):
data_dir = pathlib.Path(script_dir).joinpath('data')
www_dir = pathlib.Path(data_dir).joinpath('www')
www_images_dir = pathlib.Path(www_dir).joinpath('images')
internal_plugins_dir = pathlib.Path(script_dir).joinpath('plugins')
fHDHR_web_dir = pathlib.Path(script_dir).joinpath('fHDHR_web')
www_dir = pathlib.Path(fHDHR_web_dir).joinpath('www_dir')
self.dict["filedir"] = {
self.internal["paths"] = {
"script_dir": script_dir,
"data_dir": data_dir,
"plugins_dir": [internal_plugins_dir],
"cache_dir": pathlib.Path(data_dir).joinpath('cache'),
"internal_config": pathlib.Path(data_dir).joinpath('internal_config'),
"fHDHR_web_dir": fHDHR_web_dir,
"www_dir": www_dir,
"www_images_dir": www_images_dir,
"www_templates_dir": pathlib.Path(fHDHR_web_dir).joinpath('templates'),
"font": pathlib.Path(data_dir).joinpath('garamond.ttf'),
"favicon": pathlib.Path(data_dir).joinpath('favicon.ico'),
"epg_cache": {},
}
for conffile in os.listdir(self.dict["filedir"]["internal_config"]):
conffilepath = os.path.join(self.dict["filedir"]["internal_config"], conffile)
self.read_config(conffilepath)
for conffile in os.listdir(self.internal["paths"]["internal_config"]):
conffilepath = os.path.join(self.internal["paths"]["internal_config"], conffile)
if str(conffilepath).endswith(".json"):
self.read_json_config(conffilepath)
def read_config(self, conffilepath):
for file_item in os.listdir(self.internal["paths"]["fHDHR_web_dir"]):
file_item_path = pathlib.Path(self.internal["paths"]["fHDHR_web_dir"]).joinpath(file_item)
if str(file_item_path).endswith("_conf.json"):
self.read_json_config(file_item_path)
self.dict["epg"]["valid_methods"] = {None: {}}
self.dict["origins"] = {}
self.dict["origins"]["valid_methods"] = {}
self.dict["streaming"]["valid_methods"] = {"direct": {}}
self.dict["plugin_web_paths"] = {}
self.load_versions()
def register_web_path(self, name, path, plugin_dict_name):
self.dict["plugin_web_paths"][name.lower()] = {
"name": name,
"namespace": name.lower(),
"path": path,
"plugin": plugin_dict_name
}
def register_valid_origin_method(self, method_item):
self.dict["origins"]["valid_methods"][method_item.lower()] = {
"name": method_item,
"namespace": method_item.lower(),
}
def register_valid_streaming_method(self, method_item, plugin_dict_name):
self.dict["streaming"]["valid_methods"][method_item.lower()] = {
"name": method_item,
"namespace": method_item.lower(),
"plugin": plugin_dict_name
}
def register_valid_epg_method(self, method_item, plugin_dict_name):
self.dict["epg"]["valid_methods"][method_item.lower()] = {
"name": method_item,
"namespace": method_item.lower(),
"plugin": plugin_dict_name
}
def register_version(self, item_name, item_version, item_type):
self.internal["versions"][item_name] = {
"name": item_name,
"version": item_version,
"type": item_type
}
def import_conf_json(self, file_item_path):
self.read_json_config(file_item_path)
def load_versions(self):
self.register_version("fHDHR", fHDHR_VERSION, "fHDHR")
self.register_version("fHDHR_web", self.fHDHR_web.fHDHR_web_VERSION, "fHDHR")
self.register_version("Python", sys.version, "env")
opersystem = platform.system()
self.register_version("Operating System", opersystem, "env")
if opersystem in ["Linux", "Darwin"]:
# Linux/Mac
if os.getuid() == 0 or os.geteuid() == 0:
print('Warning: Do not run fHDHR with root privileges.')
elif opersystem in ["Windows"]:
# Windows
if os.environ.get("USERNAME") == "Administrator":
print('Warning: Do not run fHDHR as Administrator.')
else:
print("Uncommon Operating System, use at your own risk.")
isdocker = is_docker()
self.register_version("Docker", isdocker, "env")
def user_config(self):
print("Loading Configuration File: %s" % self.config_file)
self.read_ini_config(self.config_file)
def config_verification_plugins(self):
required_missing = {}
# create dict and combine items
for config_section in list(self.conf_default.keys()):
for config_item in list(self.conf_default[config_section].keys()):
if self.conf_default[config_section][config_item]["required"]:
if not self.dict[config_section][config_item]:
if config_section not in list(required_missing.keys()):
required_missing[config_section] = []
required_missing[config_section].append(config_item)
for config_section in list(required_missing.keys()):
print("Warning! Required configuration options missing: [%s]%s" % (config_section, ", ".join(required_missing[config_section])))
if self.dict["epg"]["method"] and self.dict["epg"]["method"] not in ["None"]:
if isinstance(self.dict["epg"]["method"], str):
self.dict["epg"]["method"] = [self.dict["epg"]["method"]]
epg_methods = []
for epg_method in self.dict["epg"]["method"]:
if epg_method in list(self.dict["epg"]["valid_methods"].keys()):
epg_methods.append(epg_method)
elif epg_method in list(self.dict["origins"]["valid_methods"].keys()):
epg_methods.append(epg_method)
else:
raise fHDHR.exceptions.ConfigurationError("Invalid EPG Method. Exiting...")
if self.dict["epg"]["method"]:
self.dict["epg"]["def_method"] = self.dict["epg"]["method"][0]
else:
self.dict["epg"]["def_method"] = None
if self.dict["streaming"]["method"] not in self.dict["streaming"]["valid_methods"]:
raise fHDHR.exceptions.ConfigurationError("Invalid stream type. Exiting...")
def config_verification(self):
if not self.dict["main"]["uuid"]:
self.dict["main"]["uuid"] = ''.join(random.choice("hijklmnopqrstuvwxyz") for i in range(8))
self.write('uuid', self.dict["main"]["uuid"], 'main')
if self.dict["main"]["cache_dir"]:
if not pathlib.Path(self.dict["main"]["cache_dir"]).is_dir():
raise fHDHR.exceptions.ConfigurationError("Invalid Cache Directory. Exiting...")
self.internal["paths"]["cache_dir"] = pathlib.Path(self.dict["main"]["cache_dir"])
cache_dir = self.internal["paths"]["cache_dir"]
logs_dir = pathlib.Path(cache_dir).joinpath('logs')
self.internal["paths"]["logs_dir"] = logs_dir
if not logs_dir.is_dir():
logs_dir.mkdir()
self.dict["database"]["path"] = pathlib.Path(cache_dir).joinpath('fhdhr.db')
if not self.dict["fhdhr"]["discovery_address"] and self.dict["fhdhr"]["address"] != "0.0.0.0":
self.dict["fhdhr"]["discovery_address"] = self.dict["fhdhr"]["address"]
if not self.dict["fhdhr"]["discovery_address"] or self.dict["fhdhr"]["discovery_address"] == "0.0.0.0":
self.dict["fhdhr"]["discovery_address"] = None
def get_real_conf_value(self, key, confvalue):
if not confvalue:
confvalue = None
elif key == "xmltv_offset":
confvalue = str(confvalue)
elif str(confvalue) in ["0"]:
confvalue = 0
elif isint(confvalue):
confvalue = int(confvalue)
elif isfloat(confvalue):
confvalue = float(confvalue)
elif is_arithmetic(confvalue):
confvalue = eval(confvalue)
elif "," in confvalue:
confvalue = confvalue.split(",")
elif str(confvalue).lower() in ["none", ""]:
confvalue = None
elif str(confvalue).lower() in ["false"]:
confvalue = False
elif str(confvalue).lower() in ["true"]:
confvalue = True
return confvalue
def read_json_config(self, conffilepath):
with open(conffilepath, 'r') as jsonconf:
confimport = json.load(jsonconf)
for section in list(confimport.keys()):
if section not in self.dict.keys():
self.dict[section] = {}
if section not in self.conf_default.keys():
self.conf_default[section] = {}
for key in list(confimport[section].keys()):
if key not in list(self.conf_default[section].keys()):
self.conf_default[section][key] = {}
confvalue = self.get_real_conf_value(key, confimport[section][key]["value"])
self.dict[section][key] = confvalue
self.conf_default[section][key]["value"] = confvalue
for config_option in ["config_web_hidden", "config_file", "config_web", "required"]:
if config_option not in list(confimport[section][key].keys()):
config_option_value = False
else:
config_option_value = confimport[section][key][config_option]
if str(config_option_value).lower() in ["none"]:
config_option_value = None
elif str(config_option_value).lower() in ["false"]:
config_option_value = False
elif str(config_option_value).lower() in ["true"]:
config_option_value = True
self.conf_default[section][key][config_option] = config_option_value
def read_ini_config(self, conffilepath):
config_handler = configparser.ConfigParser()
config_handler.read(conffilepath)
for each_section in config_handler.sections():
if each_section.lower() not in list(self.dict.keys()):
self.dict[each_section.lower()] = {}
for (each_key, each_val) in config_handler.items(each_section):
if not each_val:
each_val = None
elif each_val.lower() in ["none", "false"]:
each_val = False
elif each_val.lower() in ["true"]:
each_val = True
elif isint(each_val):
each_val = int(each_val)
elif isfloat(each_val):
each_val = float(each_val)
elif is_arithmetic(each_val):
each_val = eval(each_val)
elif "," in each_val:
each_val = each_val.split(",")
self.dict[each_section.lower()][each_key.lower()] = each_val
each_val = self.get_real_conf_value(each_key, each_val)
def write(self, section, key, value):
if section == self.dict["main"]["dictpopname"]:
self.dict["origin"][key] = value
else:
self.dict[section][key] = value
import_val = True
if each_section in list(self.conf_default.keys()):
if each_key in list(self.conf_default[each_section].keys()):
if not self.conf_default[each_section][each_key]["config_file"]:
import_val = False
if import_val:
self.dict[each_section.lower()][each_key.lower()] = each_val
def write(self, key, value, section):
if not value:
value = None
if value.lower() in ["none"]:
value = None
elif value.lower() in ["false"]:
value = False
elif value.lower() in ["true"]:
value = True
elif isint(value):
value = int(value)
elif isfloat(value):
value = float(value)
elif isinstance(value, list):
",".join(value)
self.dict[section][key] = value
config_handler = configparser.ConfigParser()
config_handler.read(self.config_file)
@ -81,126 +283,12 @@ class Config():
if not config_handler.has_section(section):
config_handler.add_section(section)
config_handler.set(section, key, value)
config_handler.set(section, key, str(value))
with open(self.config_file, 'w') as config_file:
config_handler.write(config_file)
def config_verification(self):
if self.dict["main"]["required"]:
required_missing = []
if isinstance(self.dict["main"]["required"], str):
self.dict["main"]["required"] = [self.dict["main"]["required"]]
if len(self.dict["main"]["required"]):
for req_item in self.dict["main"]["required"]:
req_section = req_item.split("/")[0]
req_key = req_item.split("/")[1]
if not self.dict[req_section][req_key]:
required_missing.append(req_item)
if len(required_missing):
raise fHDHR.exceptions.ConfigurationError("Required configuration options missing: " + ", ".join(required_missing))
self.dict["origin"] = self.dict.pop(self.dict["main"]["dictpopname"])
if isinstance(self.dict["main"]["valid_epg_methods"], str):
self.dict["main"]["valid_epg_methods"] = [self.dict["main"]["valid_epg_methods"]]
if self.dict["epg"]["method"] and self.dict["epg"]["method"] not in ["None"]:
if isinstance(self.dict["epg"]["method"], str):
self.dict["epg"]["method"] = [self.dict["epg"]["method"]]
epg_methods = []
for epg_method in self.dict["epg"]["method"]:
if epg_method == self.dict["main"]["dictpopname"] or epg_method == "origin":
epg_methods.append("origin")
elif epg_method in ["None"]:
raise fHDHR.exceptions.ConfigurationError("Invalid EPG Method. Exiting...")
elif epg_method in self.dict["main"]["valid_epg_methods"]:
epg_methods.append(epg_method)
else:
raise fHDHR.exceptions.ConfigurationError("Invalid EPG Method. Exiting...")
self.dict["epg"]["def_method"] = self.dict["epg"]["method"][0]
# generate UUID here for when we are not using docker
if not self.dict["main"]["uuid"]:
# from https://pynative.com/python-generate-random-string/
# create a string that wouldn't be a real device uuid for
self.dict["main"]["uuid"] = ''.join(random.choice("hijklmnopqrstuvwxyz") for i in range(8))
self.write('main', 'uuid', self.dict["main"]["uuid"])
if self.dict["main"]["cache_dir"]:
if not pathlib.Path(self.dict["main"]["cache_dir"]).is_dir():
raise fHDHR.exceptions.ConfigurationError("Invalid Cache Directory. Exiting...")
self.dict["filedir"]["cache_dir"] = pathlib.Path(self.dict["main"]["cache_dir"])
cache_dir = self.dict["filedir"]["cache_dir"]
logs_dir = pathlib.Path(cache_dir).joinpath('logs')
self.dict["filedir"]["logs_dir"] = logs_dir
if not logs_dir.is_dir():
logs_dir.mkdir()
self.dict["database"]["path"] = pathlib.Path(cache_dir).joinpath('fhdhr.db')
for epg_method in self.dict["main"]["valid_epg_methods"]:
if epg_method and epg_method != "None":
epg_cache_dir = pathlib.Path(cache_dir).joinpath(epg_method)
if not epg_cache_dir.is_dir():
epg_cache_dir.mkdir()
if epg_method not in list(self.dict["filedir"]["epg_cache"].keys()):
self.dict["filedir"]["epg_cache"][epg_method] = {}
self.dict["filedir"]["epg_cache"][epg_method]["top"] = epg_cache_dir
epg_web_cache_dir = pathlib.Path(epg_cache_dir).joinpath("web_cache")
if not epg_web_cache_dir.is_dir():
epg_web_cache_dir.mkdir()
self.dict["filedir"]["epg_cache"][epg_method]["web_cache"] = epg_web_cache_dir
self.dict["filedir"]["epg_cache"][epg_method]["epg_json"] = pathlib.Path(epg_cache_dir).joinpath('epg.json')
if self.dict["fhdhr"]["stream_type"] not in ["direct", "ffmpeg"]:
raise fHDHR.exceptions.ConfigurationError("Invalid stream type. Exiting...")
if self.dict["fhdhr"]["stream_type"] == "ffmpeg":
try:
ffmpeg_command = [self.dict["ffmpeg"]["ffmpeg_path"],
"-version",
"pipe:stdout"
]
ffmpeg_proc = subprocess.Popen(ffmpeg_command, stdout=subprocess.PIPE)
ffmpeg_version = ffmpeg_proc.stdout.read()
ffmpeg_proc.terminate()
ffmpeg_proc.communicate()
ffmpeg_version = ffmpeg_version.decode().split("version ")[1].split(" ")[0]
except FileNotFoundError:
ffmpeg_version = None
self.dict["ffmpeg"]["version"] = ffmpeg_version
if not self.dict["fhdhr"]["discovery_address"] and self.dict["fhdhr"]["address"] != "0.0.0.0":
self.dict["fhdhr"]["discovery_address"] = self.dict["fhdhr"]["address"]
if not self.dict["fhdhr"]["discovery_address"] or self.dict["fhdhr"]["discovery_address"] == "0.0.0.0":
self.dict["fhdhr"]["discovery_address"] = None
def logging_setup(self):
log_level = self.dict["logging"]["level"].upper()
# Create a custom logger
logging.basicConfig(format='%(name)s - %(levelname)s - %(message)s', level=log_level)
logger = logging.getLogger('fHDHR')
log_file = os.path.join(self.dict["filedir"]["logs_dir"], 'fHDHR.log')
# Create handlers
# c_handler = logging.StreamHandler()
f_handler = logging.FileHandler(log_file)
# c_handler.setLevel(log_level)
f_handler.setLevel(log_level)
# Create formatters and add it to handlers
# c_format = logging.Formatter('%(name)s - %(levelname)s - %(message)s')
f_format = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# c_handler.setFormatter(c_format)
f_handler.setFormatter(f_format)
# Add handlers to the logger
# logger.addHandler(c_handler)
logger.addHandler(f_handler)
return logger
def __getattr__(self, name):
''' will only get called for undefined attributes '''
if name in list(self.dict.keys()):
return self.dict[name]
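The rewritten config reader coerces raw string values into typed Python values (None, booleans, ints, floats, comma-separated lists, with xmltv_offset always kept as a string). A standalone sketch mirroring the ordering of get_real_conf_value; the isint/isfloat helpers and the arithmetic-expression case are replaced here with plain int/float conversion for brevity:

```python
# Standalone sketch of the value coercion performed by
# Config.get_real_conf_value(): turn raw config strings into typed values.
# Illustrative only; the real module uses isint/isfloat helpers and also
# evaluates simple arithmetic expressions, which is omitted here.
def coerce(key, value):
    if not value:
        return None
    if key == "xmltv_offset":   # always kept as a string
        return str(value)
    if str(value) == "0":
        return 0
    try:
        return int(value)
    except (TypeError, ValueError):
        pass
    try:
        return float(value)
    except (TypeError, ValueError):
        pass
    if isinstance(value, str):
        if "," in value:
            return value.split(",")
        lowered = value.lower()
        if lowered in ("none", ""):
            return None
        if lowered == "false":
            return False
        if lowered == "true":
            return True
    return value

if __name__ == "__main__":
    for k, v in [("port", "5004"), ("quality", "2.5"), ("method", "a,b"), ("ssdp", "True")]:
        print(k, repr(coerce(k, v)))
```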

View File

@ -32,28 +32,10 @@ MYSQL_TABLE_ARGS = {'mysql_engine': 'InnoDB',
'mysql_collate': 'utf8mb4_unicode_ci'}
class ChannelValues(BASE):
__tablename__ = 'channel_values'
class PluginValues(BASE):
__tablename__ = 'plugin_values'
__table_args__ = MYSQL_TABLE_ARGS
channel = Column(String(255), primary_key=True)
namespace = Column(String(255), primary_key=True)
key = Column(String(255), primary_key=True)
value = Column(Text())
class ProgramValues(BASE):
__tablename__ = 'program_values'
__table_args__ = MYSQL_TABLE_ARGS
program = Column(String(255), primary_key=True)
namespace = Column(String(255), primary_key=True)
key = Column(String(255), primary_key=True)
value = Column(Text())
class CacheValues(BASE):
__tablename__ = 'cache_values'
__table_args__ = MYSQL_TABLE_ARGS
cacheitem = Column(String(255), primary_key=True)
pluginitem = Column(String(255), primary_key=True)
namespace = Column(String(255), primary_key=True)
key = Column(String(255), primary_key=True)
value = Column(Text())
@ -104,7 +86,7 @@ class fHDHRdb(object):
db_user = self.config.dict["database"]["user"]
db_pass = self.config.dict["database"]["pass"]
db_host = self.config.dict["database"]["host"]
db_port = self.config.dict["database"]["prt"] # Optional
db_port = self.config.dict["database"]["port"] # Optional
db_name = self.config.dict["database"]["name"] # Optional, depending on DB
# Ensure we have all our variables defined
@ -148,198 +130,6 @@ class fHDHRdb(object):
def get_uri(self):
return self.url
# Channel Values
def set_channel_value(self, channel, key, value, namespace='default'):
channel = channel.lower()
value = json.dumps(value, ensure_ascii=False)
session = self.ssession()
try:
result = session.query(ChannelValues) \
.filter(ChannelValues.channel == channel)\
.filter(ChannelValues.namespace == namespace)\
.filter(ChannelValues.key == key) \
.one_or_none()
# ChannelValues exists, update
if result:
result.value = value
session.commit()
# DNE - Insert
else:
new_channelvalue = ChannelValues(channel=channel, namespace=namespace, key=key, value=value)
session.add(new_channelvalue)
session.commit()
except SQLAlchemyError:
session.rollback()
raise
finally:
session.close()
def get_channel_value(self, channel, key, namespace='default'):
channel = channel.lower()
session = self.ssession()
try:
result = session.query(ChannelValues) \
.filter(ChannelValues.channel == channel)\
.filter(ChannelValues.namespace == namespace)\
.filter(ChannelValues.key == key) \
.one_or_none()
if result is not None:
result = result.value
return _deserialize(result)
except SQLAlchemyError:
session.rollback()
raise
finally:
session.close()
def delete_channel_value(self, channel, key, namespace='default'):
channel = channel.lower()
session = self.ssession()
try:
result = session.query(ChannelValues) \
.filter(ChannelValues.channel == channel)\
.filter(ChannelValues.namespace == namespace)\
.filter(ChannelValues.key == key) \
.one_or_none()
# ChannelValues exists, delete
if result:
session.delete(result)
session.commit()
except SQLAlchemyError:
session.rollback()
raise
finally:
session.close()
# Program Values
def set_program_value(self, program, key, value, namespace='default'):
program = program.lower()
value = json.dumps(value, ensure_ascii=False)
session = self.ssession()
try:
result = session.query(ProgramValues) \
.filter(ProgramValues.program == program)\
.filter(ProgramValues.namespace == namespace)\
.filter(ProgramValues.key == key) \
.one_or_none()
# ProgramValue exists, update
if result:
result.value = value
session.commit()
# DNE - Insert
else:
new_programvalue = ProgramValues(program=program, namespace=namespace, key=key, value=value)
session.add(new_programvalue)
session.commit()
except SQLAlchemyError:
session.rollback()
raise
finally:
session.close()
def get_program_value(self, program, key, namespace='default'):
program = program.lower()
session = self.ssession()
try:
result = session.query(ProgramValues) \
.filter(ProgramValues.program == program)\
.filter(ProgramValues.namespace == namespace)\
.filter(ProgramValues.key == key) \
.one_or_none()
if result is not None:
result = result.value
return _deserialize(result)
except SQLAlchemyError:
session.rollback()
raise
finally:
session.close()
def delete_program_value(self, program, key, namespace='default'):
program = program.lower()
session = self.ssession()
try:
result = session.query(ProgramValues) \
.filter(ProgramValues.program == program)\
.filter(ProgramValues.namespace == namespace)\
.filter(ProgramValues.key == key) \
.one_or_none()
# ProgramValue exists, delete
if result:
session.delete(result)
session.commit()
except SQLAlchemyError:
session.rollback()
raise
finally:
session.close()
# Cache Values
def set_cacheitem_value(self, cacheitem, key, value, namespace='default'):
cacheitem = cacheitem.lower()
value = json.dumps(value, ensure_ascii=False)
session = self.ssession()
try:
result = session.query(CacheValues) \
.filter(CacheValues.cacheitem == cacheitem)\
.filter(CacheValues.namespace == namespace)\
.filter(CacheValues.key == key) \
.one_or_none()
# ProgramValue exists, update
if result:
result.value = value
session.commit()
# DNE - Insert
else:
new_cacheitemvalue = CacheValues(cacheitem=cacheitem, namespace=namespace, key=key, value=value)
session.add(new_cacheitemvalue)
session.commit()
except SQLAlchemyError:
session.rollback()
raise
finally:
session.close()
def get_cacheitem_value(self, cacheitem, key, namespace='default'):
cacheitem = cacheitem.lower()
session = self.ssession()
try:
result = session.query(CacheValues) \
.filter(CacheValues.cacheitem == cacheitem)\
.filter(CacheValues.namespace == namespace)\
.filter(CacheValues.key == key) \
.one_or_none()
if result is not None:
result = result.value
return _deserialize(result)
except SQLAlchemyError:
session.rollback()
raise
finally:
session.close()
def delete_cacheitem_value(self, cacheitem, key, namespace='default'):
cacheitem = cacheitem.lower()
session = self.ssession()
try:
result = session.query(CacheValues) \
.filter(CacheValues.cacheitem == cacheitem)\
.filter(CacheValues.namespace == namespace)\
.filter(CacheValues.key == key) \
.one_or_none()
# ProgramValue exists, delete
if result:
session.delete(result)
session.commit()
except SQLAlchemyError:
session.rollback()
raise
finally:
session.close()
# fHDHR Values
def set_fhdhr_value(self, item, key, value, namespace='default'):
@ -358,8 +148,8 @@ class fHDHRdb(object):
session.commit()
# DNE - Insert
else:
new_cacheitemvalue = fHDHRValues(item=item, namespace=namespace, key=key, value=value)
session.add(new_cacheitemvalue)
new_pluginitemvalue = fHDHRValues(item=item, namespace=namespace, key=key, value=value)
session.add(new_pluginitemvalue)
session.commit()
except SQLAlchemyError:
session.rollback()
@ -403,3 +193,67 @@ class fHDHRdb(object):
raise
finally:
session.close()
# Plugin Values
def set_plugin_value(self, pluginitem, key, value, namespace='default'):
pluginitem = pluginitem.lower()
value = json.dumps(value, ensure_ascii=False)
session = self.ssession()
try:
result = session.query(PluginValues) \
.filter(PluginValues.pluginitem == pluginitem)\
.filter(PluginValues.namespace == namespace)\
.filter(PluginValues.key == key) \
.one_or_none()
# ProgramValue exists, update
if result:
result.value = value
session.commit()
# DNE - Insert
else:
new_pluginitemvalue = PluginValues(pluginitem=pluginitem, namespace=namespace, key=key, value=value)
session.add(new_pluginitemvalue)
session.commit()
except SQLAlchemyError:
session.rollback()
raise
finally:
session.close()
def get_plugin_value(self, pluginitem, key, namespace='default'):
pluginitem = pluginitem.lower()
session = self.ssession()
try:
result = session.query(PluginValues) \
.filter(PluginValues.pluginitem == pluginitem)\
.filter(PluginValues.namespace == namespace)\
.filter(PluginValues.key == key) \
.one_or_none()
if result is not None:
result = result.value
return _deserialize(result)
except SQLAlchemyError:
session.rollback()
raise
finally:
session.close()
def delete_plugin_value(self, pluginitem, key, namespace='default'):
pluginitem = pluginitem.lower()
session = self.ssession()
try:
result = session.query(PluginValues) \
.filter(PluginValues.pluginitem == pluginitem)\
.filter(PluginValues.namespace == namespace)\
.filter(PluginValues.key == key) \
.one_or_none()
# ProgramValue exists, delete
if result:
session.delete(result)
session.commit()
except SQLAlchemyError:
session.rollback()
raise
finally:
session.close()
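The database layer now stores per-plugin values in a single plugin_values table behind set_plugin_value/get_plugin_value/delete_plugin_value. A hedged usage sketch with a dict-backed stand-in so it runs without a real database; only the call shape (item, key, value, namespace) matches the diff:

```python
# Hedged usage sketch for the plugin value helpers added to fHDHRdb above.
# A dict-backed stand-in replaces the real SQLAlchemy-backed class so the
# snippet runs anywhere; values in the real table are JSON-serialised.
class FakePluginDB:
    def __init__(self):
        self.store = {}

    def set_plugin_value(self, pluginitem, key, value, namespace="default"):
        self.store[(pluginitem.lower(), namespace, key)] = value

    def get_plugin_value(self, pluginitem, key, namespace="default"):
        return self.store.get((pluginitem.lower(), namespace, key))

if __name__ == "__main__":
    db = FakePluginDB()
    db.set_plugin_value("NextPVR", "last_update", 1612137600)
    print(db.get_plugin_value("nextpvr", "last_update"))  # 1612137600
```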

View File

@ -1,29 +1,34 @@
from .channels import Channels
from .epg import EPG
from .tuners import Tuners
from .watch import WatchStream
from .images import imageHandler
from .station_scan import Station_Scan
from .ssdp import SSDPServer
from .cluster import fHDHR_Cluster
class fHDHR_Device():
def __init__(self, settings, fhdhr_version, origin, logger, web, db):
def __init__(self, fhdhr, origins):
self.fhdhr = fhdhr
self.channels = Channels(settings, origin, logger, db)
self.channels = Channels(fhdhr, origins)
self.epg = EPG(settings, self.channels, origin, logger, web, db)
self.epg = EPG(fhdhr, self.channels, origins)
self.tuners = Tuners(settings, self.epg, logger)
self.tuners = Tuners(fhdhr, self.epg, self.channels)
self.watch = WatchStream(settings, self.channels, self.tuners, logger, web)
self.images = imageHandler(fhdhr, self.epg)
self.images = imageHandler(settings, self.epg, logger, web)
self.ssdp = SSDPServer(fhdhr)
self.station_scan = Station_Scan(settings, self.channels, logger, db)
self.interfaces = {}
self.ssdp = SSDPServer(settings, fhdhr_version, logger, db)
self.cluster = fHDHR_Cluster(settings, self.ssdp, logger, db, web)
for plugin_name in list(self.fhdhr.plugins.plugins.keys()):
if self.fhdhr.plugins.plugins[plugin_name].manifest["type"] == "interface":
method = self.fhdhr.plugins.plugins[plugin_name].name.lower()
plugin_utils = self.fhdhr.plugins.plugins[plugin_name].plugin_utils
plugin_utils.channels = self.channels
plugin_utils.epg = self.epg
plugin_utils.tuners = self.tuners
plugin_utils.images = self.images
plugin_utils.ssdp = self.ssdp
self.interfaces[method] = self.fhdhr.plugins.plugins[plugin_name].Plugin_OBJ(fhdhr, plugin_utils)
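The device layer above attaches shared objects (channels, epg, tuners, images, ssdp) to an interface plugin's plugin_utils and then instantiates Plugin_OBJ(fhdhr, plugin_utils). A speculative sketch of what such a plugin entry object might look like under that contract; the method shown is an assumption for illustration only:

```python
# Speculative sketch of an "interface" type plugin entry object, based on the
# Plugin_OBJ(fhdhr, plugin_utils) call shape above. The channel_count method
# is an illustrative assumption, not part of the project.
class Plugin_OBJ:
    def __init__(self, fhdhr, plugin_utils):
        self.fhdhr = fhdhr
        self.plugin_utils = plugin_utils

    def channel_count(self):
        # Shared device objects are reachable through plugin_utils.
        return len(self.plugin_utils.channels.get_channels())
```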

View File

@ -1,152 +0,0 @@
import datetime
from collections import OrderedDict
from fHDHR.tools import hours_between_datetime
class ChannelNumbers():
def __init__(self, settings, logger, db):
self.config = settings
self.logger = logger
self.db = db
def get_number(self, channel_id):
cnumbers = self.db.get_fhdhr_value("channel_numbers", "list") or {}
if channel_id in list(cnumbers.keys()):
return cnumbers[channel_id]
used_numbers = []
for channel_id in list(cnumbers.keys()):
used_numbers.append(cnumbers[channel_id])
for i in range(1, 1000):
if str(float(i)) not in used_numbers:
break
return str(float(i))
def set_number(self, channel_id, channel_number):
cnumbers = self.db.get_fhdhr_value("channel_numbers", "list") or {}
cnumbers[channel_id] = str(float(channel_number))
self.db.set_fhdhr_value("channel_numbers", "list", cnumbers)
class Channels():
def __init__(self, settings, origin, logger, db):
self.config = settings
self.logger = logger
self.origin = origin
self.db = db
self.channel_numbers = ChannelNumbers(settings, logger, db)
self.list = {}
self.list_update_time = None
self.get_channels()
def get_origin_status(self):
try:
return self.origin.get_status_dict()
except AttributeError:
return {}
def get_channels(self, forceupdate=False):
"""Pull Channels from origin.
Output a list.
Don't pull more often than 12 hours.
"""
updatelist = False
if not self.list_update_time:
updatelist = True
elif hours_between_datetime(self.list_update_time, datetime.datetime.now()) > 12:
updatelist = True
elif forceupdate:
updatelist = True
if updatelist:
channel_dict_list = self.origin.get_channels()
channel_dict_list = self.verify_channel_info(channel_dict_list)
self.append_channel_info(channel_dict_list)
if not self.list_update_time:
self.logger.info("Found " + str(len(self.list)) + " channels for " + str(self.config.dict["main"]["servicename"]))
self.list_update_time = datetime.datetime.now()
channel_list = []
for chandict in list(self.list.keys()):
channel_list.append(self.list[chandict])
return channel_list
def get_station_list(self, base_url):
station_list = []
for c in self.get_channels():
station_list.append({
'GuideNumber': c['number'],
'GuideName': c['name'],
'URL': self.get_fhdhr_stream_url(base_url, c['number']),
})
return station_list
def get_channel_stream(self, channel_number):
if channel_number not in list(self.list.keys()):
self.get_channels()
if channel_number not in list(self.list.keys()):
return None
if "stream_url" not in list(self.list[channel_number].keys()):
chandict = self.get_channel_dict("number", channel_number)
streamlist, caching = self.origin.get_channel_stream(chandict, self.list)
if caching:
self.append_channel_info(streamlist)
return self.list[channel_number]["stream_url"]
else:
chanstreamdict = next(item for item in streamlist if item["number"] == channel_number)
return chanstreamdict["stream_url"]
return self.list[channel_number]["stream_url"]
def get_station_total(self):
return len(list(self.list.keys()))
def get_channel_dict(self, keyfind, valfind):
chanlist = self.get_channels()
return next(item for item in chanlist if item[keyfind] == valfind)
def get_fhdhr_stream_url(self, base_url, channel_number):
return ('%s/auto/v%s' %
(base_url,
channel_number))
def verify_channel_info(self, channel_dict_list):
"""Some Channel Information is Critical"""
cleaned_channel_dict_list = []
for station_item in channel_dict_list:
if "callsign" not in list(station_item.keys()):
station_item["callsign"] = station_item["name"]
if "id" not in list(station_item.keys()):
station_item["id"] = station_item["name"]
if "number" not in list(station_item.keys()):
station_item["number"] = self.channel_numbers.get_number(station_item["id"])
else:
station_item["number"] = str(float(station_item["number"]))
self.channel_numbers.set_number(station_item["id"], station_item["number"])
cleaned_channel_dict_list.append(station_item)
return cleaned_channel_dict_list
def append_channel_info(self, channel_dict_list):
"""Update the list dict
Take the channel dict list given.
"""
for chan in channel_dict_list:
if chan["number"] not in list(self.list.keys()):
self.list[chan["number"]] = {}
for chankey in list(chan.keys()):
self.list[chan["number"]][chankey] = chan[chankey]
self.channel_order()
def channel_order(self):
"""Verify the Channel Order"""
self.list = OrderedDict(sorted(self.list.items()))

View File

@ -0,0 +1,183 @@
import time
from fHDHR.tools import humanized_time
from .channel import Channel
from .chan_ident import Channel_IDs
class Channels():
def __init__(self, fhdhr, origins):
self.fhdhr = fhdhr
self.origins = origins
self.id_system = Channel_IDs(fhdhr, origins)
self.list = {}
for origin in list(self.origins.origins_dict.keys()):
self.list[origin] = {}
self.get_db_channels()
def get_channel_obj(self, keyfind, valfind, origin=None):
if origin:
origin = origin.lower()
if keyfind == "number":
matches = [self.list[origin][x].dict["id"] for x in list(self.list[origin].keys()) if self.list[origin][x].number == valfind]
else:
matches = [self.list[origin][x].dict["id"] for x in list(self.list[origin].keys()) if self.list[origin][x].dict[keyfind] == valfind]
if len(matches):
return self.list[origin][matches[0]]
else:
matches = []
for origin in list(self.list.keys()):
if keyfind == "number":
matches = [self.list[origin][x].dict["id"] for x in list(self.list[origin].keys()) if self.list[origin][x].number == valfind]
else:
matches = [self.list[origin][x].dict["id"] for x in list(self.list[origin].keys()) if self.list[origin][x].dict[keyfind] == valfind]
if len(matches):
return self.list[origin][matches[0]]
if len(matches):
return self.list[origin][matches[0]]
return None
def get_channel_list(self, keyfind, origin=None):
if origin:
if keyfind == "number":
return [self.list[origin][x].number for x in [x["id"] for x in self.get_channels(origin)]]
else:
return [self.list[origin][x].dict[keyfind] for x in [x["id"] for x in self.get_channels(origin)]]
else:
matches = []
for origin in list(self.list.keys()):
if keyfind == "number":
next_match = [self.list[origin][x].number for x in [x["id"] for x in self.get_channels(origin)]]
else:
next_match = [self.list[origin][x].dict[keyfind] for x in [x["id"] for x in self.get_channels(origin)]]
if len(next_match):
matches.append(next_match)
return matches[0]
def get_channel_dict(self, keyfind, valfind, origin=None):
chan_obj = self.get_channel_obj(keyfind, valfind, origin)
if chan_obj:
return chan_obj.dict
return None
def set_channel_status(self, keyfind, valfind, updatedict, origin):
self.get_channel_obj(keyfind, valfind, origin).set_status(updatedict)
def set_channel_enablement_all(self, enablement, origin):
for fhdhr_id in [x["id"] for x in self.get_channels(origin)]:
self.list[fhdhr_id].set_enablement(enablement, origin)
def set_channel_enablement(self, keyfind, valfind, enablement, origin):
self.get_channel_obj(keyfind, valfind, origin).set_enablement(enablement)
def set_channel_favorite(self, keyfind, valfind, enablement, origin):
self.get_channel_obj(keyfind, valfind, origin).set_favorite(enablement)
def get_db_channels(self, origin=None):
if not origin:
origins_list = list(self.list.keys())
else:
origins_list = origin.lower()
if isinstance(origins_list, str):
origins_list = [origins_list]
for origin in origins_list:
self.fhdhr.logger.info("Checking for %s Channel information stored in the database." % origin)
channel_ids = self.fhdhr.db.get_fhdhr_value("channels", "list", origin) or []
if len(channel_ids):
self.fhdhr.logger.info("Found %s existing channels in the database." % str(len(channel_ids)))
for channel_id in channel_ids:
channel_obj = Channel(self.fhdhr, self.id_system, origin=origin, channel_id=channel_id)
channel_id = channel_obj.dict["id"]
self.list[origin][channel_id] = channel_obj
def save_db_channels(self, origin=None):
if not origin:
origins_list = list(self.list.keys())
else:
origins_list = origin.lower()
if isinstance(origins_list, str):
origins_list = [origins_list]
for origin in origins_list:
channel_ids = [self.list[origin][x].dict["id"] for x in list(self.list[origin].keys())]
self.fhdhr.db.set_fhdhr_value("channels", "list", channel_ids, origin)
def get_channels(self, origin=None, forceupdate=False):
"""Pull Channels from origin.
Output a list.
Don't pull more often than 12 hours.
"""
if not origin:
origins_list = list(self.list.keys())
else:
origins_list = origin.lower().lower()
if isinstance(origins_list, str):
origins_list = [origins_list]
return_chan_list = []
for origin in origins_list:
if not len(list(self.list[origin].keys())):
self.get_db_channels(origin=origin)
if not forceupdate:
return_chan_list.extend([self.list[origin][x].dict for x in list(self.list[origin].keys())])
else:
channel_origin_id_list = [str(self.list[origin][x].dict["origin_id"]) for x in list(self.list[origin].keys())]
self.fhdhr.logger.info("Performing Channel Scan for %s." % origin)
channel_dict_list = self.origins.origins_dict[origin].get_channels()
self.fhdhr.logger.info("Found %s channels for %s." % (len(channel_dict_list), origin))
self.fhdhr.logger.info("Performing Channel Import, This can take some time, Please wait.")
newchan = 0
chan_scan_start = time.time()
for channel_info in channel_dict_list:
chan_existing = str(channel_info["id"]) in channel_origin_id_list
if chan_existing:
channel_obj = self.get_channel_obj("origin_id", channel_info["id"], origin)
else:
channel_obj = Channel(self.fhdhr, self.id_system, origin, origin_id=channel_info["id"])
channel_id = channel_obj.dict["id"]
channel_obj.basics(channel_info)
if not chan_existing:
self.list[origin][channel_id] = channel_obj
newchan += 1
self.fhdhr.logger.info("%s Channel Import took %s" % (origin, humanized_time(time.time() - chan_scan_start)))
if not newchan:
newchan = "no"
self.fhdhr.logger.info("Found %s NEW channels for %s." % (newchan, origin))
self.fhdhr.logger.info("Total %s Channel Count: %s" % (origin, len(self.list[origin].keys())))
self.save_db_channels(origin=origin)
self.fhdhr.db.set_fhdhr_value("channels", "scanned_time", time.time(), origin)
return_chan_list.extend([self.list[origin][x].dict for x in list(self.list[origin].keys())])
return return_chan_list
def get_channel_stream(self, stream_args, origin):
return self.origins.origins_dict[origin].get_channel_stream(self.get_channel_dict("number", stream_args["channel"]), stream_args)
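The rebuilt Channels class keeps a per-origin cache of Channel objects, loads them from the database on startup, and only re-scans the origin when forceupdate is set. A hedged usage sketch; the channels object and the "nextpvr" origin name are assumptions here:

```python
# Hedged usage sketch for the rebuilt Channels class above.
# The channels object and the "nextpvr" origin name are assumed.
def refresh_guide(channels):
    # Cached channel dicts, loaded from the database if needed:
    cached = channels.get_channels(origin="nextpvr")
    # Full re-scan against the origin, importing any new channels:
    rescanned = channels.get_channels(origin="nextpvr", forceupdate=True)
    return len(cached), len(rescanned)
```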

View File

@ -0,0 +1,46 @@
import uuid
class Channel_IDs():
def __init__(self, fhdhr, origins):
self.fhdhr = fhdhr
self.origins = origins
def get(self, origin_id, origin):
existing_ids = self.fhdhr.db.get_fhdhr_value("channels", "list", origin) or []
existing_channel_info = [self.fhdhr.db.get_fhdhr_value(channel_id, "dict", origin) or {} for channel_id in existing_ids]
for existing_channel in existing_channel_info:
if existing_channel["origin_id"] == origin_id:
return existing_channel["id"]
return self.assign(origin)
def assign(self, origin):
existing_ids = self.fhdhr.db.get_fhdhr_value("channels", "list", origin) or []
channel_id = None
while not channel_id:
unique_id = str(uuid.uuid4())
if str(unique_id) not in existing_ids:
channel_id = str(unique_id)
existing_ids.append(channel_id)
self.fhdhr.db.set_fhdhr_value("channels", "list", existing_ids, origin)
return channel_id
def get_number(self, channel_id, origin):
existing_ids = self.fhdhr.db.get_fhdhr_value("channels", "list", origin) or []
existing_channel_info = [self.fhdhr.db.get_fhdhr_value(channel_id, "dict", origin) or {} for channel_id in existing_ids]
cnumber = [existing_channel["number"] for existing_channel in existing_channel_info if existing_channel["id"] == channel_id] or None
if cnumber:
return cnumber
used_numbers = []
for existing_channel in existing_channel_info:
if existing_channel["subnumber"]:
number = "%s.%s" % (existing_channel["number"], existing_channel["subnumber"])
else:
number = existing_channel["number"]
used_numbers.append(number)
for i in range(1000, 2000):
if str(float(i)) not in used_numbers:
break
return str(float(i))
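Channel_IDs assigns UUID-based channel ids and, when the origin supplies no number, hands out the first unused value in the 1000-1999 range. A standalone sketch of that numbering rule with illustrative input:

```python
# Standalone sketch of the numbering rule in Channel_IDs.get_number():
# pick the first value in 1000-1999 whose float form is not already used.
# The used_numbers list is illustrative input.
def next_free_number(used_numbers):
    for i in range(1000, 2000):
        if str(float(i)) not in used_numbers:
            return str(float(i))
    return None  # range exhausted

if __name__ == "__main__":
    print(next_free_number(["1000.0", "1001.0"]))  # -> "1002.0"
```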

View File

@ -0,0 +1,197 @@
import time
class Channel():
def __init__(self, fhdhr, id_system, origin, origin_id=None, channel_id=None):
self.fhdhr = fhdhr
self.origin = origin
self.id_system = id_system
if not channel_id:
if origin_id:
channel_id = id_system.get(origin_id, origin)
else:
channel_id = id_system.assign(origin)
self.channel_id = channel_id
self.dict = self.fhdhr.db.get_fhdhr_value(str(channel_id), "dict", self.origin) or self.default_dict
self.verify_dict()
self.fhdhr.db.set_fhdhr_value(self.dict["id"], "dict", self.dict, self.origin)
@property
def number(self):
if self.dict["subnumber"]:
return "%s.%s" % (self.dict["number"], self.dict["subnumber"])
else:
return self.dict["number"]
@property
def thumbnail(self):
if str(self.dict["thumbnail"]).lower() in ["none"]:
return self.generic_image_url
elif self.dict["thumbnail"]:
return self.dict["thumbnail"]
elif self.dict["origin_thumbnail"]:
return self.dict["origin_thumbnail"]
else:
return self.generic_image_url
@property
def epgdict(self):
return {
"callsign": self.dict["callsign"],
"name": self.dict["name"],
"number": self.number,
"id": self.dict["origin_id"],
"thumbnail": self.thumbnail,
"listing": [],
}
def verify_dict(self):
"""Development Purposes
Add new Channel dict keys
"""
default_dict = self.default_dict
for key in list(default_dict.keys()):
if key not in list(self.dict.keys()):
self.dict[key] = default_dict[key]
if self.dict["number"]:
if "." in self.dict["number"]:
self.dict["subnumber"] = self.dict["number"].split(".")[1]
self.dict["number"] = self.dict["number"].split(".")[0]
def basics(self, channel_info):
"""Some Channel Information is Critical"""
if "name" not in list(channel_info.keys()):
channel_info["name"] = self.dict["id"]
elif not channel_info["name"]:
channel_info["name"] = self.dict["id"]
self.dict["origin_name"] = channel_info["name"]
if not self.dict["name"]:
self.dict["name"] = self.dict["origin_name"]
if "id" not in list(channel_info.keys()):
channel_info["id"] = channel_info["name"]
elif not channel_info["id"]:
channel_info["id"] = channel_info["name"]
self.dict["origin_id"] = channel_info["id"]
if "callsign" not in list(channel_info.keys()):
channel_info["callsign"] = channel_info["name"]
elif not channel_info["callsign"]:
channel_info["callsign"] = channel_info["name"]
self.dict["origin_callsign"] = channel_info["callsign"]
if not self.dict["callsign"]:
self.dict["callsign"] = self.dict["origin_callsign"]
if "tags" not in list(channel_info.keys()):
channel_info["tags"] = []
elif not channel_info["tags"]:
channel_info["tags"] = []
self.dict["origin_tags"] = channel_info["tags"]
if not self.dict["tags"]:
self.dict["tags"] = self.dict["origin_tags"]
if "number" not in list(channel_info.keys()):
channel_info["number"] = self.id_system.get_number(channel_info["id"], self.origin)
elif not channel_info["number"]:
channel_info["number"] = self.id_system.get_number(channel_info["id"], self.origin)
self.dict["origin_number"] = str(channel_info["number"])
if not self.dict["number"]:
self.dict["number"] = self.dict["origin_number"].split(".")[0]
try:
self.dict["subnumber"] = self.dict["origin_number"].split(".")[1]
except IndexError:
self.dict["subnumber"] = None
else:
if "." in self.dict["number"]:
self.dict["subnumber"] = self.dict["number"].split(".")[1]
self.dict["number"] = self.dict["number"].split(".")[0]
if "thumbnail" not in list(channel_info.keys()):
channel_info["thumbnail"] = None
self.dict["origin_thumbnail"] = channel_info["thumbnail"]
if not self.dict["thumbnail"]:
self.dict["thumbnail"] = self.dict["origin_thumbnail"]
if "HD" not in list(channel_info.keys()):
channel_info["HD"] = 0
self.dict["HD"] = channel_info["HD"]
if "enabled" in list(channel_info.keys()):
if "created" not in list(self.dict.keys()):
self.dict["enabled"] = channel_info["enabled"]
if "created" not in list(self.dict.keys()):
self.dict["created"] = time.time()
self.fhdhr.db.set_fhdhr_value(self.dict["id"], "dict", self.dict, self.origin)
@property
def default_dict(self):
return {
"id": str(self.channel_id), "origin_id": None,
"name": None, "origin_name": None,
"callsign": None, "origin_callsign": None,
"number": None, "subnumber": None, "origin_number": None,
"tags": [], "origin_tags": [],
"thumbnail": None, "origin_thumbnail": None,
"enabled": True, "favorite": 0,
"HD": 0,
}
def destroy(self):
self.fhdhr.db.delete_fhdhr_value(self.dict["id"], "dict", self.origin)
channel_ids = self.fhdhr.db.get_fhdhr_value("channels", "list") or []
if self.dict["id"] in channel_ids:
channel_ids.remove(self.dict["id"])
self.fhdhr.db.set_fhdhr_value("channels", "list", channel_ids, self.origin)
def set_status(self, updatedict):
for key in list(updatedict.keys()):
if key == "number":
updatedict[key] = str(updatedict[key])
self.dict[key] = updatedict[key]
self.fhdhr.db.set_fhdhr_value(self.dict["id"], "dict", self.dict, self.origin)
@property
def generic_image_url(self):
return "/api/images?method=generate&type=channel&message=%s" % self.number
@property
def api_stream_url(self):
return '/api/tuners?method=stream&stream_method=%s&channel=%s&origin=%s' % (self.fhdhr.origins.origins_dict[self.origin].stream_method, self.dict["id"], self.origin)
@property
def api_m3u_url(self):
return '/api/m3u?method=get&channel=%s&origin=%s' % (self.dict["id"], self.origin)
def set_favorite(self, enablement):
if enablement == "+":
self.dict["favorite"] = 1
elif enablement == "-":
self.dict["favorite"] = 0
self.fhdhr.db.set_fhdhr_value(self.dict["id"], "info", self.dict, self.origin)
def set_enablement(self, enablement):
if enablement == "disable":
self.dict["enabled"] = False
elif enablement == "enable":
self.dict["enabled"] = True
elif enablement == "toggle":
if self.dict["enabled"]:
self.dict["enabled"] = False
else:
self.dict["enabled"] = True
self.fhdhr.db.set_fhdhr_value(self.dict["id"], "info", self.dict, self.origin)
def __getattr__(self, name):
''' will only get called for undefined attributes '''
if name in list(self.dict.keys()):
return self.dict[name]
else:
return None
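The Channel object splits an origin number such as 4.1 into number and subnumber on import and recombines them in the number property. A tiny standalone sketch of that handling with illustrative values:

```python
# Tiny sketch of the number/subnumber handling in the Channel class above:
# an origin number like "4.1" is split on import and recombined for display.
# Values here are illustrative.
def split_number(origin_number):
    parts = str(origin_number).split(".")
    number = parts[0]
    subnumber = parts[1] if len(parts) > 1 else None
    return number, subnumber

def display_number(number, subnumber):
    return "%s.%s" % (number, subnumber) if subnumber else number

if __name__ == "__main__":
    n, sub = split_number("4.1")
    print(display_number(n, sub))  # -> "4.1"
```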

View File

@ -1,140 +0,0 @@
import urllib.parse
from collections import OrderedDict
class fHDHR_Cluster():
def __init__(self, settings, ssdp, logger, db, web):
self.config = settings
self.logger = logger
self.ssdp = ssdp
self.db = db
self.web = web
self.friendlyname = self.config.dict["fhdhr"]["friendlyname"]
self.location = None
self.location_url = None
if settings.dict["fhdhr"]["discovery_address"]:
self.location = ('http://' + settings.dict["fhdhr"]["discovery_address"] + ':' +
str(settings.dict["fhdhr"]["port"]))
self.location_url = urllib.parse.quote(self.location)
self.startup_sync()
def cluster(self):
return self.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
def get_list(self):
cluster = self.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
return_dict = {}
for location in list(cluster.keys()):
if location != self.location:
return_dict[location] = {
"Joined": True
}
detected_list = self.ssdp.detect_method.get()
for location in detected_list:
if location not in list(cluster.keys()):
return_dict[location] = {
"Joined": False
}
return_dict = OrderedDict(sorted(return_dict.items()))
return return_dict
def default_cluster(self):
defdict = {}
defdict[self.location] = {
"base_url": self.location,
"name": self.friendlyname
}
return defdict
def startup_sync(self):
cluster = self.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
for location in list(cluster.keys()):
if location != self.location:
sync_url = location + "/api/cluster?method=get"
try:
sync_open = self.web.session.get(sync_url)
retrieved_cluster = sync_open.json()
if self.location not in list(retrieved_cluster.keys()):
return self.leave()
except self.web.exceptions.ConnectionError:
self.logger.error("Unreachable: " + location)
def leave(self):
self.db.set_fhdhr_value("cluster", "dict", self.default_cluster())
def disconnect(self):
cluster = self.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
for location in list(cluster.keys()):
if location != self.location:
sync_url = location + "/api/cluster?method=del&location=" + self.location
try:
self.web.session.get(sync_url)
except self.web.exceptions.ConnectionError:
self.logger.error("Unreachable: " + location)
self.leave()
def sync(self, location):
sync_url = location + "/api/cluster?method=get"
try:
sync_open = self.web.session.get(sync_url)
self.db.set_fhdhr_value("cluster", "dict", sync_open.json())
except self.web.exceptions.ConnectionError:
self.logger.error("Unreachable: " + location)
def push_sync(self):
cluster = self.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
for location in list(cluster.keys()):
if location != self.location:
sync_url = location + "/api/cluster?method=sync&location=" + self.location_url
try:
self.web.session.get(sync_url)
except self.web.exceptions.ConnectionError:
self.logger.error("Unreachable: " + location)
def add(self, location):
cluster = self.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
if location not in list(cluster.keys()):
cluster[location] = {"base_url": location}
location_info_url = location + "/discover.json"
try:
location_info_req = self.web.session.get(location_info_url)
except self.web.exceptions.ConnectionError:
self.logger.error("Unreachable: " + location)
del cluster[location]
self.db.set_fhdhr_value("cluster", "dict", cluster)
return
location_info = location_info_req.json()
cluster[location]["name"] = location_info["FriendlyName"]
cluster_info_url = location + "/api/cluster?method=get"
try:
cluster_info_req = self.web.session.get(cluster_info_url)
except self.web.exceptions.ConnectionError:
self.logger.error("Unreachable: " + location)
del cluster[location]
self.db.set_fhdhr_value("cluster", "dict", cluster)
return
cluster_info = cluster_info_req.json()
for cluster_key in list(cluster_info.keys()):
if cluster_key not in list(cluster.keys()):
cluster[cluster_key] = cluster_info[cluster_key]
self.db.set_fhdhr_value("cluster", "dict", cluster)
self.push_sync()
def remove(self, location):
cluster = self.db.get_fhdhr_value("cluster", "dict") or self.default_cluster()
if location in list(cluster.keys()):
del cluster[location]
sync_url = location + "/api/cluster?method=leave"
try:
self.web.session.get(sync_url)
except self.web.exceptions.ConnectionError:
self.logger.error("Unreachable: " + location)
self.push_sync()
self.db.set_fhdhr_value("cluster", "dict", cluster)

View File

@ -1,153 +0,0 @@
import os
import time
import datetime
from collections import OrderedDict
epgtype_list = []
device_dir = os.path.dirname(__file__)
for entry in os.scandir(device_dir + '/epgtypes'):
if entry.is_file():
if entry.name[0] != '_':
epgtype_list.append(str(entry.name[:-3]))
impstring = f'from .epgtypes import {entry.name}'[:-3]
exec(impstring)
class EPG():
def __init__(self, settings, channels, origin, logger, web, db):
self.config = settings
self.logger = logger
self.origin = origin
self.channels = channels
self.web = web
self.db = db
self.epgdict = {}
self.epg_method_selfadd()
self.epg_methods = self.config.dict["epg"]["method"]
self.def_method = self.config.dict["epg"]["def_method"]
self.sleeptime = {}
for epg_method in self.epg_methods:
if epg_method in list(self.config.dict.keys()):
if "update_frequency" in list(self.config.dict[epg_method].keys()):
self.sleeptime[epg_method] = self.config.dict[epg_method]["update_frequency"]
if epg_method not in list(self.sleeptime.keys()):
self.sleeptime[epg_method] = self.config.dict["epg"]["update_frequency"]
def whats_on_now(self, channel):
epgdict = self.get_epg()
listings = epgdict[channel]["listing"]
for listing in listings:
nowtime = datetime.datetime.utcnow()
start_time = datetime.datetime.strptime(listing["time_start"], '%Y%m%d%H%M%S +0000')
end_time = datetime.datetime.strptime(listing["time_end"], '%Y%m%d%H%M%S +0000')
if start_time <= nowtime <= end_time:
epgitem = epgdict[channel].copy()
epgitem["listing"] = [listing]
return epgitem
return None
def whats_on_allchans(self):
channel_guide_list = []
for channel in self.channels.get_channels():
whatson = self.whats_on_now(channel["number"])
if whatson:
channel_guide_list.append(whatson)
return channel_guide_list
def get_epg(self, method=None):
if not method:
method = self.def_method
if (method == self.config.dict["main"]["dictpopname"] or
method not in self.config.dict["main"]["valid_epg_methods"]):
method = "origin"
if method not in list(self.epgdict.keys()):
epgdict = self.db.get_fhdhr_value("epg_dict", method) or None
if not epgdict:
self.update(method)
self.epgdict[method] = self.db.get_fhdhr_value("epg_dict", method) or {}
else:
self.epgdict[method] = epgdict
return self.epgdict[method]
else:
return self.epgdict[method]
def get_thumbnail(self, itemtype, itemid):
if itemtype == "channel":
chandict = self.find_channel_dict(itemid)
return chandict["thumbnail"]
elif itemtype == "content":
progdict = self.find_program_dict(itemid)
return progdict["thumbnail"]
return None
def find_channel_dict(self, channel_id):
epgdict = self.get_epg()
channel_list = []
for channel in list(epgdict.keys()):
channel_list.append(epgdict[channel])
return next(item for item in channel_list if item["id"] == channel_id)
def find_program_dict(self, event_id):
epgdict = self.get_epg()
event_list = []
for channel in list(epgdict.keys()):
event_list.extend(epgdict[channel]["listing"])
return next(item for item in event_list if item["id"] == event_id)
def epg_method_selfadd(self):
for method in epgtype_list:
exec("%s = %s" % ("self." + str(method), str(method) + "." + str(method) + "EPG(self.config, self.channels, self.logger, self.web, self.db)"))
def update(self, method=None):
if not method:
method = self.def_method
if (method == self.config.dict["main"]["dictpopname"] or
method not in self.config.dict["main"]["valid_epg_methods"]):
method = "origin"
epgtypename = method
if method in [self.config.dict["main"]["dictpopname"], "origin"]:
epgtypename = self.config.dict["main"]["dictpopname"]
self.logger.info("Updating " + epgtypename + " EPG cache.")
method_to_call = getattr(self, method)
func_to_call = getattr(method_to_call, 'update_epg')
if method == 'origin':
programguide = func_to_call(self.channels)
else:
programguide = func_to_call()
for chan in list(programguide.keys()):
floatnum = str(float(chan))
programguide[floatnum] = programguide.pop(chan)
programguide[floatnum]["number"] = floatnum
programguide = OrderedDict(sorted(programguide.items()))
for cnum in programguide:
programguide[cnum]["listing"] = sorted(programguide[cnum]["listing"], key=lambda i: i['time_start'])
self.epgdict = programguide
self.db.set_fhdhr_value("epg_dict", method, programguide)
self.db.set_fhdhr_value("update_time", method, time.time())
self.logger.info("Wrote " + epgtypename + " EPG cache.")
def run(self):
for epg_method in self.epg_methods:
self.update(epg_method)
try:
while True:
for epg_method in self.epg_methods:
if time.time() >= (self.db.get_fhdhr_value("update_time", epg_method) + self.sleeptime[epg_method]):
self.update(epg_method)
time.sleep(3600)
except KeyboardInterrupt:
pass


@@ -0,0 +1,318 @@
import time
import datetime
import threading
from fHDHR.tools import channel_sort
from .blocks import blocksEPG
class EPG():
def __init__(self, fhdhr, channels, origins):
self.fhdhr = fhdhr
self.origins = origins
self.channels = channels
self.epgdict = {}
self.epg_methods = self.fhdhr.config.dict["epg"]["method"] or []
self.valid_epg_methods = [x for x in list(self.fhdhr.config.dict["epg"]["valid_methods"].keys()) if x and x not in [None, "None"]]
self.blocks = blocksEPG(self.fhdhr, self.channels, self.origins, None)
self.epg_handling = {}
self.epg_method_selfadd()
self.def_method = self.fhdhr.config.dict["epg"]["def_method"]
self.sleeptime = {}
for epg_method in self.epg_methods:
if epg_method in list(self.fhdhr.config.dict.keys()):
if "update_frequency" in list(self.fhdhr.config.dict[epg_method].keys()):
self.sleeptime[epg_method] = self.fhdhr.config.dict[epg_method]["update_frequency"]
if epg_method not in list(self.sleeptime.keys()):
self.sleeptime[epg_method] = self.fhdhr.config.dict["epg"]["update_frequency"]
self.epg_update_url = "/api/epg?method=update"
self.fhdhr.threads["epg"] = threading.Thread(target=self.run)
def clear_epg_cache(self, method=None):
if not method:
if not self.def_method:
return
if method not in self.valid_epg_methods:
if not self.def_method:
return
method = self.def_method
self.fhdhr.logger.info("Clearing %s EPG cache." % method)
if hasattr(self.epg_handling[method], 'clear_cache'):
self.epg_handling[method].clear_cache()
if method in list(self.epgdict.keys()):
del self.epgdict[method]
self.fhdhr.db.delete_fhdhr_value("epg_dict", method)
def whats_on_now(self, channel_number, method=None, chan_obj=None, chan_dict=None):
nowtime = time.time()
epgdict = self.get_epg(method)
if channel_number not in list(epgdict.keys()):
epgdict[channel_number] = {
"callsign": "",
"name": "",
"number": str(channel_number),
"id": "",
"thumbnail": "",
"listing": []
}
for listing in epgdict[channel_number]["listing"]:
for time_item in ["time_start", "time_end"]:
time_value = listing[time_item]
if str(time_value).endswith("+00:00"):
listing[time_item] = datetime.datetime.strptime(time_value, '%Y%m%d%H%M%S +00:00').timestamp()
elif str(time_value).endswith("+0000"):
listing[time_item] = datetime.datetime.strptime(time_value, '%Y%m%d%H%M%S +0000').timestamp()
else:
listing[time_item] = int(time_value)
if int(listing["time_start"]) <= nowtime <= int(listing["time_end"]):
epgitem = epgdict[channel_number].copy()
epgitem["listing"] = [listing]
return epgitem
epgitem = epgdict[channel_number].copy()
epgitem["listing"] = [self.blocks.empty_listing(chan_obj=None, chan_dict=None)]
return epgitem
def whats_on_allchans(self, method=None):
if not method:
if not self.def_method:
return
method = self.def_method
if method not in self.valid_epg_methods:
if not self.def_method:
return
method = self.def_method
channel_guide_dict = {}
epgdict = self.get_epg(method)
epgdict = epgdict.copy()
for c in list(epgdict.keys()):
if method in [origin for origin in list(self.origins.origins_dict.keys())]:
chan_obj = self.channels.get_channel_obj("origin_id", epgdict[c]["id"])
channel_number = chan_obj.number
epgdict[channel_number] = epgdict.pop(c)
epgdict[channel_number]["name"] = chan_obj.dict["name"]
epgdict[channel_number]["callsign"] = chan_obj.dict["callsign"]
epgdict[channel_number]["number"] = chan_obj.number
epgdict[channel_number]["id"] = chan_obj.dict["origin_id"]
epgdict[channel_number]["thumbnail"] = chan_obj.thumbnail
else:
chan_obj = None
channel_number = c
whatson = self.whats_on_now(channel_number, method, chan_dict=epgdict, chan_obj=chan_obj)
if whatson:
channel_guide_dict[channel_number] = whatson
return channel_guide_dict
def get_epg(self, method=None):
if not method:
if not self.def_method:
return
method = self.def_method
if method not in self.valid_epg_methods:
if not self.def_method:
return
method = self.def_method
if method in list(self.epgdict.keys()):
return self.epgdict[method]
self.update(method)
self.epgdict[method] = self.fhdhr.db.get_fhdhr_value("epg_dict", method) or {}
return self.epgdict[method]
def get_thumbnail(self, itemtype, itemid):
if itemtype == "channel":
chandict = self.find_channel_dict(itemid)
return chandict["thumbnail"]
elif itemtype == "content":
progdict = self.find_program_dict(itemid)
return progdict["thumbnail"]
return None
def find_channel_dict(self, channel_id):
epgdict = self.get_epg()
channel_list = [epgdict[x] for x in list(epgdict.keys())]
return next((item for item in channel_list if item["id"] == channel_id), None)
def find_program_dict(self, event_id):
epgdict = self.get_epg()
event_list = []
for channel in list(epgdict.keys()):
event_list.extend(epgdict[channel]["listing"])
return next((item for item in event_list if item["id"] == event_id), None)
def epg_method_selfadd(self):
for plugin_name in list(self.fhdhr.plugins.plugins.keys()):
if self.fhdhr.plugins.plugins[plugin_name].type == "alt_epg":
method = self.fhdhr.plugins.plugins[plugin_name].name.lower()
self.epg_handling[method] = self.fhdhr.plugins.plugins[plugin_name].Plugin_OBJ(self.channels, self.fhdhr.plugins.plugins[plugin_name].plugin_utils)
for origin in list(self.origins.origins_dict.keys()):
if origin.lower() not in list(self.epg_handling.keys()):
self.epg_handling[origin.lower()] = blocksEPG(self.fhdhr, self.channels, self.origins, origin)
self.fhdhr.config.register_valid_epg_method(origin, "Blocks")
self.valid_epg_methods.append(origin.lower())
def update(self, method=None):
if not method:
if not self.def_method:
return
method = self.def_method
if method not in self.valid_epg_methods:
if not self.def_method:
return
method = self.def_method
self.fhdhr.logger.info("Updating %s EPG cache." % method)
programguide = self.epg_handling[method].update_epg()
# sort the channel listings by time stamp
for cnum in list(programguide.keys()):
programguide[cnum]["listing"] = sorted(programguide[cnum]["listing"], key=lambda i: i['time_start'])
# Generate Block periods between EPG data, if missing
clean_prog_guide = {}
desired_start_time = (datetime.datetime.today() + datetime.timedelta(days=self.fhdhr.config.dict["epg"]["reverse_days"])).timestamp()
desired_end_time = (datetime.datetime.today() + datetime.timedelta(days=self.fhdhr.config.dict["epg"]["forward_days"])).timestamp()
for cnum in list(programguide.keys()):
if cnum not in list(clean_prog_guide.keys()):
clean_prog_guide[cnum] = programguide[cnum].copy()
clean_prog_guide[cnum]["listing"] = []
if method in [origin for origin in list(self.origins.origins_dict.keys())]:
chan_obj = self.channels.get_channel_obj("origin_id", programguide[cnum]["id"])
else:
chan_obj = None
# Generate Blocks for Channels containing No Listings
if not len(programguide[cnum]["listing"]):
timestamps = self.blocks.timestamps_between(desired_start_time, desired_end_time)
clean_prog_dicts = self.blocks.empty_channel_epg(timestamps, chan_dict=programguide[cnum], chan_obj=chan_obj)
clean_prog_guide[cnum]["listing"].extend(clean_prog_dicts)
else:
# Convert timestamps from the old xmltv format to epoch timestamps
progindex = 0
for program_item in programguide[cnum]["listing"]:
for time_item in ["time_start", "time_end"]:
time_value = programguide[cnum]["listing"][progindex][time_item]
if str(time_value).endswith("+00:00"):
programguide[cnum]["listing"][progindex][time_item] = datetime.datetime.strptime(time_value, '%Y%m%d%H%M%S +00:00').timestamp()
elif str(time_value).endswith("+0000"):
programguide[cnum]["listing"][progindex][time_item] = datetime.datetime.strptime(time_value, '%Y%m%d%H%M%S +0000').timestamp()
else:
programguide[cnum]["listing"][progindex][time_item] = int(time_value)
progindex += 1
# Generate time before the listing actually starts
first_prog_time = programguide[cnum]["listing"][0]['time_start']
if desired_start_time < first_prog_time:
timestamps = self.blocks.timestamps_between(desired_start_time, first_prog_time)
clean_prog_dicts = self.blocks.empty_channel_epg(timestamps, chan_dict=programguide[cnum], chan_obj=chan_obj)
clean_prog_guide[cnum]["listing"].extend(clean_prog_dicts)
# Generate time blocks between events if chunks of time are missing
progindex = 0
for program_item in programguide[cnum]["listing"]:
try:
nextprog_dict = programguide[cnum]["listing"][progindex + 1]
except IndexError:
nextprog_dict = None
if not nextprog_dict:
clean_prog_guide[cnum]["listing"].append(program_item)
else:
if nextprog_dict['time_start'] > program_item['time_end']:
timestamps = self.blocks.timestamps_between(program_item['time_end'], nextprog_dict['time_start'])
clean_prog_dicts = self.blocks.empty_channel_epg(timestamps, chan_dict=programguide[cnum], chan_obj=chan_obj)
clean_prog_guide[cnum]["listing"].extend(clean_prog_dicts)
else:
clean_prog_guide[cnum]["listing"].append(program_item)
progindex += 1
# Generate time after the listing actually ends
end_prog_time = programguide[cnum]["listing"][progindex]['time_end']
if desired_end_time > end_prog_time:
timestamps = self.blocks.timestamps_between(end_prog_time, desired_end_time)
clean_prog_dicts = self.blocks.empty_channel_epg(timestamps, chan_dict=programguide[cnum], chan_obj=chan_obj)
clean_prog_guide[cnum]["listing"].extend(clean_prog_dicts)
programguide = clean_prog_guide.copy()
# if a stock method, generate Blocks EPG for missing channels
if method in [origin for origin in list(self.origins.origins_dict.keys())]:
timestamps = self.blocks.timestamps
for fhdhr_id in [x["id"] for x in self.channels.get_channels(method)]:
chan_obj = self.channels.get_channel_obj("id", fhdhr_id, method)
if str(chan_obj.number) not in list(programguide.keys()):
programguide[str(chan_obj.number)] = chan_obj.epgdict
clean_prog_dicts = self.blocks.empty_channel_epg(timestamps, chan_obj=chan_obj)
programguide[str(chan_obj.number)]["listing"].extend(clean_prog_dicts)
# Make Thumbnails for missing thumbnails
for cnum in list(programguide.keys()):
if not programguide[cnum]["thumbnail"]:
programguide[cnum]["thumbnail"] = "/api/images?method=generate&type=channel&message=%s" % programguide[cnum]["number"]
programguide[cnum]["listing"] = sorted(programguide[cnum]["listing"], key=lambda i: i['time_start'])
prog_index = 0
for program_item in programguide[cnum]["listing"]:
if not programguide[cnum]["listing"][prog_index]["thumbnail"]:
programguide[cnum]["listing"][prog_index]["thumbnail"] = programguide[cnum]["thumbnail"]
prog_index += 1
# Get Totals
total_channels = len(list(programguide.keys()))
total_programs = 0
# Sort the channels
sorted_channel_list = channel_sort(list(programguide.keys()))
sorted_chan_guide = {}
for channel in sorted_channel_list:
total_programs += len(programguide[channel]["listing"])
sorted_chan_guide[channel] = programguide[channel]
self.epgdict[method] = sorted_chan_guide
self.fhdhr.db.set_fhdhr_value("epg_dict", method, programguide)
self.fhdhr.db.set_fhdhr_value("update_time", method, time.time())
self.fhdhr.logger.info("Wrote %s EPG cache. %s Programs for %s Channels" % (method, total_programs, total_channels))
def start(self):
self.fhdhr.logger.info("EPG Update Thread Starting")
self.fhdhr.threads["epg"].start()
def stop(self):
self.fhdhr.logger.info("EPG Update Thread Stopping")
def run(self):
time.sleep(1800)
while True:
for epg_method in self.epg_methods:
last_update_time = self.fhdhr.db.get_fhdhr_value("update_time", epg_method)
updatetheepg = False
if not last_update_time:
updatetheepg = True
elif time.time() >= (last_update_time + self.sleeptime[epg_method]):
updatetheepg = True
if updatetheepg:
self.fhdhr.api.get("%s&source=%s" % (self.epg_update_url, epg_method))
time.sleep(1800)
self.stop()
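A standalone sketch of the timestamp normalization performed in whats_on_now() and update() above: xmltv-style strings ending in +0000 or +00:00 are converted to epoch seconds, anything else is treated as already numeric. Unlike the literal-suffix formats above, this parses the offset with %z so .timestamp() is UTC-correct (Python 3.7+).

from datetime import datetime, timezone

def normalize_epg_time(value):
    # Accepts '20210131120000 +0000', '20210131120000 +00:00', or an epoch int/str.
    text = str(value)
    if text.endswith(("+0000", "+00:00")):
        return datetime.strptime(text, "%Y%m%d%H%M%S %z").timestamp()
    return int(value)

assert normalize_epg_time("20210131120000 +0000") == datetime(2021, 1, 31, 12, tzinfo=timezone.utc).timestamp()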

fHDHR/device/epg/blocks.py (new file, 120 lines)

@@ -0,0 +1,120 @@
import datetime
class blocksEPG():
def __init__(self, fhdhr, channels, origins, origin):
self.fhdhr = fhdhr
self.channels = channels
self.origins = origins
self.origin = origin
def update_epg(self):
programguide = {}
timestamps = self.timestamps
for fhdhr_id in [x["id"] for x in self.channels.get_channels(self.origin)]:
chan_obj = self.channels.get_channel_obj("id", fhdhr_id, self.origin)
if str(chan_obj.number) not in list(programguide.keys()):
programguide[str(chan_obj.number)] = chan_obj.epgdict
clean_prog_dicts = self.empty_channel_epg(timestamps, chan_obj=chan_obj)
for clean_prog_dict in clean_prog_dicts:
programguide[str(chan_obj.number)]["listing"].append(clean_prog_dict)
return programguide
@property
def timestamps(self):
desired_start_time = (datetime.datetime.today() + datetime.timedelta(days=self.fhdhr.config.dict["epg"]["reverse_days"])).timestamp()
desired_end_time = (datetime.datetime.today() + datetime.timedelta(days=self.fhdhr.config.dict["epg"]["forward_days"])).timestamp()
return self.timestamps_between(desired_start_time, desired_end_time)
def timestamps_between(self, starttime, endtime):
timestamps = []
desired_blocksize = self.fhdhr.config.dict["epg"]["block_size"]
current_time = starttime
while (current_time + desired_blocksize) <= endtime:
timestampdict = {
"time_start": current_time,
"time_end": current_time + desired_blocksize,
}
timestamps.append(timestampdict)
current_time += desired_blocksize
if current_time < endtime:
timestampdict = {
"time_start": current_time,
"time_end": endtime
}
timestamps.append(timestampdict)
return timestamps
def single_channel_epg(self, timestampdict, chan_obj=None, chan_dict=None):
if chan_obj:
content_id = "%s_%s" % (chan_obj.dict["origin_id"], timestampdict['time_start'])
elif chan_dict:
content_id = "%s_%s" % (chan_dict["id"], timestampdict['time_start'])
clean_prog_dict = {
"time_start": timestampdict['time_start'],
"time_end": timestampdict['time_end'],
"duration_minutes": (timestampdict['time_end'] - timestampdict['time_start']) / 60,
"title": "Unavailable",
"sub-title": "Unavailable",
"description": "Unavailable",
"rating": "N/A",
"episodetitle": None,
"releaseyear": None,
"genres": [],
"seasonnumber": None,
"episodenumber": None,
"isnew": False,
"id": content_id,
}
if chan_obj:
clean_prog_dict["thumbnail"] = chan_obj.thumbnail
elif chan_dict:
clean_prog_dict["thumbnail"] = chan_dict["thumbnail"]
if not clean_prog_dict["thumbnail"]:
clean_prog_dict["thumbnail"] = "/api/images?method=generate&type=content&message=%s" % content_id
return clean_prog_dict
def empty_channel_epg(self, timestamps, chan_obj=None, chan_dict=None):
clean_prog_dicts = []
for timestampdict in timestamps:
clean_prog_dict = self.single_channel_epg(timestampdict, chan_obj=chan_obj, chan_dict=chan_dict)
clean_prog_dicts.append(clean_prog_dict)
return clean_prog_dicts
def empty_listing(self, chan_obj=None, chan_dict=None):
clean_prog_dict = {
"time_start": None,
"time_end": None,
"duration_minutes": None,
"title": "Unavailable",
"sub-title": "Unavailable",
"description": "Unavailable",
"rating": "N/A",
"episodetitle": None,
"releaseyear": None,
"genres": [],
"seasonnumber": None,
"episodenumber": None,
"isnew": False,
"id": "Unavailable",
}
if chan_obj:
clean_prog_dict["thumbnail"] = chan_obj.thumbnail
elif chan_dict:
clean_prog_dict["thumbnail"] = chan_dict["thumbnail"]
else:
clean_prog_dict["thumbnail"] = None
if not clean_prog_dict["thumbnail"]:
clean_prog_dict["thumbnail"] = "/api/images?method=generate&type=content&message=Unavailable"
return clean_prog_dict
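A standalone sketch of the block carving done by timestamps_between() above: the window is split into fixed-size blocks plus a shorter trailing block. The 1800 second default stands in for the epg block_size config value.

def timestamps_between(starttime, endtime, block_size=1800):
    blocks = []
    current = starttime
    while current + block_size <= endtime:
        blocks.append({"time_start": current, "time_end": current + block_size})
        current += block_size
    if current < endtime:
        blocks.append({"time_start": current, "time_end": endtime})
    return blocks

# A 75 minute window with 30 minute blocks yields 30, 30 and 15 minute entries.
print(timestamps_between(0, 4500, 1800))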


@@ -1,65 +0,0 @@
import datetime
class blocksEPG():
def __init__(self, settings, channels, logger, web, db):
self.config = settings
self.logger = logger
self.channels = channels
self.db = db
def update_epg(self):
programguide = {}
timestamps = []
todaydate = datetime.date.today()
for x in range(0, 6):
xdate = todaydate + datetime.timedelta(days=x)
xtdate = xdate + datetime.timedelta(days=1)
for hour in range(0, 24):
time_start = datetime.datetime.combine(xdate, datetime.time(hour, 0))
if hour + 1 < 24:
time_end = datetime.datetime.combine(xdate, datetime.time(hour + 1, 0))
else:
time_end = datetime.datetime.combine(xtdate, datetime.time(0, 0))
timestampdict = {
"time_start": str(time_start.strftime('%Y%m%d%H%M%S')) + " +0000",
"time_end": str(time_end.strftime('%Y%m%d%H%M%S')) + " +0000",
}
timestamps.append(timestampdict)
for c in self.channels.get_channels():
if str(c["number"]) not in list(programguide.keys()):
programguide[str(c["number"])] = {
"callsign": c["callsign"],
"name": c["name"],
"number": c["number"],
"id": c["id"],
"thumbnail": ("/api/images?method=generate&type=channel&message=%s" % (str(c['number']))),
"listing": [],
}
for timestamp in timestamps:
clean_prog_dict = {
"time_start": timestamp['time_start'],
"time_end": timestamp['time_end'],
"duration_minutes": 60,
"thumbnail": ("/api/images?method=generate&type=content&message=%s" % (str(c["id"]) + "_" + str(timestamp['time_start']).split(" ")[0])),
"title": "Unavailable",
"sub-title": "Unavailable",
"description": "Unavailable",
"rating": "N/A",
"episodetitle": None,
"releaseyear": None,
"genres": [],
"seasonnumber": None,
"episodenumber": None,
"isnew": False,
"id": str(c["id"]) + "_" + str(timestamp['time_start']).split(" ")[0],
}
programguide[str(c["number"])]["listing"].append(clean_prog_dict)
return programguide


@@ -1,157 +0,0 @@
import json
import time
import datetime
import urllib.parse
from fHDHR.tools import xmldictmaker
from fHDHR.exceptions import EPGSetupError
class zap2itEPG():
def __init__(self, settings, channels, logger, web, db):
self.config = settings
self.logger = logger
self.channels = channels
self.web = web
self.db = db
self.postalcode = self.config.dict["zap2it"]["postalcode"]
self.web_cache_dir = self.config.dict["filedir"]["epg_cache"]["zap2it"]["web_cache"]
def get_location(self):
self.logger.warning("Zap2it postalcode not set, attempting to retrieve.")
if not self.postalcode:
try:
postalcode_url = 'http://ipinfo.io/json'
postalcode_req = self.web.session.get(postalcode_url)
data = postalcode_req.json()
self.postalcode = data["postal"]
except Exception as e:
raise EPGSetupError("Unable to automatically obtain zap2it postalcode: " + str(e))
return self.postalcode
def update_epg(self):
programguide = {}
# Start time parameter is now rounded down to nearest `zap_timespan`, in s.
zap_time = datetime.datetime.utcnow().timestamp()
zap_time_window = int(self.config.dict["zap2it"]["timespan"]) * 3600
zap_time = int(zap_time - (zap_time % zap_time_window))
self.remove_stale_cache(zap_time)
# Fetch data in `zap_timespan` chunks.
for i in range(int(7 * 24 / int(self.config.dict["zap2it"]["timespan"]))):
i_time = zap_time + (i * zap_time_window)
parameters = {
'aid': self.config.dict["zap2it"]['affiliate_id'],
'country': self.config.dict["zap2it"]['country'],
'device': self.config.dict["zap2it"]['device'],
'headendId': self.config.dict["zap2it"]['headendid'],
'isoverride': "true",
'languagecode': self.config.dict["zap2it"]['languagecode'],
'pref': 'm,p',
'timespan': self.config.dict["zap2it"]['timespan'],
'timezone': self.config.dict["zap2it"]['timezone'],
'userId': self.config.dict["zap2it"]['userid'],
'postalCode': str(self.postalcode or self.get_location()),
'lineupId': '%s-%s-DEFAULT' % (self.config.dict["zap2it"]['country'], self.config.dict["zap2it"]['device']),
'time': i_time,
'Activity_ID': 1,
'FromPage': "TV%20Guide",
}
url = 'https://tvlistings.zap2it.com/api/grid?'
url += urllib.parse.urlencode(parameters)
result = self.get_cached(str(i_time), self.config.dict["zap2it"]['delay'], url)
d = json.loads(result)
for c in d['channels']:
cdict = xmldictmaker(c, ["callSign", "name", "channelNo", "channelId", "thumbnail"])
if str(cdict['channelNo']) not in list(programguide.keys()):
programguide[str(cdict['channelNo'])] = {
"callsign": cdict["callSign"],
"name": cdict["name"] or cdict["callSign"], # TODO
"number": str(cdict["channelNo"]),
"id": str(cdict["channelId"]),
"thumbnail": str(cdict['thumbnail']).replace("//", "https://").split("?")[0],
"listing": [],
}
for event in c['events']:
eventdict = xmldictmaker(event, ["startTime", "endTime", "duration", "rating", "flag"], list_items=["filter", "flag"])
progdict = xmldictmaker(event['program'], ["title", "sub-title", "releaseYear", "episodeTitle", "shortDesc", "season", "episode", "id"])
clean_prog_dict = {
"time_start": self.xmltimestamp_zap(eventdict['startTime']),
"time_end": self.xmltimestamp_zap(eventdict['endTime']),
"duration_minutes": eventdict['duration'],
"thumbnail": str("https://zap2it.tmsimg.com/assets/" + str(eventdict['thumbnail']) + ".jpg"),
"title": progdict['title'] or "Unavailable",
"sub-title": progdict['sub-title'] or "Unavailable",
"description": progdict['shortDesc'] or "Unavailable",
"rating": eventdict['rating'] or "N/A",
"episodetitle": progdict['episodeTitle'],
"releaseyear": progdict['releaseYear'],
"genres": [],
"seasonnumber": progdict['season'],
"episodenumber": progdict['episode'],
"isnew": False,
"id": str(progdict['id'] or self.xmltimestamp_zap(eventdict['startTime'])),
}
for f in eventdict['filter']:
clean_prog_dict["genres"].append(f.replace('filter-', ''))
if 'movie' in clean_prog_dict['genres'] and clean_prog_dict['releaseyear']:
clean_prog_dict["sub-title"] = 'Movie: ' + clean_prog_dict['releaseyear']
elif clean_prog_dict['episodetitle']:
clean_prog_dict["sub-title"] = clean_prog_dict['episodetitle']
if 'New' in eventdict['flag'] and 'live' not in eventdict['flag']:
clean_prog_dict["isnew"] = True
programguide[str(cdict["channelNo"])]["listing"].append(clean_prog_dict)
return programguide
def xmltimestamp_zap(self, inputtime):
xmltime = inputtime.replace('Z', '+00:00')
xmltime = datetime.datetime.fromisoformat(xmltime)
xmltime = xmltime.strftime('%Y%m%d%H%M%S %z')
return xmltime
def get_cached(self, cache_key, delay, url):
cache_path = self.web_cache_dir.joinpath(cache_key)
if cache_path.is_file():
self.logger.info('FROM CACHE: ' + str(cache_path))
with open(cache_path, 'rb') as f:
return f.read()
else:
self.logger.info('Fetching: ' + url)
resp = self.web.session.get(url)
result = resp.content
with open(cache_path, 'wb') as f:
f.write(result)
time.sleep(int(delay))
return result
def remove_stale_cache(self, zap_time):
for p in self.web_cache_dir.glob('*'):
try:
t = int(p.name)
if t >= zap_time:
continue
except Exception as e:
self.logger.error(e)
pass
self.logger.info('Removing stale cache file: ' + p.name)
p.unlink()
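A minimal sketch of how the grid request URL above is assembled; the parameter values here are placeholders, and the full parameter set comes from the [zap2it] config section as shown in update_epg().

import urllib.parse

params = {
    "country": "USA",            # placeholder values, normally read from config
    "device": "-",
    "postalCode": "90210",
    "timespan": 3,
    "time": 1612100000,
    "lineupId": "USA---DEFAULT",
    "pref": "m,p",
}
url = "https://tvlistings.zap2it.com/api/grid?" + urllib.parse.urlencode(params)
print(url)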


@@ -6,18 +6,15 @@ import PIL.ImageFont
class imageHandler():
def __init__(self, settings, epg, logger, web):
self.config = settings
self.logger = logger
self.epg = epg
self.web = web
def __init__(self, fhdhr, epg):
self.fhdhr = fhdhr
def get_epg_image(self, image_type, content_id):
imageUri = self.epg.get_thumbnail(image_type, str(content_id))
if not imageUri:
return self.generate_image(image_type, str(content_id))
req = self.web.session.get(imageUri)
req = self.fhdhr.web.session.get(imageUri)
return req.content
def getSize(self, txt, font):
@@ -38,7 +35,7 @@ class imageHandler():
colorBackground = "#228822"
colorText = "#717D7E"
colorOutline = "#717D7E"
fontname = str(self.config.dict["filedir"]["font"])
fontname = str(self.fhdhr.config.internal["paths"]["font"])
font = PIL.ImageFont.truetype(fontname, fontsize)
text_width, text_height = self.getSize(message, font)
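A minimal sketch of the placeholder image generation this handler performs, assuming Pillow 8+ (for textbbox); the font path, font size and image dimensions are illustrative assumptions, while the colors match those shown above.

import io
import PIL.Image
import PIL.ImageDraw
import PIL.ImageFont

def generate_placeholder(message, font_path, width=360, height=270):
    # Render the message centred on a solid background and return PNG bytes.
    image = PIL.Image.new("RGB", (width, height), "#228822")
    draw = PIL.ImageDraw.Draw(image)
    font = PIL.ImageFont.truetype(font_path, 24)
    left, top, right, bottom = draw.textbbox((0, 0), message, font=font)
    text_w, text_h = right - left, bottom - top
    draw.text(((width - text_w) / 2, (height - text_h) / 2), message, font=font, fill="#717D7E")
    buffer = io.BytesIO()
    image.save(buffer, format="PNG")
    return buffer.getvalue()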


@@ -1,192 +0,0 @@
# Adapted from https://github.com/MoshiBin/ssdpy and https://github.com/ZeWaren/python-upnp-ssdp-example
import socket
import struct
class fHDHR_Detect():
def __init__(self, settings, logger, db):
self.config = settings
self.db = db
self.db.delete_fhdhr_value("ssdp_detect", "list")
def set(self, location):
detect_list = self.db.get_fhdhr_value("ssdp_detect", "list") or []
if location not in detect_list:
detect_list.append(location)
self.db.set_fhdhr_value("ssdp_detect", "list", detect_list)
def get(self):
return self.db.get_fhdhr_value("ssdp_detect", "list") or []
class SSDPServer():
def __init__(self, settings, fhdhr_version, logger, db):
self.config = settings
self.logger = logger
self.db = db
self.detect_method = fHDHR_Detect(settings, logger, db)
if settings.dict["fhdhr"]["discovery_address"]:
self.sock = None
self.proto = "ipv4"
self.port = 1900
self.iface = None
self.address = None
self.server = 'fHDHR/%s UPnP/1.0' % fhdhr_version
allowed_protos = ("ipv4", "ipv6")
if self.proto not in allowed_protos:
raise ValueError("Invalid proto - expected one of {}".format(allowed_protos))
self.nt = 'urn:schemas-upnp-org:device:MediaServer:1'
self.usn = 'uuid:' + settings.dict["main"]["uuid"] + '::' + self.nt
self.location = ('http://' + settings.dict["fhdhr"]["discovery_address"] + ':' +
str(settings.dict["fhdhr"]["port"]) + '/device.xml')
self.al = self.location
self.max_age = 1800
self._iface = None
if self.proto == "ipv4":
self._af_type = socket.AF_INET
self._broadcast_ip = "239.255.255.250"
self._address = (self._broadcast_ip, self.port)
self.bind_address = "0.0.0.0"
elif self.proto == "ipv6":
self._af_type = socket.AF_INET6
self._broadcast_ip = "ff02::c"
self._address = (self._broadcast_ip, self.port, 0, 0)
self.bind_address = "::"
self.broadcast_addy = "{}:{}".format(self._broadcast_ip, self.port)
self.sock = socket.socket(self._af_type, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
# Bind to specific interface
if self.iface is not None:
self.sock.setsockopt(socket.SOL_SOCKET, getattr(socket, "SO_BINDTODEVICE", 25), self.iface)
# Subscribe to multicast address
if self.proto == "ipv4":
mreq = socket.inet_aton(self._broadcast_ip)
if self.address is not None:
mreq += socket.inet_aton(self.address)
else:
mreq += struct.pack(b"@I", socket.INADDR_ANY)
self.sock.setsockopt(
socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq,
)
# Allow multicasts on loopback devices (necessary for testing)
self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, 1)
elif self.proto == "ipv6":
# In IPv6 we use the interface index, not the address when subscribing to the group
mreq = socket.inet_pton(socket.AF_INET6, self._broadcast_ip)
if self.iface is not None:
iface_index = socket.if_nametoindex(self.iface)
# Send outgoing packets from the same interface
self.sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_IF, iface_index)
mreq += struct.pack(b"@I", iface_index)
else:
mreq += socket.inet_pton(socket.AF_INET6, "::")
self.sock.setsockopt(
socket.IPPROTO_IPV6, socket.IPV6_JOIN_GROUP, mreq,
)
self.sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_LOOP, 1)
self.sock.bind((self.bind_address, self.port))
self.notify_payload = self.create_notify_payload()
self.msearch_payload = self.create_msearch_payload()
self.m_search()
def on_recv(self, data, address):
self.logger.debug("Received packet from {}: {}".format(address, data))
(host, port) = address
header, payload = data.decode().split('\r\n\r\n')[:2]
lines = header.split('\r\n')
cmd = lines[0].split(' ')
lines = map(lambda x: x.replace(': ', ':', 1), lines[1:])
lines = filter(lambda x: len(x) > 0, lines)
headers = [x.split(':', 1) for x in lines]
headers = dict(map(lambda x: (x[0].lower(), x[1]), headers))
if cmd[0] == 'M-SEARCH' and cmd[1] == '*':
# SSDP discovery
self.logger.debug("Received qualifying M-SEARCH from {}".format(address))
self.logger.debug("M-SEARCH data: {}".format(headers))
notify = self.notify_payload
self.logger.debug("Created NOTIFY: {}".format(notify))
try:
self.sock.sendto(notify, address)
except OSError as e:
# Most commonly: We received a multicast from an IP not in our subnet
self.logger.debug("Unable to send NOTIFY to {}: {}".format(address, e))
pass
elif cmd[0] == 'NOTIFY' and cmd[1] == '*':
# SSDP presence
self.logger.debug("NOTIFY data: {}".format(headers))
if headers["server"].startswith("fHDHR"):
if headers["location"] != self.location:
self.detect_method.set(headers["location"].split("/device.xml")[0])
else:
self.logger.debug('Unknown SSDP command %s %s' % (cmd[0], cmd[1]))
def m_search(self):
data = self.msearch_payload
self.sock.sendto(data, self._address)
def create_notify_payload(self):
if self.max_age is not None and not isinstance(self.max_age, int):
raise ValueError("max_age must by of type: int")
data = (
"NOTIFY * HTTP/1.1\r\n"
"HOST:{}\r\n"
"NT:{}\r\n"
"NTS:ssdp:alive\r\n"
"USN:{}\r\n"
"SERVER:{}\r\n"
).format(
self._broadcast_ip,
self.nt,
self.usn,
self.server
)
if self.location is not None:
data += "LOCATION:{}\r\n".format(self.location)
if self.al is not None:
data += "AL:{}\r\n".format(self.al)
if self.max_age is not None:
data += "Cache-Control:max-age={}\r\n".format(self.max_age)
data += "\r\n"
return data.encode("utf-8")
def create_msearch_payload(self):
data = (
"M-SEARCH * HTTP/1.1\r\n"
"HOST:{}\r\n"
'MAN: "ssdp:discover"\r\n'
"ST:{}\r\n"
"MX:{}\r\n"
).format(
self.broadcast_addy,
"ssdp:all",
1
)
data += "\r\n"
return data.encode("utf-8")
def run(self):
try:
while True:
data, address = self.sock.recvfrom(1024)
self.on_recv(data, address)
except KeyboardInterrupt:
self.sock.close()


@@ -0,0 +1,214 @@
# Adapted from https://github.com/MoshiBin/ssdpy and https://github.com/ZeWaren/python-upnp-ssdp-example
import socket
import struct
import time
import threading
class SSDPServer():
def __init__(self, fhdhr):
self.fhdhr = fhdhr
self.ssdp_handling = {}
self.methods = [x for x in list(self.fhdhr.plugins.plugins.keys()) if self.fhdhr.plugins.plugins[x].type == "ssdp"]
if (self.fhdhr.config.dict["fhdhr"]["discovery_address"] and
self.fhdhr.config.dict["ssdp"]["enabled"] and
len(self.methods)):
self.fhdhr.threads["ssdp"] = threading.Thread(target=self.run)
self.setup_ssdp()
self.sock.bind((self.bind_address, 1900))
self.msearch_payload = self.create_msearch_payload()
self.max_age = int(fhdhr.config.dict["ssdp"]["max_age"])
self.age_time = None
self.ssdp_method_selfadd()
self.do_alive()
self.m_search()
def ssdp_method_selfadd(self):
for plugin_name in list(self.fhdhr.plugins.plugins.keys()):
if self.fhdhr.plugins.plugins[plugin_name].type == "ssdp":
method = self.fhdhr.plugins.plugins[plugin_name].name.lower()
plugin_utils = self.fhdhr.plugins.plugins[plugin_name].plugin_utils
self.ssdp_handling[method] = self.fhdhr.plugins.plugins[plugin_name].Plugin_OBJ(self.fhdhr, plugin_utils, self.broadcast_ip, self.max_age)
def start(self):
self.fhdhr.logger.info("SSDP Server Starting")
self.fhdhr.threads["ssdp"].start()
def stop(self):
self.fhdhr.logger.info("SSDP Server Stopping")
self.sock.close()
def run(self):
while True:
data, address = self.sock.recvfrom(1024)
self.on_recv(data, address)
self.do_alive()
self.stop()
def do_alive(self, forcealive=False):
send_alive = False
if not self.age_time:
send_alive = True
elif forcealive:
send_alive = True
elif time.time() >= (self.age_time + self.max_age):
send_alive = True
if send_alive:
self.fhdhr.logger.info("Sending Alive message to network.")
self.do_notify(self.broadcast_address_tuple)
self.age_time = time.time()
def do_notify(self, address):
notify_list = []
for ssdp_handler in list(self.ssdp_handling.keys()):
if self.ssdp_handling[ssdp_handler].enabled and hasattr(self.ssdp_handling[ssdp_handler], 'notify'):
notify_data = self.ssdp_handling[ssdp_handler].notify
if isinstance(notify_data, list):
notify_list.extend(notify_data)
else:
notify_list.append(notify_data)
for notifydata in notify_list:
notifydata = notifydata.encode("utf-8")
self.fhdhr.logger.debug("Created {}".format(notifydata))
try:
self.sock.sendto(notifydata, address)
except OSError as e:
# Most commonly: We received a multicast from an IP not in our subnet
self.fhdhr.logger.debug("Unable to send NOTIFY: %s" % e)
pass
def on_recv(self, data, address):
self.fhdhr.logger.debug("Received packet from {}: {}".format(address, data))
try:
header, payload = data.decode().split('\r\n\r\n')[:2]
except ValueError:
self.fhdhr.logger.error("Error with Received packet from {}: {}".format(address, data))
return
lines = header.split('\r\n')
cmd = lines[0].split(' ')
lines = map(lambda x: x.replace(': ', ':', 1), lines[1:])
lines = filter(lambda x: len(x) > 0, lines)
headers = [x.split(':', 1) for x in lines]
headers = dict(map(lambda x: (x[0].lower(), x[1]), headers))
for ssdp_handler in list(self.ssdp_handling.keys()):
if self.ssdp_handling[ssdp_handler].enabled and hasattr(self.ssdp_handling[ssdp_handler], 'on_recv'):
self.ssdp_handling[ssdp_handler].on_recv(headers, cmd, list(self.ssdp_handling.keys()))
if cmd[0] == 'M-SEARCH' and cmd[1] == '*':
# SSDP discovery
self.fhdhr.logger.debug("Received qualifying M-SEARCH from {}".format(address))
self.fhdhr.logger.debug("M-SEARCH data: {}".format(headers))
self.do_notify(address)
if cmd[0] == 'NOTIFY' and cmd[1] == '*':
self.fhdhr.logger.debug("NOTIFY data: {}".format(headers))
else:
self.fhdhr.logger.debug('Unknown SSDP command %s %s' % (cmd[0], cmd[1]))
def m_search(self):
data = self.msearch_payload
self.sock.sendto(data, self.broadcast_address_tuple)
def create_msearch_payload(self):
data = ''
data_command = "M-SEARCH * HTTP/1.1"
data_dict = {
"HOST": "%s:%s" % (self.broadcast_ip, 1900),
"MAN": "ssdp:discover",
"ST": "ssdp:all",
"MX": 1,
}
data += "%s\r\n" % data_command
for data_key in list(data_dict.keys()):
data += "%s:%s\r\n" % (data_key, data_dict[data_key])
data += "\r\n"
return data.encode("utf-8")
def setup_ssdp(self):
self.sock = None
self.proto = self.setup_proto()
self.iface = self.fhdhr.config.dict["ssdp"]["iface"]
self.address = self.fhdhr.config.dict["ssdp"]["multicast_address"]
self.setup_addressing()
self.sock = socket.socket(self.af_type, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.setup_interface()
self.setup_multicasting()
def setup_proto(self):
proto = self.fhdhr.config.dict["ssdp"]["proto"]
allowed_protos = ("ipv4", "ipv6")
if proto not in allowed_protos:
raise ValueError("Invalid proto - expected one of {}".format(allowed_protos))
return proto
def setup_addressing(self):
if self.proto == "ipv4":
self.af_type = socket.AF_INET
self.broadcast_ip = "239.255.255.250"
self.broadcast_address_tuple = (self.broadcast_ip, 1900)
self.bind_address = "0.0.0.0"
elif self.proto == "ipv6":
self.af_type = socket.AF_INET6
self.broadcast_ip = "ff02::c"
self.broadcast_address_tuple = (self.broadcast_ip, 1900, 0, 0)
self.bind_address = "::"
def setup_interface(self):
# Bind to specific interface
if self.iface is not None:
self.sock.setsockopt(socket.SOL_SOCKET, getattr(socket, "SO_BINDTODEVICE", 25), self.iface)
def setup_multicasting(self):
# Subscribe to multicast address
if self.proto == "ipv4":
mreq = socket.inet_aton(self.broadcast_ip)
if self.address is not None:
mreq += socket.inet_aton(self.address)
else:
mreq += struct.pack(b"@I", socket.INADDR_ANY)
self.sock.setsockopt(
socket.IPPROTO_IP, socket.IP_ADD_MEMBERSHIP, mreq)
# Allow multicasts on loopback devices (necessary for testing)
self.sock.setsockopt(socket.IPPROTO_IP, socket.IP_MULTICAST_LOOP, 1)
elif self.proto == "ipv6":
# In IPv6 we use the interface index, not the address when subscribing to the group
mreq = socket.inet_pton(socket.AF_INET6, self.broadcast_ip)
if self.iface is not None:
iface_index = socket.if_nametoindex(self.iface)
# Send outgoing packets from the same interface
self.sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_IF, iface_index)
mreq += struct.pack(b"@I", iface_index)
else:
mreq += socket.inet_pton(socket.AF_INET6, "::")
self.sock.setsockopt(
socket.IPPROTO_IPV6, socket.IPV6_JOIN_GROUP, mreq,
)
self.sock.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_MULTICAST_LOOP, 1)
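A standalone sketch of the datagram parsing done in on_recv() above: the request line is split into a command list and the remaining header lines into a lower-cased dict.

def parse_ssdp(data):
    header = data.decode(errors="replace").split("\r\n\r\n")[0]
    lines = header.split("\r\n")
    cmd = lines[0].split(" ")                      # e.g. ['M-SEARCH', '*', 'HTTP/1.1']
    headers = {}
    for line in lines[1:]:
        if not line:
            continue
        key, _, value = line.partition(":")
        headers[key.lower().strip()] = value.strip()
    return cmd, headers

cmd, headers = parse_ssdp(b'M-SEARCH * HTTP/1.1\r\nHOST:239.255.255.250:1900\r\nMAN: "ssdp:discover"\r\nST:ssdp:all\r\nMX:1\r\n\r\n')
assert cmd[0] == "M-SEARCH" and headers["st"] == "ssdp:all"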


@@ -1,34 +0,0 @@
from multiprocessing import Process
class Station_Scan():
def __init__(self, settings, channels, logger, db):
self.config = settings
self.logger = logger
self.channels = channels
self.db = db
self.db.delete_fhdhr_value("station_scan", "scanning")
def scan(self):
self.logger.info("Channel Scan Requested by Client.")
scan_status = self.db.get_fhdhr_value("station_scan", "scanning")
if not scan_status:
self.db.set_fhdhr_value("station_scan", "scanning", 1)
chanscan = Process(target=self.runscan)
chanscan.start()
else:
self.logger.info("Channel Scan Already In Progress!")
def runscan(self):
self.channels.get_channels(forceupdate=True)
self.logger.info("Requested Channel Scan Complete.")
self.db.delete_fhdhr_value("station_scan", "scanning")
def scanning(self):
scan_status = self.db.get_fhdhr_value("station_scan", "scanning")
if not scan_status:
return False
else:
return True


@@ -1,108 +0,0 @@
import threading
import datetime
from fHDHR.exceptions import TunerError
from fHDHR.tools import humanized_time
class Tuner():
def __init__(self, inum, epg, logger):
self.logger = logger
self.number = inum
self.epg = epg
self.tuner_lock = threading.Lock()
self.set_off_status()
def grab(self, stream_args):
if self.tuner_lock.locked():
raise TunerError("Tuner #" + str(self.number) + " is not available.")
self.logger.info("Tuner #" + str(self.number) + " to be used for stream.")
self.tuner_lock.acquire()
self.status = {
"status": "Active",
"method": stream_args["method"],
"accessed": stream_args["accessed"],
"proxied_url": stream_args["channelUri"],
"time_start": datetime.datetime.utcnow(),
}
def close(self):
self.logger.info("Tuner #" + str(self.number) + " Shutting Down.")
self.set_off_status()
self.tuner_lock.release()
def get_status(self):
current_status = self.status.copy()
if current_status["status"] == "Active":
current_status["Play Time"] = str(
humanized_time(
int((datetime.datetime.utcnow() - current_status["time_start"]).total_seconds())))
current_status["time_start"] = str(current_status["time_start"])
current_status["epg"] = self.epg.whats_on_now(current_status["accessed"].split("v")[-1])
return current_status
def set_off_status(self):
self.status = {"status": "Inactive"}
class Tuners():
def __init__(self, settings, epg, logger):
self.config = settings
self.logger = logger
self.epg = epg
self.max_tuners = int(self.config.dict["fhdhr"]["tuner_count"])
for i in range(1, self.max_tuners + 1):
exec("%s = %s" % ("self.tuner_" + str(i), "Tuner(i, epg, logger)"))
def tuner_grab(self, stream_args, tunernum=None):
tunerselected = None
if tunernum:
if tunernum not in range(1, self.max_tuners + 1):
raise TunerError("Tuner " + str(tunernum) + " does not exist.")
eval("self.tuner_" + str(tunernum) + ".grab(stream_args)")
tunerselected = tunernum
else:
for tunernum in range(1, self.max_tuners + 1):
try:
eval("self.tuner_" + str(tunernum) + ".grab(stream_args)")
except TunerError:
continue
else:
tunerselected = tunernum
break
if not tunerselected:
raise TunerError("No Available Tuners.")
else:
return tunerselected
def tuner_close(self, tunernum):
eval("self.tuner_" + str(tunernum) + ".close()")
def status(self):
all_status = {}
for tunernum in range(1, self.max_tuners + 1):
all_status[tunernum] = eval("self.tuner_" + str(tunernum) + ".get_status()")
return all_status
def available_tuner_count(self):
available_tuners = 0
for tunernum in range(1, self.max_tuners + 1):
tuner_status = eval("self.tuner_" + str(tunernum) + ".get_status()")
if tuner_status["status"] == "Inactive":
available_tuners += 1
return available_tuners
def inuse_tuner_count(self):
inuse_tuners = 0
for tunernum in range(1, self.max_tuners + 1):
tuner_status = eval("self.tuner_" + str(tunernum) + ".get_status()")
if tuner_status["status"] == "Active":
inuse_tuners += 1
return inuse_tuners


@@ -0,0 +1,229 @@
import m3u8
from fHDHR.exceptions import TunerError
from .tuner import Tuner
class Tuners():
def __init__(self, fhdhr, epg, channels):
self.fhdhr = fhdhr
self.channels = channels
self.epg = epg
self.tuners = {}
for origin in list(self.fhdhr.origins.origins_dict.keys()):
self.tuners[origin] = {}
max_tuners = int(self.fhdhr.origins.origins_dict[origin].tuners)
self.fhdhr.logger.info("Creating %s tuners for %s." % (max_tuners, origin))
for i in range(0, max_tuners):
self.tuners[origin][str(i)] = Tuner(fhdhr, i, epg, origin)
self.alt_stream_handlers = {}
def alt_stream_methods_selfadd(self):
for plugin_name in list(self.fhdhr.plugins.plugins.keys()):
if self.fhdhr.plugins.plugins[plugin_name].type == "alt_stream":
method = self.fhdhr.plugins.plugins[plugin_name].name
self.alt_stream_handlers[method] = self.fhdhr.plugins.plugins[plugin_name]
def get_available_tuner(self, origin):
return next((tunernum for tunernum in list(self.tuners[origin].keys()) if not self.tuners[origin][tunernum].tuner_lock.locked()), None)
def get_scanning_tuner(self, origin):
return next((tunernum for tunernum in list(self.tuners[origin].keys()) if self.tuners[origin][tunernum].status["status"] == "Scanning"), None)
def stop_tuner_scan(self, origin):
tunernum = self.get_scanning_tuner(origin)
if tunernum:
self.tuners[origin][str(tunernum)].close()
def tuner_scan(self, origin="all"):
"""Temporarily use a tuner for a scan"""
if origin == "all":
origins = list(self.tuners.keys())
else:
origins = [origin]
for origin in origins:
if not self.available_tuner_count(origin):
raise TunerError("805 - All Tuners In Use")
tunernumber = self.get_available_tuner(origin)
self.tuners[origin][str(tunernumber)].channel_scan(origin)
if not tunernumber:
raise TunerError("805 - All Tuners In Use")
def tuner_grab(self, tuner_number, origin, channel_number):
if str(tuner_number) not in list(self.tuners[origin].keys()):
self.fhdhr.logger.error("Tuner %s does not exist for %s." % (tuner_number, origin))
raise TunerError("806 - Tune Failed")
# TunerError will raise if unavailable
self.tuners[origin][str(tuner_number)].grab(origin, channel_number)
return tuner_number
def first_available(self, origin, channel_number, dograb=True):
if not self.available_tuner_count(origin):
raise TunerError("805 - All Tuners In Use")
tunernumber = self.get_available_tuner(origin)
if not tunernumber:
raise TunerError("805 - All Tuners In Use")
else:
self.tuners[origin][str(tunernumber)].grab(origin, channel_number)
return tunernumber
def tuner_close(self, tunernum, origin):
self.tuners[origin][str(tunernum)].close()
def status(self, origin=None):
all_status = {}
if origin:
for tunernum in list(self.tuners[origin].keys()):
all_status[tunernum] = self.tuners[origin][str(tunernum)].get_status()
else:
for origin in list(self.tuners.keys()):
all_status[origin] = {}
for tunernum in list(self.tuners[origin].keys()):
all_status[origin][tunernum] = self.tuners[origin][str(tunernum)].get_status()
return all_status
def available_tuner_count(self, origin):
available_tuners = 0
for tunernum in list(self.tuners[origin].keys()):
if not self.tuners[origin][str(tunernum)].tuner_lock.locked():
available_tuners += 1
return available_tuners
def inuse_tuner_count(self, origin):
inuse_tuners = 0
for tunernum in list(self.tuners[origin].keys()):
if self.tuners[origin][str(tunernum)].tuner_lock.locked():
inuse_tuners += 1
return inuse_tuners
def get_stream_info(self, stream_args):
stream_info = self.channels.get_channel_stream(stream_args, stream_args["origin"])
if not stream_info:
raise TunerError("806 - Tune Failed")
if isinstance(stream_info, str):
stream_info = {"url": stream_info, "headers": None}
stream_args["stream_info"] = stream_info
if not stream_args["stream_info"]["url"]:
raise TunerError("806 - Tune Failed")
if "headers" not in list(stream_args["stream_info"].keys()):
stream_args["stream_info"]["headers"] = None
if stream_args["stream_info"]["url"].startswith("udp://"):
stream_args["true_content_type"] = "video/mpeg"
stream_args["content_type"] = "video/mpeg"
else:
channel_stream_url_headers = self.fhdhr.web.session.head(stream_args["stream_info"]["url"]).headers
stream_args["true_content_type"] = channel_stream_url_headers['Content-Type']
if stream_args["true_content_type"].startswith(tuple(["application/", "text/"])):
stream_args["content_type"] = "video/mpeg"
if stream_args["origin_quality"] != -1:
stream_args["stream_info"]["url"] = self.m3u8_quality(stream_args)
else:
stream_args["content_type"] = stream_args["true_content_type"]
return stream_args
def m3u8_quality(self, stream_args):
m3u8_url = stream_args["stream_info"]["url"]
quality_profile = stream_args["origin_quality"]
if not quality_profile:
if stream_args["method"] == "direct":
quality_profile = "high"
self.fhdhr.logger.info("Origin Quality not set in config. Direct Method set and will default to Highest Quality")
else:
self.fhdhr.logger.info("Origin Quality not set in config. %s Method will select the Quality Automatically" % stream_args["method"])
return m3u8_url
else:
quality_profile = quality_profile.lower()
self.fhdhr.logger.info("Origin Quality set in config to %s" % (quality_profile))
while True:
self.fhdhr.logger.info("Opening m3u8 for reading %s" % m3u8_url)
try:
if stream_args["stream_info"]["headers"]:
videoUrlM3u = m3u8.load(m3u8_url, headers=stream_args["stream_info"]["headers"])
else:
videoUrlM3u = m3u8.load(m3u8_url)
except Exception as e:
self.fhdhr.logger.info("m3u8 load error: %s" % e)
return m3u8_url
if len(videoUrlM3u.playlists):
self.fhdhr.logger.info("%s m3u8 varients found" % len(videoUrlM3u.playlists))
# Create list of dicts
playlists, playlist_index = {}, 0
for playlist_item in videoUrlM3u.playlists:
playlist_index += 1
playlist_dict = {
"url": playlist_item.absolute_uri,
"bandwidth": playlist_item.stream_info.bandwidth,
}
if not playlist_item.stream_info.resolution:
playlist_dict["width"] = None
playlist_dict["height"] = None
else:
try:
playlist_dict["width"] = playlist_item.stream_info.resolution[0]
playlist_dict["height"] = playlist_item.stream_info.resolution[1]
except TypeError:
playlist_dict["width"] = None
playlist_dict["height"] = None
playlists[playlist_index] = playlist_dict
sorted_playlists = sorted(playlists, key=lambda i: (
int(playlists[i]['bandwidth']),
int(playlists[i]['width'] or 0),
int(playlists[i]['height'] or 0)
))
sorted_playlists = [playlists[x] for x in sorted_playlists]
if not quality_profile or quality_profile == "high":
selected_index = -1
elif quality_profile == "medium":
selected_index = int((len(sorted_playlists) - 1)/2)
elif quality_profile == "low":
selected_index = 0
m3u8_stats = ",".join(
["%s %s" % (x, sorted_playlists[selected_index][x])
for x in list(sorted_playlists[selected_index].keys())
if x != "url" and sorted_playlists[selected_index][x]])
self.fhdhr.logger.info("Selected m3u8 details: %s" % m3u8_stats)
m3u8_url = sorted_playlists[selected_index]["url"]
else:
self.fhdhr.logger.info("No m3u8 varients found")
break
return m3u8_url
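A standalone sketch of the variant selection that m3u8_quality() performs: variants are ordered by bandwidth then resolution, and the low/medium/high profile picks the bottom, middle or top entry. The sample variant dicts are illustrative.

def pick_variant(playlists, quality_profile="high"):
    ordered = sorted(playlists, key=lambda p: (p["bandwidth"], p.get("width") or 0, p.get("height") or 0))
    if quality_profile == "low":
        return ordered[0]
    if quality_profile == "medium":
        return ordered[(len(ordered) - 1) // 2]
    return ordered[-1]  # "high" (or unset) takes the top variant

variants = [
    {"url": "low.m3u8", "bandwidth": 800000, "width": 640, "height": 360},
    {"url": "mid.m3u8", "bandwidth": 2400000, "width": 1280, "height": 720},
    {"url": "top.m3u8", "bandwidth": 5000000, "width": 1920, "height": 1080},
]
assert pick_variant(variants, "medium")["url"] == "mid.m3u8"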


@@ -0,0 +1,23 @@
from .direct_stream import Direct_Stream
from .direct_m3u8_stream import Direct_M3U8_Stream
class Stream():
def __init__(self, fhdhr, stream_args, tuner):
self.fhdhr = fhdhr
self.stream_args = stream_args
if stream_args["method"] == "direct":
if self.stream_args["true_content_type"].startswith(tuple(["application/", "text/"])):
self.method = Direct_M3U8_Stream(fhdhr, stream_args, tuner)
else:
self.method = Direct_Stream(fhdhr, stream_args, tuner)
else:
plugin_name = self.fhdhr.config.dict["streaming"]["valid_methods"][stream_args["method"]]["plugin"]
self.method = self.fhdhr.plugins.plugins[plugin_name].Plugin_OBJ(fhdhr, self.fhdhr.plugins.plugins[plugin_name].plugin_utils, stream_args, tuner)
def get(self):
return self.method.get()


@@ -0,0 +1,109 @@
import sys
import time
import m3u8
from Crypto.Cipher import AES
# from fHDHR.exceptions import TunerError
class Direct_M3U8_Stream():
def __init__(self, fhdhr, stream_args, tuner):
self.fhdhr = fhdhr
self.stream_args = stream_args
self.tuner = tuner
self.bytes_per_read = int(self.fhdhr.config.dict["streaming"]["bytes_per_read"])
def get(self):
if not self.stream_args["duration"] == 0:
self.stream_args["time_end"] = self.stream_args["duration"] + time.time()
self.fhdhr.logger.info("Detected stream of m3u8 URL: %s" % self.stream_args["stream_info"]["url"])
if self.stream_args["transcode_quality"]:
self.fhdhr.logger.info("Client requested a %s transcode for stream. Direct Method cannot transcode." % self.stream_args["transcode_quality"])
def generate():
try:
played_chunk_urls = []
while self.tuner.tuner_lock.locked():
try:
if self.stream_args["stream_info"]["headers"]:
playlist = m3u8.load(self.stream_args["stream_info"]["url"], headers=self.stream_args["stream_info"]["headers"])
else:
playlist = m3u8.load(self.stream_args["stream_info"]["url"])
except Exception as e:
self.fhdhr.logger.info("Connection Closed: %s" % e)
self.tuner.close()
return None
segments = playlist.segments
if len(played_chunk_urls):
newsegments = 0
for segment in segments:
if segment.absolute_uri not in played_chunk_urls:
newsegments += 1
self.fhdhr.logger.info("Refreshing m3u8, Loaded %s new segments." % str(newsegments))
else:
self.fhdhr.logger.info("Loaded %s segments." % str(len(segments)))
if playlist.keys != [None]:
keys = [{"url": key.absolute_uri, "method": key.method, "iv": key.iv} for key in playlist.keys if key]
else:
keys = [None for i in range(0, len(segments))]
for segment, key in zip(segments, keys):
chunkurl = segment.absolute_uri
if chunkurl and chunkurl not in played_chunk_urls:
played_chunk_urls.append(chunkurl)
if (not self.stream_args["duration"] == 0 and
not time.time() < self.stream_args["time_end"]):
self.fhdhr.logger.info("Requested Duration Expired.")
self.tuner.close()
if self.stream_args["stream_info"]["headers"]:
chunk = self.fhdhr.web.session.get(chunkurl, headers=self.stream_args["stream_info"]["headers"]).content
else:
chunk = self.fhdhr.web.session.get(chunkurl).content
if not chunk:
break
# raise TunerError("807 - No Video Data")
if key:
if key["url"]:
if self.stream_args["stream_info"]["headers"]:
keyfile = self.fhdhr.web.session.get(key["url"], headers=self.stream_args["stream_info"]["headers"]).content
else:
keyfile = self.fhdhr.web.session.get(key["url"]).content
cryptor = AES.new(keyfile, AES.MODE_CBC, keyfile)
self.fhdhr.logger.info("Decrypting Chunk #%s with key: %s" % (len(played_chunk_urls), key["url"]))
chunk = cryptor.decrypt(chunk)
chunk_size = int(sys.getsizeof(chunk))
self.fhdhr.logger.info("Passing Through Chunk #%s with size %s: %s" % (len(played_chunk_urls), chunk_size, chunkurl))
yield chunk
self.tuner.add_downloaded_size(chunk_size)
self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")
except GeneratorExit:
self.fhdhr.logger.info("Connection Closed.")
except Exception as e:
self.fhdhr.logger.info("Connection Closed: %s" % e)
finally:
self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")
if hasattr(self.fhdhr.origins.origins_dict[self.tuner.origin], "close_stream"):
self.fhdhr.origins.origins_dict[self.tuner.origin].close_stream(self.tuner.number, self.stream_args)
self.tuner.close()
# raise TunerError("806 - Tune Failed")
return generate()
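A minimal sketch of decrypting a single AES-128 encrypted HLS segment, assuming pycryptodome and plain requests; the function name and session handling are illustrative. Note the loop above reuses the key bytes as the IV, whereas the HLS spec normally derives the IV from the EXT-X-KEY IV attribute or the segment's media sequence number.

import requests
from Crypto.Cipher import AES

def decrypt_segment(segment_bytes, key_url, iv=None, session=None):
    session = session or requests.Session()
    key = session.get(key_url).content        # 16-byte AES-128 key
    cipher = AES.new(key, AES.MODE_CBC, iv or key[:16])
    return cipher.decrypt(segment_bytes)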


@@ -0,0 +1,72 @@
import sys
import time
# from fHDHR.exceptions import TunerError
class Direct_Stream():
def __init__(self, fhdhr, stream_args, tuner):
self.fhdhr = fhdhr
self.stream_args = stream_args
self.tuner = tuner
self.bytes_per_read = int(self.fhdhr.config.dict["streaming"]["bytes_per_read"])
def get(self):
if not self.stream_args["duration"] == 0:
self.stream_args["time_end"] = self.stream_args["duration"] + time.time()
self.fhdhr.logger.info("Direct Stream of %s URL: %s" % (self.stream_args["true_content_type"], self.stream_args["stream_info"]["url"]))
if self.stream_args["transcode_quality"]:
self.fhdhr.logger.info("Client requested a %s transcode for stream. Direct Method cannot transcode." % self.stream_args["transcode_quality"])
if self.stream_args["stream_info"]["headers"]:
req = self.fhdhr.web.session.get(self.stream_args["stream_info"]["url"], stream=True, headers=self.stream_args["stream_info"]["headers"])
else:
req = self.fhdhr.web.session.get(self.stream_args["stream_info"]["url"], stream=True)
def generate():
try:
chunk_counter = 1
while self.tuner.tuner_lock.locked():
for chunk in req.iter_content(chunk_size=self.bytes_per_read):
if (not self.stream_args["duration"] == 0 and
not time.time() < self.stream_args["time_end"]):
req.close()
self.fhdhr.logger.info("Requested Duration Expired.")
self.tuner.close()
if not chunk:
break
# raise TunerError("807 - No Video Data")
chunk_size = int(sys.getsizeof(chunk))
self.fhdhr.logger.info("Passing Through Chunk #%s with size %s" % (chunk_counter, chunk_size))
yield chunk
self.tuner.add_downloaded_size(chunk_size)
chunk_counter += 1
self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")
except GeneratorExit:
self.fhdhr.logger.info("Connection Closed.")
except Exception as e:
self.fhdhr.logger.info("Connection Closed: %s" % e)
finally:
req.close()
self.fhdhr.logger.info("Connection Closed: Tuner Lock Removed")
if hasattr(self.fhdhr.origins.origins_dict[self.tuner.origin], "close_stream"):
self.fhdhr.origins.origins_dict[self.tuner.origin].close_stream(self.tuner.number, self.stream_args)
self.tuner.close()
# raise TunerError("806 - Tune Failed")
return generate()
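A standalone sketch of the chunked pass-through with a duration cutoff that get() implements above, using requests directly; the function name, chunk size and timeout are illustrative assumptions.

import time
import requests

def stream_with_cutoff(url, duration=0, chunk_size=1024 * 1024, headers=None):
    # Yield raw chunks from url, stopping once duration seconds have elapsed (0 means unlimited).
    time_end = time.time() + duration if duration else None
    with requests.get(url, stream=True, headers=headers, timeout=10) as req:
        for chunk in req.iter_content(chunk_size=chunk_size):
            if time_end and time.time() >= time_end:
                break
            if chunk:
                yield chunk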


@@ -0,0 +1,107 @@
import threading
import datetime
from fHDHR.exceptions import TunerError
from fHDHR.tools import humanized_time
from .stream import Stream
class Tuner():
def __init__(self, fhdhr, inum, epg, origin):
self.fhdhr = fhdhr
self.number = inum
self.origin = origin
self.epg = epg
self.tuner_lock = threading.Lock()
self.set_off_status()
self.chanscan_url = "/api/channels?method=scan"
self.close_url = "/api/tuners?method=close&tuner=%s&origin=%s" % (self.number, self.origin)
def channel_scan(self, origin, grabbed=False):
if self.tuner_lock.locked() and not grabbed:
self.fhdhr.logger.error("%s Tuner #%s is not available." % (self.origin, self.number))
raise TunerError("804 - Tuner In Use")
if self.status["status"] == "Scanning":
self.fhdhr.logger.info("Channel Scan Already In Progress!")
else:
if not grabbed:
self.tuner_lock.acquire()
self.status["status"] = "Scanning"
self.status["origin"] = origin
self.status["time_start"] = datetime.datetime.utcnow()
self.fhdhr.logger.info("Tuner #%s Performing Channel Scan for %s origin." % (self.number, origin))
chanscan = threading.Thread(target=self.runscan, args=(origin,))
chanscan.start()
def runscan(self, origin):
self.fhdhr.api.get("%s&origin=%s" % (self.chanscan_url, origin))
self.fhdhr.logger.info("Requested Channel Scan for %s origin Complete." % origin)
self.close()
self.fhdhr.api.get(self.close_url)
def add_downloaded_size(self, bytes_count):
if "downloaded" in list(self.status.keys()):
self.status["downloaded"] += bytes_count
def grab(self, origin, channel_number):
if self.tuner_lock.locked():
self.fhdhr.logger.error("Tuner #%s is not available." % self.number)
raise TunerError("804 - Tuner In Use")
self.tuner_lock.acquire()
self.status["status"] = "Acquired"
self.status["origin"] = origin
self.status["channel"] = channel_number
self.status["time_start"] = datetime.datetime.utcnow()
self.fhdhr.logger.info("Tuner #%s Acquired." % str(self.number))
def close(self):
self.set_off_status()
if self.tuner_lock.locked():
self.tuner_lock.release()
self.fhdhr.logger.info("Tuner #%s Released." % self.number)
def get_status(self):
current_status = self.status.copy()
current_status["epg"] = {}
if current_status["status"] in ["Acquired", "Active", "Scanning"]:
current_status["running_time"] = str(
humanized_time(
int((datetime.datetime.utcnow() - current_status["time_start"]).total_seconds())))
current_status["time_start"] = str(current_status["time_start"])
if current_status["status"] in ["Active"]:
if current_status["origin"] in self.epg.epg_methods:
current_status["epg"] = self.epg.whats_on_now(current_status["channel"], method=current_status["origin"])
return current_status
def set_off_status(self):
self.status = {"status": "Inactive"}
def get_stream(self, stream_args, tuner):
stream = Stream(self.fhdhr, stream_args, tuner)
return stream
def set_status(self, stream_args):
if self.status["status"] != "Active":
self.status = {
"status": "Active",
"clients": [],
"clients_id": [],
"method": stream_args["method"],
"accessed": [stream_args["accessed"]],
"origin": stream_args["origin"],
"channel": stream_args["channel"],
"proxied_url": stream_args["stream_info"]["url"],
"time_start": datetime.datetime.utcnow(),
"downloaded": 0
}
if stream_args["client"] not in self.status["clients"]:
self.status["clients"].append(stream_args["client"])
if stream_args["client_id"] not in self.status["clients_id"]:
self.status["clients_id"].append(stream_args["client_id"])


@ -1,117 +0,0 @@
import subprocess
import time
from fHDHR.exceptions import TunerError
class WatchStream():
def __init__(self, settings, origserv, tuners, logger, web):
self.config = settings
self.logger = logger
self.origserv = origserv
self.tuners = tuners
self.web = web
def direct_stream(self, stream_args, tunernum):
chunksize = int(self.tuners.config.dict["direct_stream"]['chunksize'])
if not stream_args["duration"] == 0:
stream_args["duration"] += time.time()
req = self.web.session.get(stream_args["channelUri"], stream=True)
def generate():
try:
for chunk in req.iter_content(chunk_size=chunksize):
if not stream_args["duration"] == 0 and not time.time() < stream_args["duration"]:
req.close()
self.logger.info("Requested Duration Expired.")
break
yield chunk
except GeneratorExit:
req.close()
self.logger.info("Connection Closed.")
self.tuners.tuner_close(tunernum)
return generate()
def ffmpeg_stream(self, stream_args, tunernum):
bytes_per_read = int(self.config.dict["ffmpeg"]["bytes_per_read"])
ffmpeg_command = [self.config.dict["ffmpeg"]["ffmpeg_path"],
"-i", stream_args["channelUri"],
"-c", "copy",
"-f", "mpegts",
"-nostats", "-hide_banner",
"-loglevel", "fatal",
"pipe:stdout"
]
if not stream_args["duration"] == 0:
stream_args["duration"] += time.time()
ffmpeg_proc = subprocess.Popen(ffmpeg_command, stdout=subprocess.PIPE)
def generate():
try:
while True:
if not stream_args["duration"] == 0 and not time.time() < stream_args["duration"]:
ffmpeg_proc.terminate()
ffmpeg_proc.communicate()
self.logger.info("Requested Duration Expired.")
break
videoData = ffmpeg_proc.stdout.read(bytes_per_read)
if not videoData:
break
try:
yield videoData
except Exception as e:
ffmpeg_proc.terminate()
ffmpeg_proc.communicate()
self.logger.info("Connection Closed: " + str(e))
except GeneratorExit:
ffmpeg_proc.terminate()
ffmpeg_proc.communicate()
self.logger.info("Connection Closed.")
self.tuners.tuner_close(tunernum)
return generate()
def get_stream(self, stream_args):
try:
tunernum = self.tuners.tuner_grab(stream_args)
except TunerError as e:
self.logger.info("A " + stream_args["method"] + " stream request for channel " +
str(stream_args["channel"]) + " was rejected do to " + str(e))
return
self.logger.info("Attempting a " + stream_args["method"] + " stream request for channel " + str(stream_args["channel"]))
if stream_args["method"] == "ffmpeg":
return self.ffmpeg_stream(stream_args, tunernum)
elif stream_args["method"] == "direct":
return self.direct_stream(stream_args, tunernum)
def get_stream_info(self, stream_args):
stream_args["channelUri"] = self.origserv.get_channel_stream(str(stream_args["channel"]))
if not stream_args["channelUri"]:
self.logger.error("Could not Obtain Channel Stream.")
stream_args["content_type"] = "video/mpeg"
else:
channelUri_headers = self.web.session.head(stream_args["channelUri"]).headers
stream_args["content_type"] = channelUri_headers['Content-Type']
return stream_args
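For reference, the ffmpeg_command list assembled in ffmpeg_stream above corresponds to roughly the following invocation; the URL and read size below are placeholders for illustration, not values taken from this project's configuration:

import subprocess

# Equivalent shell form (placeholder URL):
#   ffmpeg -i http://example/stream -c copy -f mpegts -nostats -hide_banner -loglevel fatal pipe:stdout
bytes_per_read = 1024 * 1024  # placeholder; the real value comes from config ["ffmpeg"]["bytes_per_read"]
proc = subprocess.Popen(
    ["ffmpeg", "-i", "http://example/stream", "-c", "copy", "-f", "mpegts",
     "-nostats", "-hide_banner", "-loglevel", "fatal", "pipe:stdout"],
    stdout=subprocess.PIPE)
chunk = proc.stdout.read(bytes_per_read)  # the generator above keeps reading chunks like this until empty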


@ -1,64 +0,0 @@
from gevent.pywsgi import WSGIServer
from flask import Flask
from .pages import fHDHR_Pages
from .files import fHDHR_Files
from .api import fHDHR_API
class fHDHR_HTTP_Server():
app = None
def __init__(self, fhdhr):
self.fhdhr = fhdhr
self.app = Flask("fHDHR")
self.pages = fHDHR_Pages(fhdhr)
self.add_endpoints(self.pages, "pages")
self.files = fHDHR_Files(fhdhr)
self.add_endpoints(self.files, "files")
self.api = fHDHR_API(fhdhr)
self.add_endpoints(self.api, "api")
def add_endpoints(self, index_list, index_name):
item_list = [x for x in dir(index_list) if self.isapath(x)]
for item in item_list:
endpoints = eval("self." + str(index_name) + "." + str(item) + ".endpoints")
if isinstance(endpoints, str):
endpoints = [endpoints]
handler = eval("self." + str(index_name) + "." + str(item))
endpoint_name = eval("self." + str(index_name) + "." + str(item) + ".endpoint_name")
try:
endpoint_methods = eval("self." + str(index_name) + "." + str(item) + ".endpoint_methods")
except AttributeError:
endpoint_methods = ['GET']
for endpoint in endpoints:
self.add_endpoint(endpoint=endpoint,
endpoint_name=endpoint_name,
handler=handler,
methods=endpoint_methods)
def isapath(self, item):
not_a_page_list = ["fhdhr", "htmlerror", "page_elements"]
if item in not_a_page_list:
return False
elif item.startswith("__") and item.endswith("__"):
return False
else:
return True
def add_endpoint(self, endpoint=None, endpoint_name=None, handler=None, methods=['GET']):
self.app.add_url_rule(endpoint, endpoint_name, handler, methods=methods)
def run(self):
self.http = WSGIServer((
self.fhdhr.config.dict["fhdhr"]["address"],
int(self.fhdhr.config.dict["fhdhr"]["port"])
), self.app.wsgi_app)
try:
self.http.serve_forever()
except KeyboardInterrupt:
self.http.stop()


@ -1,24 +0,0 @@
from .cluster import Cluster
from .channels import Channels
from .lineup_post import Lineup_Post
from .xmltv import xmlTV
from .m3u import M3U
from .debug import Debug_JSON
from .images import Images
class fHDHR_API():
def __init__(self, fhdhr):
self.fhdhr = fhdhr
self.cluster = Cluster(fhdhr)
self.channels = Channels(fhdhr)
self.xmltv = xmlTV(fhdhr)
self.m3u = M3U(fhdhr)
self.debug = Debug_JSON(fhdhr)
self.lineup_post = Lineup_Post(fhdhr)
self.images = Images(fhdhr)


@ -1,32 +0,0 @@
from flask import request, redirect
import urllib.parse
class Channels():
endpoints = ["/api/channels"]
endpoint_name = "api_channels"
def __init__(self, fhdhr):
self.fhdhr = fhdhr
def __call__(self, *args):
return self.get(*args)
def get(self, *args):
method = request.args.get('method', default=None, type=str)
redirect_url = request.args.get('redirect', default=None, type=str)
if method == "scan":
self.fhdhr.device.station_scan.scan()
else:
return "Invalid Method"
if redirect_url:
return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Success" % method))
else:
if method == "scan":
return redirect('/lineup_status.json')
else:
return "%s Success" % method


@ -1,52 +0,0 @@
from flask import request, redirect, Response
import urllib.parse
import json
class Cluster():
endpoints = ["/api/cluster"]
endpoint_name = "api_cluster"
def __init__(self, fhdhr):
self.fhdhr = fhdhr
def __call__(self, *args):
return self.get(*args)
def get(self, *args):
method = request.args.get('method', default="get", type=str)
location = request.args.get("location", default=None, type=str)
redirect_url = request.args.get('redirect', default=None, type=str)
if method == "get":
jsoncluster = self.fhdhr.device.cluster.cluster()
cluster_json = json.dumps(jsoncluster, indent=4)
return Response(status=200,
response=cluster_json,
mimetype='application/json')
elif method == "scan":
self.fhdhr.device.ssdp.m_search()
elif method == 'add':
self.fhdhr.device.cluster.add(location)
elif method == 'del':
self.fhdhr.device.cluster.remove(location)
elif method == 'sync':
self.fhdhr.device.cluster.sync(location)
elif method == 'leave':
self.fhdhr.device.cluster.leave()
elif method == 'disconnect':
self.fhdhr.device.cluster.disconnect()
else:
return "Invalid Method"
if redirect_url:
return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Success" % method))
else:
return "%s Success" % method


@ -1,31 +0,0 @@
from flask import request, abort, Response
class Lineup_Post():
endpoints = ["/lineup.post"]
endpoint_name = "lineup_post"
endpoint_methods = ["POST"]
def __init__(self, fhdhr):
self.fhdhr = fhdhr
def __call__(self, *args):
return self.get(*args)
def get(self, *args):
if 'scan' in list(request.args.keys()):
if request.args['scan'] == 'start':
self.fhdhr.device.station_scan.scan()
return Response(status=200, mimetype='text/html')
elif request.args['scan'] == 'abort':
return Response(status=200, mimetype='text/html')
else:
self.fhdhr.logger.warning("Unknown scan command " + request.args['scan'])
return abort(200, "Not a valid scan command")
else:
return abort(501, "Not a valid command")


@ -1,83 +0,0 @@
from flask import Response, request, redirect
import urllib.parse
from io import StringIO
class M3U():
endpoints = ["/api/m3u", "/api/channels.m3u"]
endpoint_name = "api_m3u"
xmltv_xml = None
def __init__(self, fhdhr):
self.fhdhr = fhdhr
def __call__(self, *args):
return self.get(*args)
def get(self, *args):
base_url = request.url_root[:-1]
method = request.args.get('method', default="get", type=str)
channel = request.args.get('channel', default="all", type=str)
redirect_url = request.args.get('redirect', default=None, type=str)
if method == "get":
FORMAT_DESCRIPTOR = "#EXTM3U"
RECORD_MARKER = "#EXTINF"
fakefile = StringIO()
xmltvurl = ('%s/api/xmltv' % base_url)
fakefile.write(
"%s\n" % (
FORMAT_DESCRIPTOR + " " +
"url-tvg=\"" + xmltvurl + "\"" + " " +
"x-tvg-url=\"" + xmltvurl + "\"")
)
channel_list = self.fhdhr.device.channels.get_channels()
channel_number_list = [x["number"] for x in channel_list]
if channel == "all":
channel_items = channel_list
elif channel in channel_number_list:
channel_items = [self.fhdhr.device.channels.get_channel_dict("number", channel)]
else:
return "Invalid Channel"
for channel_item in channel_items:
logourl = ('%s/api/images?method=get&type=channel&id=%s' %
(base_url, str(channel_item['id'])))
fakefile.write(
"%s\n" % (
RECORD_MARKER + ":0" + " " +
"channelID=\"" + str(channel_item['id']) + "\" " +
"tvg-chno=\"" + str(channel_item['number']) + "\" " +
"tvg-name=\"" + str(channel_item['name']) + "\" " +
"tvg-id=\"" + str(channel_item['number']) + "\" " +
"tvg-logo=\"" + logourl + "\" " +
"group-title=\"" + self.fhdhr.config.dict["fhdhr"]["friendlyname"] + "," + str(channel_item['name']))
)
fakefile.write(
"%s\n" % (
('%s/auto/v%s' %
(base_url, str(channel_item['number'])))
)
)
channels_m3u = fakefile.getvalue()
return Response(status=200,
response=channels_m3u,
mimetype='text/plain')
if redirect_url:
return redirect(redirect_url + "?retmessage=" + urllib.parse.quote("%s Success" % method))
else:
return "%s Success" % method


@ -1,24 +0,0 @@
from flask import Response, request
import json
class Lineup_JSON():
endpoints = ["/lineup.json"]
endpoint_name = "lineup_json"
def __init__(self, fhdhr):
self.fhdhr = fhdhr
def __call__(self, *args):
return self.get(*args)
def get(self, *args):
base_url = request.url_root[:-1]
jsonlineup = self.fhdhr.device.channels.get_station_list(base_url)
lineup_json = json.dumps(jsonlineup, indent=4)
return Response(status=200,
response=lineup_json,
mimetype='application/json')


@ -1,46 +0,0 @@
from flask import Response
import json
class Lineup_Status_JSON():
endpoints = ["/lineup_status.json"]
endpoint_name = "lineup_status_json"
def __init__(self, fhdhr):
self.fhdhr = fhdhr
def __call__(self, *args):
return self.get(*args)
def get(self, *args):
station_scanning = self.fhdhr.device.station_scan.scanning()
if station_scanning:
jsonlineup = self.scan_in_progress()
elif not self.fhdhr.device.channels.get_station_total():
jsonlineup = self.scan_in_progress()
else:
jsonlineup = self.not_scanning()
lineup_json = json.dumps(jsonlineup, indent=4)
return Response(status=200,
response=lineup_json,
mimetype='application/json')
def scan_in_progress(self):
channel_count = self.fhdhr.device.channels.get_station_total()
jsonlineup = {
"ScanInProgress": "true",
"Progress": 99,
"Found": channel_count
}
return jsonlineup
def not_scanning(self):
jsonlineup = {
"ScanInProgress": "false",
"ScanPossible": "true",
"Source": self.fhdhr.config.dict["fhdhr"]["reporting_tuner_type"],
"SourceList": [self.fhdhr.config.dict["fhdhr"]["reporting_tuner_type"]],
}
return jsonlineup


@ -1,37 +0,0 @@
from flask import Response, request
from io import BytesIO
import xml.etree.ElementTree
from fHDHR.tools import sub_el
class Lineup_XML():
endpoints = ["/lineup.xml"]
endpoint_name = "lineup_xml"
def __init__(self, fhdhr):
self.fhdhr = fhdhr
def __call__(self, *args):
return self.get(*args)
def get(self, *args):
base_url = request.url_root[:-1]
out = xml.etree.ElementTree.Element('Lineup')
station_list = self.fhdhr.device.channels.get_station_list(base_url)
for station_item in station_list:
program_out = sub_el(out, 'Program')
sub_el(program_out, 'GuideNumber', station_item['GuideNumber'])
sub_el(program_out, 'GuideName', station_item['GuideName'])
sub_el(program_out, 'URL', station_item['URL'])
fakefile = BytesIO()
fakefile.write(b'<?xml version="1.0" encoding="UTF-8"?>\n')
fakefile.write(xml.etree.ElementTree.tostring(out, encoding='UTF-8'))
lineup_xml = fakefile.getvalue()
return Response(status=200,
response=lineup_xml,
mimetype='application/xml')


@ -1,17 +0,0 @@
from flask import send_from_directory
class Style_CSS():
endpoints = ["/style.css"]
endpoint_name = "style"
def __init__(self, fhdhr):
self.fhdhr = fhdhr
def __call__(self, *args):
return self.get(*args)
def get(self, *args):
return send_from_directory(self.fhdhr.config.dict["filedir"]["www_dir"],
'style.css')


@ -1,29 +0,0 @@
from flask import Response, request, stream_with_context, abort
class Watch():
endpoints = ['/auto/<channel>']
endpoint_name = "auto"
def __init__(self, fhdhr):
self.fhdhr = fhdhr
def __call__(self, channel, *args):
return self.get(channel, *args)
def get(self, channel, *args):
base_url = request.url_root[:-1]
stream_args = {
"channel": channel.replace('v', ''),
"method": request.args.get('method', default=self.fhdhr.config.dict["fhdhr"]["stream_type"], type=str),
"duration": request.args.get('duration', default=0, type=int),
"accessed": self.fhdhr.device.channels.get_fhdhr_stream_url(base_url, channel.replace('v', '')),
}
stream_args = self.fhdhr.device.watch.get_stream_info(stream_args)
if stream_args["channelUri"]:
if stream_args["method"] == "direct":
return Response(self.fhdhr.device.watch.get_stream(stream_args), content_type=stream_args["content_type"], direct_passthrough=True)
elif stream_args["method"] == "ffmpeg":
return Response(stream_with_context(self.fhdhr.device.watch.get_stream(stream_args)), mimetype="video/mpeg")
abort(503)


@ -1,33 +0,0 @@
from .htmlerror import HTMLerror
from .page_elements import fHDHR_Page_Elements
from .index_html import Index_HTML
from .origin_html import Origin_HTML
from .cluster_html import Cluster_HTML
from .diagnostics_html import Diagnostics_HTML
from .streams_html import Streams_HTML
from .version_html import Version_HTML
from .guide_html import Guide_HTML
from .xmltv_html import xmlTV_HTML
class fHDHR_Pages():
def __init__(self, fhdhr):
self.fhdhr = fhdhr
self.page_elements = fHDHR_Page_Elements(fhdhr)
self.htmlerror = HTMLerror(fhdhr)
self.index = Index_HTML(fhdhr, self.page_elements)
self.origin = Origin_HTML(fhdhr, self.page_elements)
self.cluster = Cluster_HTML(fhdhr, self.page_elements)
self.diagnostics = Diagnostics_HTML(fhdhr, self.page_elements)
self.version = Version_HTML(fhdhr, self.page_elements)
self.guide = Guide_HTML(fhdhr, self.page_elements)
self.streams = Streams_HTML(fhdhr, self.page_elements)
self.xmltv = xmlTV_HTML(fhdhr, self.page_elements)


@ -1,88 +0,0 @@
from flask import request
from io import StringIO
import urllib.parse
class Cluster_HTML():
endpoints = ["/cluster", "/cluster.html"]
endpoint_name = "cluster"
def __init__(self, fhdhr, page_elements):
self.fhdhr = fhdhr
self.page_elements = page_elements
def __call__(self, *args):
return self.get(*args)
def get(self, *args):
fakefile = StringIO()
page_elements = self.page_elements.get(request)
for line in page_elements["top"]:
fakefile.write(line + "\n")
fakefile.write("<h4 style=\"text-align: center;\">Cluster</h4>")
fakefile.write("\n")
if self.fhdhr.config.dict["fhdhr"]["discovery_address"]:
fakefile.write("<div style=\"text-align: center;\">\n")
fakefile.write(" <button onclick=\"OpenLink('%s')\">%s</button>\n" % ("/api/cluster?method=scan&redirect=%2Fcluster", "Force Scan"))
fakefile.write(" <button onclick=\"OpenLink('%s')\">%s</button>\n" % ("/api/cluster?method=disconnect&redirect=%2Fcluster", "Disconnect"))
fakefile.write("</div><br>\n")
fakefile.write("<table class=\"center\" style=\"width:50%\">\n")
fakefile.write(" <tr>\n")
fakefile.write(" <th>Name</th>\n")
fakefile.write(" <th>Location</th>\n")
fakefile.write(" <th>Joined</th>\n")
fakefile.write(" <th>Options</th>\n")
fakefile.write(" </tr>\n")
fhdhr_list = self.fhdhr.device.cluster.get_list()
for location in list(fhdhr_list.keys()):
fakefile.write(" <tr>\n")
if location in list(self.fhdhr.device.cluster.cluster().keys()):
location_name = self.fhdhr.device.cluster.cluster()[location]["name"]
else:
try:
location_info_url = location + "/discover.json"
location_info_req = self.fhdhr.web.session.get(location_info_url)
location_info = location_info_req.json()
location_name = location_info["FriendlyName"]
except self.fhdhr.web.exceptions.ConnectionError:
self.fhdhr.logger.error("Unreachable: " + location)
location_name = location  # fallback so the table row below still renders
fakefile.write(" <td>%s</td>\n" % (str(location_name)))
fakefile.write(" <td>%s</td>\n" % (str(location)))
fakefile.write(" <td>%s</td>\n" % (str(fhdhr_list[location]["Joined"])))
fakefile.write(" <td>\n")
fakefile.write(" <div>\n")
location_url_query = urllib.parse.quote(location)
fakefile.write(
" <button onclick=\"OpenLink('%s')\">%s</a></button>\n" %
(location, "Visit"))
if not fhdhr_list[location]["Joined"]:
fakefile.write(
" <button onclick=\"OpenLink('%s')\">%s</a></button>\n" %
("/api/cluster?method=add&location=" + location_url_query + "&redirect=%2Fcluster", "Add"))
else:
fakefile.write(
" <button onclick=\"OpenLink('%s')\">%s</a></button>\n" %
("/api/cluster?method=del&location=" + location_url_query + "&redirect=%2Fcluster", "Remove"))
fakefile.write(" </div>\n")
fakefile.write(" </td>\n")
fakefile.write(" </tr>\n")
else:
fakefile.write("<p style=\"text-align: center;\">Discovery Address must be set for SSDP/Cluster</p>\n")
for line in page_elements["end"]:
fakefile.write(line + "\n")
return fakefile.getvalue()


@ -1,46 +0,0 @@
from flask import request
from io import StringIO
class Diagnostics_HTML():
endpoints = ["/diagnostics", "/diagnostics.html"]
endpoint_name = "diagnostics"
def __init__(self, fhdhr, page_elements):
self.fhdhr = fhdhr
self.diagnostics_html = None
self.page_elements = page_elements
def __call__(self, *args):
return self.get(*args)
def get(self, *args):
fakefile = StringIO()
page_elements = self.page_elements.get(request)
for line in page_elements["top"]:
fakefile.write(line + "\n")
# a list of 2 part lists containing button information
button_list = [
["debug.json", "/api/debug"],
["device.xml", "device.xml"],
["discover.json", "discover.json"],
["lineup.json", "lineup.json"],
["lineup_status.json", "lineup_status.json"],
["cluster.json", "/api/cluster?method=get"]
]
for button_item in button_list:
button_label = button_item[0]
button_path = button_item[1]
fakefile.write("<div style=\"text-align: center;\">\n")
fakefile.write(" <p><button onclick=\"OpenLink('%s')\">%s</button></p>\n" % (button_path, button_label))
fakefile.write("</div>\n")
fakefile.write("\n")
for line in page_elements["end"]:
fakefile.write(line + "\n")
return fakefile.getvalue()


@ -1,80 +0,0 @@
from flask import request
from io import StringIO
import datetime
from fHDHR.tools import humanized_time
class Guide_HTML():
endpoints = ["/guide", "/guide.html"]
endpoint_name = "guide"
def __init__(self, fhdhr, page_elements):
self.fhdhr = fhdhr
self.page_elements = page_elements
def __call__(self, *args):
return self.get(*args)
def get(self, *args):
friendlyname = self.fhdhr.config.dict["fhdhr"]["friendlyname"]
nowtime = datetime.datetime.utcnow()
fakefile = StringIO()
page_elements = self.page_elements.get(request)
for line in page_elements["top"]:
fakefile.write(line + "\n")
fakefile.write("<h4 id=\"mcetoc_1cdobsl3g0\" style=\"text-align: center;\"><span style=\"text-decoration: underline;\"><strong><em>What's On %s</em></strong></span></h4>\n" % friendlyname)
fakefile.write("\n")
# a list of 2 part lists containing button information
button_list = [
["Force xmlTV Update", "/api/xmltv?method=update&redirect=%2Fguide"],
]
fakefile.write("<div style=\"text-align: center;\">\n")
for button_item in button_list:
button_label = button_item[0]
button_path = button_item[1]
fakefile.write(" <p><button onclick=\"OpenLink('%s')\">%s</button></p>\n" % (button_path, button_label))
fakefile.write("</div>\n")
fakefile.write("\n")
fakefile.write("<table style=\"width:100%\">\n")
fakefile.write(" <tr>\n")
fakefile.write(" <th>Play</th>\n")
fakefile.write(" <th>Channel Name</th>\n")
fakefile.write(" <th>Channel Number</th>\n")
fakefile.write(" <th>Channel Thumbnail</th>\n")
fakefile.write(" <th>Content Title</th>\n")
fakefile.write(" <th>Content Thumbnail</th>\n")
fakefile.write(" <th>Content Description</th>\n")
fakefile.write(" <th>Content Remaining Time</th>\n")
fakefile.write(" </tr>\n")
for channel in self.fhdhr.device.epg.whats_on_allchans():
end_time = datetime.datetime.strptime(channel["listing"][0]["time_end"], '%Y%m%d%H%M%S +0000')
remaining_time = humanized_time(int((end_time - nowtime).total_seconds()))
play_url = ("/api/m3u?method=get&channel=%s\n" % (channel["number"]))
fakefile.write(" <tr>\n")
fakefile.write(" <td><a href=\"%s\">%s</a>\n" % (play_url, "Play"))
fakefile.write(" <td>%s</td>\n" % (channel["name"]))
fakefile.write(" <td>%s</td>\n" % (channel["number"]))
fakefile.write(" <td><img src=\"%s\" alt=\"%s\" width=\"100\" height=\"100\">\n" % (channel["thumbnail"], channel["name"]))
fakefile.write(" <td>%s</td>\n" % (channel["listing"][0]["title"]))
fakefile.write(" <td><img src=\"%s\" alt=\"%s\" width=\"100\" height=\"100\">\n" % (channel["listing"][0]["thumbnail"], channel["listing"][0]["title"]))
fakefile.write(" <td>%s</td>\n" % (channel["listing"][0]["description"]))
fakefile.write(" <td>%s</td>\n" % (str(remaining_time)))
fakefile.write(" </tr>\n")
for line in page_elements["end"]:
fakefile.write(line + "\n")
channel_guide_html = fakefile.getvalue()
return channel_guide_html


@ -1,13 +0,0 @@
class HTMLerror():
def __init__(self, fhdhr):
self.fhdhr = fhdhr
def get_html_error(self, message):
htmlerror = """<html>
<head></head>
<body>
<h2>{}</h2>
</body>
</html>"""
return htmlerror.format(message)


@ -1,55 +0,0 @@
from flask import request
from io import StringIO
class Index_HTML():
endpoints = ["/", "/index.html"]
endpoint_name = "root"
def __init__(self, fhdhr, page_elements):
self.fhdhr = fhdhr
self.page_elements = page_elements
def __call__(self, *args):
return self.get(*args)
def get(self, *args):
fakefile = StringIO()
page_elements = self.page_elements.get(request)
for line in page_elements["top"]:
fakefile.write(line + "\n")
fakefile.write("<h4 style=\"text-align: center;\">fHDHR Status</h4>")
fakefile.write("\n")
fakefile.write("<table class=\"center\" style=\"width:50%\">\n")
fakefile.write(" <tr>\n")
fakefile.write(" <th></th>\n")
fakefile.write(" <th></th>\n")
fakefile.write(" </tr>\n")
total_channels = self.fhdhr.device.channels.get_station_total()
tuners_in_use = self.fhdhr.device.tuners.inuse_tuner_count()
max_tuners = self.fhdhr.device.tuners.max_tuners
tableguts = [
["Script Directory", str(self.fhdhr.config.dict["filedir"]["script_dir"])],
["Config File", str(self.fhdhr.config.config_file)],
["Cache Path", str(self.fhdhr.config.dict["filedir"]["cache_dir"])],
["Total Channels", str(total_channels)],
["Tuner Usage", "%s/%s" % (str(tuners_in_use), str(max_tuners))]
]
for guts in tableguts:
fakefile.write(" <tr>\n")
fakefile.write(" <td>%s</td>\n" % (guts[0]))
fakefile.write(" <td>%s</td>\n" % (guts[1]))
fakefile.write(" </tr>\n")
for line in page_elements["end"]:
fakefile.write(line + "\n")
return fakefile.getvalue()


@ -1,64 +0,0 @@
from flask import request
from io import StringIO
class Origin_HTML():
endpoints = ["/origin", "/origin.html"]
endpoint_name = "origin"
def __init__(self, fhdhr, page_elements):
self.fhdhr = fhdhr
self.page_elements = page_elements
def __call__(self, *args):
return self.get(*args)
def get(self, *args):
servicename = str(self.fhdhr.config.dict["main"]["servicename"])
fakefile = StringIO()
page_elements = self.page_elements.get(request)
for line in page_elements["top"]:
fakefile.write(line + "\n")
fakefile.write("<h4 style=\"text-align: center;\">%s Status</h4>" % (servicename))
fakefile.write("\n")
# a list of 2 part lists containing button information
button_list = [
["Force Channel Update", "/api/channels?method=scan&redirect=%2Forigin"],
]
fakefile.write("<div style=\"text-align: center;\">\n")
for button_item in button_list:
button_label = button_item[0]
button_path = button_item[1]
fakefile.write(" <p><button onclick=\"OpenLink('%s')\">%s</button></p>\n" % (button_path, button_label))
fakefile.write("</div>\n")
fakefile.write("\n")
fakefile.write("<table class=\"center\" style=\"width:50%\">\n")
fakefile.write(" <tr>\n")
fakefile.write(" <th></th>\n")
fakefile.write(" <th></th>\n")
fakefile.write(" </tr>\n")
origin_status_dict = self.fhdhr.device.channels.get_origin_status()
for key in list(origin_status_dict.keys()):
fakefile.write(" <tr>\n")
fakefile.write(" <td>%s</td>\n" % (str(key)))
fakefile.write(" <td>%s</td>\n" % (str(origin_status_dict[key])))
fakefile.write(" </tr>\n")
total_channels = self.fhdhr.device.channels.get_station_total()
fakefile.write(" <tr>\n")
fakefile.write(" <td>%s</td>\n" % ("Total Channels"))
fakefile.write(" <td>%s</td>\n" % (str(total_channels)))
fakefile.write(" </tr>\n")
for line in page_elements["end"]:
fakefile.write(line + "\n")
return fakefile.getvalue()


@ -1,87 +0,0 @@
class fHDHR_Page_Elements():
def __init__(self, fhdhr):
self.fhdhr = fhdhr
self.location = self.fhdhr.device.cluster.location
def get(self, request):
return {"top": self.pagetop(request), "end": self.pageend(request)}
def pagetop(self, request):
friendlyname = self.fhdhr.config.dict["fhdhr"]["friendlyname"]
servicename = str(self.fhdhr.config.dict["main"]["servicename"])
upper_part = [
"<!DOCTYPE html>",
"<html>",
"<head>",
"<title>%s</title>" % friendlyname,
"<meta name=\"viewport\" content=\"width=device-width, initial-scale=1.0\">",
"<style>",
"table, th, td {",
"border: 1px solid black;",
"}",
"</style>",
"<link href=\"style.css\" rel=\"stylesheet\">",
"</head>",
"<h1 style=\"text-align: center;\">",
"<span style=\"text-decoration: underline;\"><strong><em>%s</em></strong>" % friendlyname,
"</span>",
"<img class=\"pull-left\" src=\"%s\" alt=\"%s\" width=\"100\" height=\"100\">\n" % ("/favicon.ico", "fHDHR Logo"),
"</h1>"
"<br><br>",
"<h2>"
"<div>",
"<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % ("/", "fHDHR"),
"<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % ("/origin", servicename),
"<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % ("/guide", "Guide"),
"<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % ("/cluster", "Cluster"),
"<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % ("/streams", "Streams"),
"<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % ("/xmltv", "xmltv"),
"<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % ("/version", "Version"),
"<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % ("/diagnostics", "Diagnostics"),
"<a class=\"pull-right\" style=\"padding: 5px;\" href=\"%s\">%s</a>" % ("/api/xmltv?method=get&source=origin", "xmltv"),
"<a class=\"pull-right\" style=\"padding: 5px;\" href=\"%s\">%s</a>" % ("/api/m3u?method=get&channel=all", "m3u"),
"</div>",
"<hr align=\"center\" width=\"100%\">"
]
fhdhr_list = self.fhdhr.device.cluster.cluster()
locations = []
for location in list(fhdhr_list.keys()):
item_dict = {
"base_url": fhdhr_list[location]["base_url"],
"name": fhdhr_list[location]["name"]
}
if item_dict["base_url"] != self.location:
locations.append(item_dict)
if len(locations):
upper_part.append("<div>")
locations = sorted(locations, key=lambda i: i['name'])
for location in locations:
upper_part.append("<button class=\"pull-left\" onclick=\"OpenLink('%s')\">%s</a></button>" % (location["base_url"], location["name"]))
upper_part.append("</div>")
upper_part.append("<hr align=\"center\" width=\"100%\">")
retmessage = request.args.get('retmessage', default=None, type=str)
if retmessage:
upper_part.append("<p>%s</p>" % retmessage)
return upper_part
def pageend(self, request):
return [
"</html>",
"",
"<script>",
"function OpenLink(NewURL) {",
" window.open(NewURL, \"_self\");",
"}",
"</script>"
]


@ -1,55 +0,0 @@
from flask import request
from io import StringIO
class Streams_HTML():
endpoints = ["/streams", "/streams.html"]
endpoint_name = "streams"
def __init__(self, fhdhr, page_elements):
self.fhdhr = fhdhr
self.page_elements = page_elements
def __call__(self, *args):
return self.get(*args)
def get(self, *args):
fakefile = StringIO()
page_elements = self.page_elements.get(request)
for line in page_elements["top"]:
fakefile.write(line + "\n")
fakefile.write("<h4 style=\"text-align: center;\">fHDHR Streams</h4>")
fakefile.write("\n")
fakefile.write("<table style=\"width:100%\">\n")
fakefile.write(" <tr>\n")
fakefile.write(" <th>Tuner</th>\n")
fakefile.write(" <th>Status</th>\n")
fakefile.write(" <th>Channel</th>\n")
fakefile.write(" <th>Method</th>\n")
fakefile.write(" <th>Time Active</th>\n")
fakefile.write(" </tr>\n")
tuner_status = self.fhdhr.device.tuners.status()
for tuner in list(tuner_status.keys()):
fakefile.write(" <tr>\n")
fakefile.write(" <td>%s</td>\n" % (str(tuner)))
fakefile.write(" <td>%s</td>\n" % (str(tuner_status[tuner]["status"])))
if tuner_status[tuner]["status"] == "Active":
fakefile.write(" <td>%s<img src=\"%s\" alt=\"%s\" width=\"100\" height=\"100\">%s</td>\n" % (
tuner_status[tuner]["epg"]["name"], tuner_status[tuner]["epg"]["thumbnail"], tuner_status[tuner]["epg"]["name"], str(tuner_status[tuner]["epg"]["number"])))
fakefile.write(" <td>%s</td>\n" % (str(tuner_status[tuner]["method"])))
fakefile.write(" <td>%s</td>\n" % (str(tuner_status[tuner]["Play Time"])))
else:
fakefile.write(" <td>%s</td>\n" % "N/A")
fakefile.write(" <td>%s</td>\n" % "N/A")
fakefile.write(" <td>%s</td>\n" % "N/A")
fakefile.write(" </tr>\n")
for line in page_elements["end"]:
fakefile.write(line + "\n")
return fakefile.getvalue()


@ -1,53 +0,0 @@
import sys
from flask import request
from io import StringIO
class Version_HTML():
endpoints = ["/version", "/version.html"]
endpoint_name = "version"
def __init__(self, fhdhr, page_elements):
self.fhdhr = fhdhr
self.page_elements = page_elements
def __call__(self, *args):
return self.get(*args)
def get(self, *args):
fakefile = StringIO()
page_elements = self.page_elements.get(request)
for line in page_elements["top"]:
fakefile.write(line + "\n")
fakefile.write("<h4 style=\"text-align: center;\">fHDHR Version Information</h4>")
fakefile.write("\n")
fakefile.write("<table class=\"center\" style=\"width:50%\">\n")
fakefile.write(" <tr>\n")
fakefile.write(" <th></th>\n")
fakefile.write(" <th></th>\n")
fakefile.write(" </tr>\n")
fakefile.write(" <tr>\n")
fakefile.write(" <td>%s</td>\n" % ("fHDHR"))
fakefile.write(" <td>%s</td>\n" % (str(self.fhdhr.version)))
fakefile.write(" </tr>\n")
fakefile.write(" <tr>\n")
fakefile.write(" <td>%s</td>\n" % ("Python"))
fakefile.write(" <td>%s</td>\n" % (str(sys.version)))
fakefile.write(" </tr>\n")
if self.fhdhr.config.dict["fhdhr"]["stream_type"] == "ffmpeg":
fakefile.write(" <tr>\n")
fakefile.write(" <td>%s</td>\n" % ("ffmpeg"))
fakefile.write(" <td>%s</td>\n" % (str(self.fhdhr.config.dict["ffmpeg"]["version"])))
fakefile.write(" </tr>\n")
for line in page_elements["end"]:
fakefile.write(line + "\n")
return fakefile.getvalue()


@ -1,56 +0,0 @@
from flask import request
from io import StringIO
class xmlTV_HTML():
endpoints = ["/xmltv"]
endpoint_name = "xmltv"
def __init__(self, fhdhr, page_elements):
self.fhdhr = fhdhr
self.page_elements = page_elements
def __call__(self, *args):
return self.get(*args)
def get(self, *args):
fakefile = StringIO()
page_elements = self.page_elements.get(request)
for line in page_elements["top"]:
fakefile.write(line + "\n")
fakefile.write("<h4 style=\"text-align: center;\">fHDHR xmltv Options</h4>")
fakefile.write("\n")
fakefile.write("<table class=\"center\" style=\"width:50%\">\n")
fakefile.write(" <tr>\n")
fakefile.write(" <th>Version</th>\n")
fakefile.write(" <th>Link</th>\n")
fakefile.write(" <th>Options</th>\n")
fakefile.write(" </tr>\n")
for epg_method in self.fhdhr.config.dict["main"]["valid_epg_methods"]:
if epg_method not in [None, "None"]:
epg_method_name = epg_method
if epg_method == "origin":
epg_method_name = self.fhdhr.config.dict["main"]["dictpopname"]
fakefile.write(" <tr>\n")
fakefile.write(" <td>%s</td>\n" % (epg_method_name))
fakefile.write(" <td><a href=\"%s\">%s</a>\n" % ("/api/xmltv?method=get&source=" + epg_method, epg_method_name))
fakefile.write(" <td>\n")
fakefile.write(" <div>\n")
fakefile.write(
" <button onclick=\"OpenLink('%s')\">%s</a></button>\n" %
("/api/xmltv?method=update&source=" + epg_method + "&redirect=%2Fxmltv", "Update"))
fakefile.write(" </div>\n")
fakefile.write(" </td>\n")
fakefile.write(" </tr>\n")
for line in page_elements["end"]:
fakefile.write(line + "\n")
return fakefile.getvalue()

fHDHR/logger/__init__.py (new file)

@ -0,0 +1,36 @@
import os
import logging
class Logger():
def __init__(self, settings):
self.config = settings
log_level = self.config.dict["logging"]["level"].upper()
# Create a custom logger
logging.basicConfig(format='%(name)s - %(levelname)s - %(message)s', level=log_level)
self.logger = logging.getLogger('fHDHR')
log_file = os.path.join(self.config.internal["paths"]["logs_dir"], 'fHDHR.log')
# Create handlers
# c_handler = logging.StreamHandler()
f_handler = logging.FileHandler(log_file)
# c_handler.setLevel(log_level)
f_handler.setLevel(log_level)
# Create formatters and add it to handlers
# c_format = logging.Formatter('%(name)s - %(levelname)s - %(message)s')
f_format = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
# c_handler.setFormatter(c_format)
f_handler.setFormatter(f_format)
# Add handlers to the logger
# logger.addHandler(c_handler)
self.logger.addHandler(f_handler)
def __getattr__(self, name):
''' will only get called for undefined attributes '''
if hasattr(self.logger, name):
return eval("self.logger.%s" % name)


@ -1,89 +0,0 @@
from .origin_service import OriginService
from .origin_channels import OriginChannels
from .origin_epg import OriginEPG
import fHDHR.exceptions
class OriginEPG_StandIN():
def __init__(self):
pass
def update_epg(self, channels):
return {}
class OriginChannels_StandIN():
def __init__(self):
pass
def get_channels(self):
return []
def get_channel_stream(self, chandict, allchandict):
return [{"number": chandict["number"], "stream_url": None}], False
class OriginServiceWrapper():
def __init__(self, settings, logger, web, db):
self.config = settings
self.logger = logger
self.web = web
self.servicename = settings.dict["main"]["servicename"]
self.setup_success = None
self.setup()
def setup(self):
try:
self.origin = OriginService(self.config, self.logger, self.web)
self.setup_success = True
self.logger.info("%s Setup Success" % self.servicename)
except fHDHR.exceptions.OriginSetupError as e:
self.logger.error(e)
self.setup_success = False
if self.setup_success:
self.channels = OriginChannels(self.config, self.origin, self.logger, self.web)
self.epg = OriginEPG(self.config, self.logger, self.web)
else:
self.channels = OriginChannels_StandIN()
self.epg = OriginEPG_StandIN()
def get_channels(self):
return self.channels.get_channels()
def get_channel_stream(self, chandict, allchandict):
return self.channels.get_channel_stream(chandict, allchandict)
def update_epg(self, channels):
return self.epg.update_epg(channels)
def get_status_dict(self):
if self.setup_success:
status_dict = {
"Setup": "Success",
}
try:
full_status_dict = self.origin.get_status_dict()
for status_key in list(full_status_dict.keys()):
status_dict[status_key] = full_status_dict[status_key]
return status_dict
except AttributeError:
return status_dict
else:
return {
"Setup": "Failed",
}
def __getattr__(self, name):
''' will only get called for undefined attributes '''
if hasattr(self.origin, name):
return eval("self.origin." + name)
elif hasattr(self.channels, name):
return eval("self.channels." + name)


@ -1,58 +0,0 @@
import xmltodict
import json
class OriginChannels():
def __init__(self, settings, origin, logger, web):
self.config = settings
self.origin = origin
self.logger = logger
self.web = web
def get_channels(self):
data_url = ('%s%s:%s/service?method=channel.list&sid=%s' %
("https://" if self.config.dict["origin"]["ssl"] else "http://",
self.config.dict["origin"]["address"],
str(self.config.dict["origin"]["port"]),
self.origin.sid
))
data_req = self.origin.web.session.get(data_url)
data_dict = xmltodict.parse(data_req.content)
if 'channels' not in list(data_dict['rsp'].keys()):
self.logger.error("Could not retrieve channel list")
return []
channel_o_list = data_dict['rsp']['channels']['channel']
channel_list = []
for c in channel_o_list:
dString = json.dumps(c)
channel_dict = json.loads(dString)
clean_station_item = {
"name": channel_dict["name"],
"callsign": channel_dict["name"],
"number": channel_dict["formatted-number"],
"id": channel_dict["id"],
}
channel_list.append(clean_station_item)
return channel_list
def get_channel_stream(self, chandict, allchandict):
caching = True
streamlist = []
streamdict = {}
streamurl = ('%s%s:%s/live?channel=%s&client=%s' %
("https://" if self.config.dict["origin"]["ssl"] else "http://",
self.config.dict["origin"]["address"],
str(self.config.dict["origin"]["port"]),
str(chandict["number"]),
str(chandict["number"]),
))
streamdict = {"number": chandict["number"], "stream_url": streamurl}
streamlist.append(streamdict)
return streamlist, caching


@ -1,71 +0,0 @@
import xmltodict
import hashlib
import fHDHR.tools
import fHDHR.exceptions
class OriginService():
def __init__(self, settings, logger, web):
self.config = settings
self.logger = logger
self.web = web
self.login()
def login(self):
self.logger.info("Logging into NextPVR")
self.sid = self.get_sid()
if not self.sid:
raise fHDHR.exceptions.OriginSetupError("NextPVR Login Failed")
else:
self.logger.info("NextPVR Login Success")
self.config.write(self.config.dict["main"]["dictpopname"], 'sid', self.sid)
def get_sid(self):
if self.config.dict["origin"]["sid"]:
return self.config.dict["origin"]["sid"]
initiate_url = ('%s%s:%s/service?method=session.initiate&ver=1.0&device=fhdhr' %
("https://" if self.config.dict["origin"]["ssl"] else "http://",
self.config.dict["origin"]["address"],
str(self.config.dict["origin"]["port"]),
))
initiate_req = self.web.session.get(initiate_url)
initiate_dict = xmltodict.parse(initiate_req.content)
sid = initiate_dict['rsp']['sid']
salt = initiate_dict['rsp']['salt']
md5PIN = hashlib.md5(str(self.config.dict["origin"]['pin']).encode('utf-8')).hexdigest()
string = ':%s:%s' % (md5PIN, salt)
clientKey = hashlib.md5(string.encode('utf-8')).hexdigest()
login_url = ('%s%s:%s/service?method=session.login&sid=%s&md5=%s' %
("https://" if self.config.dict["origin"]["ssl"] else "http://",
self.config.dict["origin"]["address"],
str(self.config.dict["origin"]["port"]),
sid,
clientKey
))
login_req = self.web.session.get(login_url)
login_dict = xmltodict.parse(login_req.content)
loginsuccess = None
if login_dict['rsp']['@stat'] == "ok":
if login_dict['rsp']['allow_watch'] == "true":
loginsuccess = sid
return loginsuccess
def get_status_dict(self):
nextpvr_address = ('%s%s:%s' %
("https://" if self.config.dict["origin"]["ssl"] else "http://",
self.config.dict["origin"]["address"],
str(self.config.dict["origin"]["port"]),
))
ret_status_dict = {
"Login": "Success",
"Address": nextpvr_address,
}
return ret_status_dict
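The session-key derivation inside get_sid above can be restated on its own; the pin and salt values here are placeholders for illustration only:

import hashlib

pin, salt = "0000", "example-salt"  # placeholders; the real pin comes from config, the salt from session.initiate
md5_pin = hashlib.md5(str(pin).encode('utf-8')).hexdigest()
client_key = hashlib.md5((':%s:%s' % (md5_pin, salt)).encode('utf-8')).hexdigest()
# client_key is then sent as the md5= parameter of the session.login request above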

fHDHR/origins/__init__.py (new file)

@ -0,0 +1,48 @@
import fHDHR.exceptions
class Origin_StandIN():
def __init__(self):
self.setup_success = False
def get_channels(self):
return []
def get_channel_stream(self, chandict, stream_args):
return None
class Origins():
def __init__(self, fhdhr):
self.fhdhr = fhdhr
self.origins_dict = {}
self.origin_selfadd()
for plugin_name in list(self.fhdhr.plugins.plugins.keys()):
if self.fhdhr.plugins.plugins[plugin_name].manifest["tagged_mod"] and self.fhdhr.plugins.plugins[plugin_name].manifest["tagged_mod_type"] == "origin":
self.fhdhr.plugins.plugins[plugin_name].plugin_utils.origin = self.origins_dict[self.fhdhr.plugins.plugins[plugin_name].manifest["tagged_mod"].lower()]
@property
def valid_origins(self):
return [origin for origin in list(self.origins_dict.keys())]
def origin_selfadd(self):
for plugin_name in list(self.fhdhr.plugins.plugins.keys()):
if self.fhdhr.plugins.plugins[plugin_name].type == "origin":
method = self.fhdhr.plugins.plugins[plugin_name].name.lower()
try:
plugin_utils = self.fhdhr.plugins.plugins[plugin_name].plugin_utils
self.origins_dict[method] = self.fhdhr.plugins.plugins[plugin_name].Plugin_OBJ(plugin_utils)
self.fhdhr.logger.info("%s Setup Success" % method)
self.origins_dict[method].setup_success = True
except fHDHR.exceptions.OriginSetupError as e:
self.fhdhr.logger.error(e)
self.origins_dict[method] = Origin_StandIN()
if not hasattr(self.origins_dict[method], 'tuners'):
self.origins_dict[method].tuners = 4
if not hasattr(self.origins_dict[method], 'stream_method'):
self.origins_dict[method].stream_method = self.fhdhr.config.dict["streaming"]["method"]

fHDHR/plugins/__init__.py (new file)

@ -0,0 +1,250 @@
import os
import imp
import json
class Plugin_DB():
def __init__(self, db, name):
self._db = db
self.name = name
self.namespace = name.lower()
# fhdhr
def set_fhdhr_value(self, pluginitem, key, value, namespace="default"):
print("%s plugin is not allowed write access to fhdhr db namespaces." % self.name)
return
def get_fhdhr_value(self, pluginitem, key, namespace="default"):
return self._db.get_fhdhr_value(pluginitem, key, namespace=namespace.lower())
def delete_fhdhr_value(self, pluginitem, key, namespace="default"):
print("%s plugin is not allowed write access to fhdhr db namespaces." % self.name)
return
# Plugin
def set_plugin_value(self, pluginitem, key, value, namespace=None):
if not namespace:
namespace = self.namespace
elif namespace.lower() != self.namespace:
print("%s plugin is not allowed write access to %s db namespace." % (self.name, namespace))
return
return self._db.set_plugin_value(pluginitem, key, value, namespace=self.namespace)
def get_plugin_value(self, pluginitem, key, namespace=None):
if not namespace:
namespace = self.namespace
return self._db.get_plugin_value(pluginitem, key, namespace=namespace.lower())
def delete_plugin_value(self, pluginitem, key, namespace=None):
if not namespace:
namespace = self.namespace
elif namespace.lower() != self.namespace:
print("%s plugin is not allowed write access to %s db namespace." % (self.name, namespace))
return
return self._db.delete_plugin_value(pluginitem, key, namespace=self.namespace)
class Plugin_Config():
def __init__(self, config, name):
self._config = config
self.name = name
self.namespace = name.lower()
@property
def dict(self):
return self._config.dict.copy()
@property
def internal(self):
return self._config.internal.copy()
@property
def conf_default(self):
return self._config.conf_default.copy()
def write(self, key, value, namespace=None):
if not namespace:
namespace = self.namespace
elif str(namespace).lower() != self.namespace:
print("%s plugin is not allowed write access to fhdhr config namespaces." % self.name)
return
return self._config.write(key, value, self.namespace)
class Plugin_Utils():
def __init__(self, config, logger, db, plugin_name, plugin_manifest, modname):
self.config = Plugin_Config(config, plugin_manifest["name"])
self.db = Plugin_DB(db, plugin_manifest["name"])
self.logger = logger
self.namespace = plugin_manifest["name"].lower()
self.plugin_name = plugin_name
self.plugin_manifest = plugin_manifest
self.origin = None
class Plugin():
def __init__(self, config, logger, db, plugin_name, plugin_path, plugin_conf, plugin_manifest):
self.config = config
self.db = db
self.logger = logger
# Gather Info about Plugin
self.plugin_name = plugin_name
self.modname = os.path.basename(plugin_path)
self.path = plugin_path
self.module_type = imp.PKG_DIRECTORY
self.multi_plugin = (self.plugin_name != self.modname)
self.default_conf = plugin_conf
self.manifest = plugin_manifest
if self.multi_plugin:
self.plugin_dict_name = "%s_%s" % (plugin_name, self.modname)
else:
self.plugin_dict_name = plugin_name
self.plugin_utils = Plugin_Utils(config, logger, db, plugin_name, plugin_manifest, self.modname)
# Load the module
self._module = self._load()
def setup(self):
if self.type == "alt_epg":
self.config.register_valid_epg_method(self.name, self.plugin_dict_name)
elif self.type == "alt_stream":
self.config.register_valid_streaming_method(self.name, self.plugin_dict_name)
elif self.type == "web":
self.config.register_web_path(self.manifest["name"], self.path, self.plugin_dict_name)
if self.has_setup():
self._module.setup(self)
def has_setup(self):
return hasattr(self._module, 'setup')
def _load(self):
description = ('', '', self.module_type)
mod = imp.load_module(self.plugin_dict_name, None, self.path, description)
return mod
@property
def name(self):
return self.manifest["name"]
@property
def version(self):
return self.manifest["version"]
@property
def type(self):
return self.manifest["type"]
def __getattr__(self, name):
''' will only get called for undefined attributes '''
if name == "Plugin_OBJ":
return self._module.Plugin_OBJ
class PluginsHandler():
def __init__(self, settings):
self.config = settings
self.plugins = {}
self.found_plugins = []
self.found_plugins_conf = []
self.list_plugins()
def setup(self):
for plugin_name in list(self.plugins.keys()):
self.plugins[plugin_name].setup()
def load_plugin_configs(self):
for file_item_path in self.found_plugins_conf:
self.config.import_conf_json(file_item_path)
def list_plugins(self):
for directory in self.config.internal["paths"]["plugins_dir"]:
base = os.path.abspath(directory)
for filename in os.listdir(base):
abspath = os.path.join(base, filename)
if os.path.isdir(abspath):
plugin_conf = []
for subfilename in os.listdir(abspath):
subabspath = os.path.join(abspath, subfilename)
if subfilename.endswith("_conf.json"):
plugin_conf.append(subabspath)
self.found_plugins_conf.append(subabspath)
# Plugin/multi-plugin must have a basic manifest json
conffilepath = os.path.join(abspath, 'plugin.json')
if os.path.isfile(conffilepath):
plugin_manifest = json.load(open(conffilepath, 'r'))
for plugin_man_item in ["name", "version", "type"]:
if plugin_man_item not in list(plugin_manifest.keys()):
plugin_manifest[plugin_man_item] = None
self.config.register_version(os.path.basename(filename), plugin_manifest["version"], "plugin")
if plugin_manifest["type"] == "origin":
self.config.register_valid_origin_method(plugin_manifest["name"])
plugin_import_print_string = "Found %s type plugin: %s %s. " % (plugin_manifest["type"], plugin_manifest["name"], plugin_manifest["version"])
# Warn for multiple origins
if plugin_manifest["type"] == "origin" and len([plugin_name for plugin_name, plugin_path, plugin_conf, plugin_manifest in self.found_plugins if plugin_manifest["type"] == "origin"]):
plugin_import_print_string += " ImportWarning: Only one Origin Allowed."
if not any(plugin_manifest[plugin_item] for plugin_item in ["name", "version", "type"]):
plugin_import_print_string += " ImportWarning: Missing PLUGIN_* Value."
else:
# Single Plugin
if os.path.isfile(os.path.join(abspath, '__init__.py')):
plugin_manifest["tagged_mod"] = None
plugin_manifest["tagged_mod_type"] = None
self.found_plugins.append((os.path.basename(filename), abspath, plugin_conf, plugin_manifest))
else:
# Multi-Plugin
for subfilename in os.listdir(abspath):
subabspath = os.path.join(abspath, subfilename)
if os.path.isdir(subabspath):
subconffilepath = os.path.join(subabspath, 'plugin.json')
if os.path.isfile(subconffilepath):
subplugin_manifest = json.load(open(subconffilepath, 'r'))
for subplugin_man_item in ["name", "version", "type"]:
if subplugin_man_item not in list(subplugin_manifest.keys()):
subplugin_manifest[subplugin_man_item] = plugin_manifest[subplugin_man_item]
else:
subplugin_manifest = plugin_manifest
subplugin_manifest["tagged_mod"] = None
subplugin_manifest["tagged_mod_type"] = None
if plugin_manifest["type"] != subplugin_manifest["type"]:
subplugin_manifest["tagged_mod"] = plugin_manifest["name"]
subplugin_manifest["tagged_mod_type"] = plugin_manifest["type"]
if os.path.isfile(os.path.join(subabspath, '__init__.py')):
self.found_plugins.append((os.path.basename(filename), subabspath, plugin_conf, subplugin_manifest))
print(plugin_import_print_string)
self.load_plugin_configs()
def load_plugins(self, logger, db):
self.logger = logger
self.db = db
for plugin_name, plugin_path, plugin_conf, plugin_manifest in self.found_plugins:
plugin_item = Plugin(self.config, self.logger, self.db, plugin_name, plugin_path, plugin_conf, plugin_manifest)
self.plugins[plugin_item.plugin_dict_name] = plugin_item
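Based on the manifest handling in list_plugins above, each plugin directory ships a plugin.json alongside its __init__.py; list_plugins looks for the name, version and type keys and fills in None for anything missing. A minimal sketch, where the field values are illustrative assumptions rather than shipped defaults:

import json

# Illustrative manifest for an "origin" type plugin.
manifest = {"name": "NextPVR", "version": "v0.0.1", "type": "origin"}
with open("plugin.json", "w") as manifest_file:
    json.dump(manifest, manifest_file, indent=4)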


@ -1,3 +1,5 @@
import os
import re
import ast
import requests
import xml.etree.ElementTree
@ -6,8 +8,32 @@ UNARY_OPS = (ast.UAdd, ast.USub)
BINARY_OPS = (ast.Add, ast.Sub, ast.Mult, ast.Div, ast.Mod)
def sub_el(parent, name, text=None, **kwargs):
el = xml.etree.ElementTree.SubElement(parent, name, **kwargs)
def channel_sort(channel_list):
"""Take a list of channel number strings and sort the Numbers and SubNumbers"""
chan_dict_list_split = {}
for number in channel_list:
try:
subnumber = number.split(".")[1]
except IndexError:
subnumber = None
prinumber = number.split(".")[0]
chan_dict_list_split[number] = {"number": prinumber, "subnumber": subnumber}
return sorted(chan_dict_list_split, key=lambda i: (int(chan_dict_list_split[i]['number']), int(chan_dict_list_split[i]['subnumber'] or 0)))
def is_docker():
path = "/proc/self/cgroup"
if not os.path.isfile(path):
return False
with open(path) as f:
for line in f:
if re.match("\d+:[\w=]+:/docker(-[ce]e)?/\w+", line):
return True
return False
def sub_el(parent, sub_el_item_name, text=None, **kwargs):
el = xml.etree.ElementTree.SubElement(parent, sub_el_item_name, **kwargs)
if text:
el.text = text
return el
@ -76,6 +102,14 @@ def hours_between_datetime(first_time, later_time):
return (timebetween.total_seconds() / 60 / 60)
def humanized_filesize(size, decimal_places=2):
for unit in ['B', 'KiB', 'MiB', 'GiB', 'TiB', 'PiB', 'EiB', 'ZiB', 'YiB']:
if size < 1024.0 or unit == 'YiB':
break
size /= 1024.0
return f"{size:.{decimal_places}f} {unit}"
def humanized_time(countdownseconds):
time = float(countdownseconds)
if time == 0:
@ -99,9 +133,9 @@ def humanized_time(countdownseconds):
if currenttimevar > 1:
timetype = str(x+"s")
if displaymsg:
displaymsg = str(displaymsg + " " + str(int(currenttimevar)) + " " + timetype)
displaymsg = "%s %s %s" % (displaymsg, int(currenttimevar), timetype)
else:
displaymsg = str(str(int(currenttimevar)) + " " + timetype)
displaymsg = "%s %s" % (int(currenttimevar), timetype)
if not displaymsg:
return "just now"
return displaymsg
@ -113,3 +147,8 @@ class WebReq():
def __init__(self):
self.session = requests.Session()
self.exceptions = requests.exceptions
def __getattr__(self, name):
''' will only get called for undefined attributes '''
if hasattr(self.session, name):
return eval("self.session.%s" % name)

fHDHR_web/__init__.py (new file)

@ -0,0 +1,229 @@
from gevent.pywsgi import WSGIServer
from flask import Flask, request, session
import threading
import uuid
from .pages import fHDHR_Pages
from .files import fHDHR_Files
from .brython import fHDHR_Brython
from .api import fHDHR_API
fHDHR_web_VERSION = "v0.8.1-beta"
class fHDHR_HTTP_Server():
app = None
def __init__(self, fhdhr):
self.fhdhr = fhdhr
self.template_folder = fhdhr.config.internal["paths"]["www_templates_dir"]
self.fhdhr.logger.info("Loading Flask.")
self.fhdhr.app = Flask("fHDHR", template_folder=self.template_folder)
self.instance_id = str(uuid.uuid4())
# Allow Internal API Usage
self.fhdhr.app.testing = True
self.fhdhr.api.client = self.fhdhr.app.test_client()
# Set Secret Key For Sessions
self.fhdhr.app.secret_key = self.fhdhr.config.dict["fhdhr"]["friendlyname"]
self.route_list = {}
self.endpoints_obj = {}
self.endpoints_obj["pages"] = fHDHR_Pages(fhdhr)
self.endpoints_obj["files"] = fHDHR_Files(fhdhr)
self.endpoints_obj["brython"] = fHDHR_Brython(fhdhr)
self.endpoints_obj["api"] = fHDHR_API(fhdhr)
self.selfadd_web_plugins()
for endpoint_type in list(self.endpoints_obj.keys()):
self.fhdhr.logger.info("Loading HTTP %s Endpoints." % endpoint_type)
self.add_endpoints(endpoint_type)
self.fhdhr.app.before_request(self.before_request)
self.fhdhr.app.after_request(self.after_request)
self.fhdhr.app.before_first_request(self.before_first_request)
self.fhdhr.threads["flask"] = threading.Thread(target=self.run)
def selfadd_web_plugins(self):
for plugin_name in list(self.fhdhr.plugins.plugins.keys()):
if self.fhdhr.plugins.plugins[plugin_name].type == "web":
method = self.fhdhr.plugins.plugins[plugin_name].name.lower()
plugin_utils = self.fhdhr.plugins.plugins[plugin_name].plugin_utils
try:
self.endpoints_obj[method] = self.fhdhr.plugins.plugins[plugin_name].Plugin_OBJ(self.fhdhr, plugin_utils)
except Exception as e:
print(e)
def start(self):
self.fhdhr.logger.info("Flask HTTP Thread Starting")
self.fhdhr.threads["flask"].start()
def stop(self):
self.fhdhr.logger.info("Flask HTTP Thread Stopping")
self.http.stop()
def before_first_request(self):
self.fhdhr.logger.info("HTTP Server Online.")
def before_request(self):
session["session_id"] = str(uuid.uuid4())
session["instance_id"] = self.instance_id
session["route_list"] = self.route_list
session["user_agent"] = request.headers.get('User-Agent')
session["is_internal_api"] = self.detect_internal_api(request)
if session["is_internal_api"]:
self.fhdhr.logger.debug("Client is using internal API call.")
session["is_mobile"] = self.detect_mobile(request)
if session["is_mobile"]:
self.fhdhr.logger.debug("Client is a mobile device.")
session["is_plexmediaserver"] = self.detect_plexmediaserver(request)
if session["is_plexmediaserver"]:
self.fhdhr.logger.debug("Client is a Plex Media Server.")
session["deviceauth"] = self.detect_plexmediaserver(request)
session["tuner_used"] = None
session["restart"] = False
self.fhdhr.logger.debug("Client %s requested %s Opening" % (request.method, request.path))
def after_request(self, response):
# Close Tuner if it was in use, and did not close already
# if session["tuner_used"] is not None:
# tuner = self.fhdhr.device.tuners.tuners[str(session["tuner_used"])]
# if tuner.tuner_lock.locked():
# self.fhdhr.logger.info("Shutting down Tuner #%s after Request." % session["tuner_used"])
# tuner.close()
self.fhdhr.logger.debug("Client %s requested %s Closing" % (request.method, request.path))
if not session["restart"]:
return response
else:
return self.stop()
def detect_internal_api(self, request):
user_agent = request.headers.get('User-Agent')
if not user_agent:
return False
elif str(user_agent).lower().startswith("fhdhr"):
return True
else:
return False
def detect_deviceauth(self, request):
return request.args.get('DeviceAuth', default=None, type=str)
def detect_mobile(self, request):
user_agent = request.headers.get('User-Agent')
phones = ["iphone", "android", "blackberry"]
if not user_agent:
return False
elif any(phone in user_agent.lower() for phone in phones):
return True
else:
return False
def detect_plexmediaserver(self, request):
user_agent = request.headers.get('User-Agent')
if not user_agent:
return False
elif str(user_agent).lower().startswith("plexmediaserver"):
return True
else:
return False
def add_endpoints(self, index_name):
item_list = [x for x in dir(self.endpoints_obj[index_name]) if self.isapath(x)]
endpoint_main = self.endpoints_obj[index_name]
endpoint_main.fhdhr.version # dummy line
for item in item_list:
endpoints = eval("endpoint_main.%s.%s" % (item, "endpoints"))
if isinstance(endpoints, str):
endpoints = [endpoints]
handler = eval("endpoint_main.%s" % item)
endpoint_name = eval("endpoint_main.%s.%s" % (item, "endpoint_name"))
try:
endpoint_methods = eval("endpoint_main.%s.%s" % (item, "endpoint_methods"))
except AttributeError:
endpoint_methods = ['GET']
try:
endpoint_access_level = eval("endpoint_main.%s.%s" % (item, "endpoint_access_level"))
except AttributeError:
endpoint_access_level = 0
try:
pretty_name = eval("endpoint_main.%s.%s" % (item, "pretty_name"))
except AttributeError:
pretty_name = endpoint_name
try:
endpoint_category = eval("endpoint_main.%s.%s" % (item, "endpoint_category"))
except AttributeError:
endpoint_category = index_name
try:
endpoint_default_parameters = eval("endpoint_main.%s.%s" % (item, "endpoint_default_parameters"))
except AttributeError:
endpoint_default_parameters = {}
self.fhdhr.logger.debug("Adding endpoint %s available at %s with %s methods." % (endpoint_name, ",".join(endpoints), ",".join(endpoint_methods)))
if endpoint_category not in list(self.route_list.keys()):
self.route_list[endpoint_category] = {}
if endpoint_name not in list(self.route_list[endpoint_category].keys()):
self.route_list[endpoint_category][endpoint_name] = {}
self.route_list[endpoint_category][endpoint_name]["name"] = endpoint_name
self.route_list[endpoint_category][endpoint_name]["endpoints"] = endpoints
self.route_list[endpoint_category][endpoint_name]["endpoint_methods"] = endpoint_methods
self.route_list[endpoint_category][endpoint_name]["endpoint_access_level"] = endpoint_access_level
self.route_list[endpoint_category][endpoint_name]["endpoint_default_parameters"] = endpoint_default_parameters
self.route_list[endpoint_category][endpoint_name]["pretty_name"] = pretty_name
self.route_list[endpoint_category][endpoint_name]["endpoint_category"] = endpoint_category
for endpoint in endpoints:
self.add_endpoint(endpoint=endpoint,
endpoint_name=endpoint_name,
handler=handler,
methods=endpoint_methods)
def isapath(self, item):
not_a_page_list = ["fhdhr", "plugin_utils"]
if item in not_a_page_list:
return False
elif item.startswith("__") and item.endswith("__"):
return False
else:
return True
def add_endpoint(self, endpoint=None, endpoint_name=None, handler=None, methods=['GET']):
self.fhdhr.app.add_url_rule(endpoint, endpoint_name, handler, methods=methods)
def run(self):
self.http = WSGIServer(self.fhdhr.api.address_tuple,
self.fhdhr.app.wsgi_app,
log=self.fhdhr.logger.logger,
error_log=self.fhdhr.logger.logger)
try:
self.http.serve_forever()
self.stop()
except AttributeError:
self.fhdhr.logger.info("HTTP Server Offline")

fHDHR_web/api/__init__.py (new file, 40 lines)
@@ -0,0 +1,40 @@
from .root_url import Root_URL
from .startup_tasks import Startup_Tasks
from .settings import Settings
from .channels import Channels
from .xmltv import xmlTV
from .m3u import M3U
from .w3u import W3U
from .epg import EPG
from .tuners import Tuners
from .debug import Debug_JSON
from .plugins import Plugins_JSON
from .route_list import Route_List
from .images import Images
class fHDHR_API():
def __init__(self, fhdhr):
self.fhdhr = fhdhr
self.root_url = Root_URL(fhdhr)
self.startup_tasks = Startup_Tasks(fhdhr)
self.settings = Settings(fhdhr)
self.channels = Channels(fhdhr)
self.xmltv = xmlTV(fhdhr)
self.m3u = M3U(fhdhr)
self.w3u = W3U(fhdhr)
self.epg = EPG(fhdhr)
self.tuners = Tuners(fhdhr)
self.debug = Debug_JSON(fhdhr)
self.plugins = Plugins_JSON(fhdhr)
self.route_list = Route_List(fhdhr)
self.images = Images(fhdhr)

fHDHR_web/api/channels.py (new file, 172 lines)
@@ -0,0 +1,172 @@
from flask import request, redirect, Response, abort
import urllib.parse
import json
from fHDHR.tools import channel_sort
class Channels():
endpoints = ["/api/channels"]
endpoint_name = "api_channels"
endpoint_methods = ["GET", "POST"]
endpoint_default_parameters = {
"method": "get"
}
def __init__(self, fhdhr):
self.fhdhr = fhdhr
def __call__(self, *args):
return self.get(*args)
def get(self, *args):
method = request.args.get('method', default=None, type=str)
redirect_url = request.args.get('redirect', default=None, type=str)
origin_methods = self.fhdhr.origins.valid_origins
origin = request.args.get('origin', default=None, type=str)
if origin and origin not in origin_methods:
return "%s Invalid channels origin" % origin
if method == "get":
channels_info = {}
if not origin:
origin_list = origin_methods
else:
origin_list = [origin]
for origin_item in origin_list:
channels_info[origin_item] = {}
for fhdhr_id in [x["id"] for x in self.fhdhr.device.channels.get_channels(origin=origin_item)]:
channel_obj = self.fhdhr.device.channels.list[origin_item][fhdhr_id]
channel_dict = channel_obj.dict.copy()
channel_dict["m3u_url"] = channel_obj.api_m3u_url
channel_dict["stream_url"] = channel_obj.api_stream_url
channels_info[origin_item][channel_obj.number] = channel_dict
# Sort the channels
sorted_channel_list = channel_sort(list(channels_info[origin_item].keys()))
sorted_chan_guide = []
for channel in sorted_channel_list:
sorted_chan_guide.append(channels_info[origin_item][channel])
channels_info[origin_item] = sorted_chan_guide
channels_info_json = json.dumps(channels_info, indent=4)
return Response(status=200,
response=channels_info_json,
mimetype='application/json')
elif method == "favorite":
channel = request.args.get('channel', default=None, type=str)
if not channel:
if redirect_url:
return redirect("%s?retmessage=%s" % (redirect_url, urllib.parse.quote("%s Failed" % method)))
else:
return "%s Falied" % method
if channel.startstwith(tuple(["+", "-", "x"])):
channel_method = channel[0]
channel_number = channel[1:]
if str(channel_number) not in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number", origin)]:
response = Response("Not Found", status=404)
response.headers["X-fHDHR-Error"] = "801 - Unknown Channel"
self.fhdhr.logger.error(response.headers["X-fHDHR-Error"])
abort(response)
if channel_method == "+":
self.fhdhr.device.channels.set_channel_enablement("number", channel_number, channel_method, origin)
elif channel_method == "-":
self.fhdhr.device.channels.set_channel_enablement("number", channel_number, channel_method, origin)
elif channel_method == "x":
self.fhdhr.device.channels.set_channel_enablement("number", channel_number, "toggle", origin)
else:
self.fhdhr.logger.warning("Unknown favorite command %s" % request.args['favorite'])
return abort(200, "Not a valid favorite command")
elif method in ["enable", "disable"]:
channel = request.args.get('channel', default=None, type=str)
if channel == "all":
self.fhdhr.device.channels.set_channel_enablement_all(method, origin)
elif not channel or str(channel) not in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number", origin)]:
if redirect_url:
return redirect("%s?retmessage=%s" % (redirect_url, urllib.parse.quote("%s Failed" % method)))
else:
return "%s Falied" % method
else:
self.fhdhr.device.channels.set_channel_enablement("number", channel, method, origin)
elif method == "update":
channel_id = request.form.get('id', None)
updatedict = {}
for key in list(request.form.keys()):
if key != "id":
if key in ["name", "callsign", "thumbnail"]:
updatedict[key] = str(request.form.get(key))
elif key in ["number"]:
number = str(request.form.get(key))
if "." in number:
updatedict["subnumber"] = number.split(".")[1]
updatedict["number"] = number.split(".")[0]
else:
updatedict["number"] = number
elif key in ["enabled"]:
confvalue = request.form.get(key)
if str(confvalue).lower() in ["false"]:
confvalue = False
elif str(confvalue).lower() in ["true"]:
confvalue = True
updatedict[key] = confvalue
elif key in ["favorite", "HD"]:
updatedict[key] = int(request.form.get(key))
self.fhdhr.device.channels.set_channel_status("id", channel_id, updatedict, origin)
elif method == "modify":
channels_list = json.loads(request.form.get('channels', "[]"))
for channel in channels_list:
updatedict = {}
for key in list(channel.keys()):
if key != "id":
if key in ["name", "callsign", "thumbnail"]:
updatedict[key] = str(channel[key])
elif key in ["number"]:
number = str(channel[key])
if "." in number:
updatedict["subnumber"] = number.split(".")[1]
updatedict["number"] = number.split(".")[0]
else:
updatedict["number"] = number
elif key in ["enabled"]:
confvalue = channel[key]
if str(confvalue).lower() in ["false"]:
confvalue = False
elif str(confvalue).lower() in ["true"]:
confvalue = True
updatedict[key] = confvalue
elif key in ["favorite", "HD"]:
updatedict[key] = int(channel[key])
else:
channel_id = str(channel[key])
self.fhdhr.device.channels.set_channel_status("id", channel_id, updatedict, origin)
elif method == "scan":
self.fhdhr.device.channels.get_channels(forceupdate=True, origin=origin)
else:
return "Invalid Method"
if redirect_url:
return redirect("%s?retmessage=%s" % (redirect_url, urllib.parse.quote("%s Success" % method)))
else:
if method == "scan":
return redirect('/lineup_status.json')
else:
return "%s Success" % method

fHDHR_web/api/debug.py
@@ -5,6 +5,7 @@ import json
class Debug_JSON():
endpoints = ["/api/debug"]
endpoint_name = "api_debug"
endpoint_methods = ["GET", "POST"]
def __init__(self, fhdhr):
self.fhdhr = fhdhr
@@ -18,11 +19,16 @@ class Debug_JSON():
debugjson = {
"base_url": base_url,
- "total channels": self.fhdhr.device.channels.get_station_total(),
- "tuner status": self.fhdhr.device.tuners.status(),
}
- cluster_json = json.dumps(debugjson, indent=4)
+ for origin in list(self.fhdhr.origins.origins_dict.keys()):
+     debugjson[origin] = {
+         "tuner status": self.fhdhr.device.tuners.status(origin),
+         "total channels": len(list(self.fhdhr.device.channels.list[origin].keys()))
+         }
+ debug_json = json.dumps(debugjson, indent=4)
return Response(status=200,
- response=cluster_json,
+ response=debug_json,
mimetype='application/json')
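
The practical effect of this hunk is that /api/debug now reports tuner status and channel totals per origin instead of a single global figure. A rough sketch of the resulting payload, with placeholder origin name and values:

# Placeholder illustration of the /api/debug response shape after this change;
# the origin name and values below are made up.
example_debug_response = {
    "base_url": "http://localhost:5004",
    "nextpvr": {
        "tuner status": {},     # whatever tuners.status(origin) returns
        "total channels": 42
    }
}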

fHDHR_web/api/epg.py (new file, 132 lines)
@@ -0,0 +1,132 @@
from flask import Response, request, redirect
import urllib.parse
import json
import datetime
from fHDHR.tools import humanized_time, channel_sort
class EPG():
"""Methods to create xmltv.xml"""
endpoints = ["/api/epg"]
endpoint_name = "api_epg"
endpoint_methods = ["GET", "POST"]
def __init__(self, fhdhr):
self.fhdhr = fhdhr
def __call__(self, *args):
return self.get(*args)
def get(self, *args):
method = request.args.get('method', default="get", type=str)
source = request.args.get('source', default=self.fhdhr.config.dict["epg"]["def_method"], type=str)
if source not in list(self.fhdhr.config.dict["epg"]["valid_methods"].keys()):
return "%s Invalid epg method" % source
redirect_url = request.args.get('redirect', default=None, type=str)
if method == "get":
epgdict = self.fhdhr.device.epg.get_epg(source)
if source in self.fhdhr.origins.valid_origins:
epgdict = epgdict.copy()
for c in list(epgdict.keys()):
chan_obj = self.fhdhr.device.channels.get_channel_obj("origin_id", epgdict[c]["id"], source)
epgdict[chan_obj.number] = epgdict.pop(c)
epgdict[chan_obj.number]["name"] = chan_obj.dict["name"]
epgdict[chan_obj.number]["callsign"] = chan_obj.dict["callsign"]
epgdict[chan_obj.number]["number"] = chan_obj.number
epgdict[chan_obj.number]["id"] = chan_obj.dict["origin_id"]
epgdict[chan_obj.number]["thumbnail"] = chan_obj.thumbnail
# Sort the channels
sorted_channel_list = channel_sort(list(epgdict.keys()))
sorted_chan_guide = {}
for channel in sorted_channel_list:
sorted_chan_guide[channel] = epgdict[channel]
epg_json = json.dumps(sorted_chan_guide, indent=4)
return Response(status=200,
response=epg_json,
mimetype='application/json')
elif method == "current":
nowtime = datetime.datetime.utcnow().timestamp()
chan_guide_list = []
whatson = self.fhdhr.device.epg.whats_on_allchans(source)
# Sort the channels
sorted_channel_list = channel_sort(list(whatson.keys()))
sorted_chan_guide = {}
for channel in sorted_channel_list:
sorted_chan_guide[channel] = whatson[channel]
for channel in list(sorted_chan_guide.keys()):
if sorted_chan_guide[channel]["listing"][0]["time_end"]:
remaining_time = humanized_time(sorted_chan_guide[channel]["listing"][0]["time_end"] - nowtime)
else:
remaining_time = "N/A"
chan_dict = {
"name": sorted_chan_guide[channel]["name"],
"number": sorted_chan_guide[channel]["number"],
"chan_thumbnail": sorted_chan_guide[channel]["thumbnail"],
"listing_title": sorted_chan_guide[channel]["listing"][0]["title"],
"listing_thumbnail": sorted_chan_guide[channel]["listing"][0]["thumbnail"],
"listing_description": sorted_chan_guide[channel]["listing"][0]["description"],
"listing_remaining_time": str(remaining_time)
}
for time_item in ["time_start", "time_end"]:
if not sorted_chan_guide[channel]["listing"][0][time_item]:
chan_dict["listing_%s" % time_item] = "N/A"
elif str(sorted_chan_guide[channel]["listing"][0][time_item]).endswith(tuple(["+0000", "+00:00"])):
chan_dict["listing_%s" % time_item] = str(sorted_chan_guide[channel]["listing"][0][time_item])
else:
chan_dict["listing_%s" % time_item] = str(datetime.datetime.fromtimestamp(sorted_chan_guide[channel]["listing"][0][time_item]))
if source in self.fhdhr.origins.valid_origins:
chan_obj = self.fhdhr.device.channels.get_channel_obj("origin_id", sorted_chan_guide[channel]["id"], source)
chan_dict["name"] = chan_obj.dict["name"]
chan_dict["number"] = chan_obj.number
chan_dict["chan_thumbnail"] = chan_obj.thumbnail
chan_dict["enabled"] = chan_obj.dict["enabled"]
chan_dict["m3u_url"] = chan_obj.api_m3u_url
chan_dict["listing_thumbnail"] = chan_dict["listing_thumbnail"] or chan_obj.thumbnail
else:
if not chan_dict["listing_thumbnail"]:
chan_dict["listing_thumbnail"] = chan_dict["chan_thumbnail"]
if not chan_dict["listing_thumbnail"]:
chan_dict["listing_thumbnail"] = "/api/images?method=generate&type=channel&message=%s" % chan_dict["number"]
chan_guide_list.append(chan_dict)
epg_json = json.dumps(chan_guide_list, indent=4)
return Response(status=200,
response=epg_json,
mimetype='application/json')
elif method == "update":
self.fhdhr.device.epg.update(source)
elif method == "clearcache":
self.fhdhr.device.epg.clear_epg_cache(source)
else:
return "%s Invalid Method" % method
if redirect_url:
return redirect("%s?retmessage=%s" % (redirect_url, urllib.parse.quote("%s Success" % method)))
else:
return "%s Success" % method

fHDHR_web/api/images.py
@@ -4,6 +4,12 @@ from flask import request, Response, abort
class Images():
endpoints = ["/api/images"]
endpoint_name = "api_images"
endpoint_methods = ["GET", "POST"]
endpoint_default_parameters = {
"method": "generate",
"type": "content",
"message": "Internal Image Handling"
}
def __init__(self, fhdhr):
self.fhdhr = fhdhr
@@ -25,7 +31,7 @@ class Images():
elif method == "get":
source = request.args.get('source', default=self.fhdhr.config.dict["epg"]["method"], type=str)
- if source in self.fhdhr.config.dict["main"]["valid_epg_methods"]:
+ if source in list(self.fhdhr.config.dict["epg"]["valid_methods"].keys()):
image_type = request.args.get('type', default="content", type=str)
if image_type in ["content", "channel"]:
image_id = request.args.get('id', default=None, type=str)

fHDHR_web/api/m3u.py (new file, 127 lines)
@@ -0,0 +1,127 @@
from flask import Response, request, redirect
import urllib.parse
from io import StringIO
from fHDHR.tools import channel_sort
class M3U():
endpoints = ["/api/m3u", "/api/channels.m3u"]
endpoint_name = "api_m3u"
endpoint_methods = ["GET", "POST"]
def __init__(self, fhdhr):
self.fhdhr = fhdhr
def __call__(self, *args):
return self.get(*args)
def get(self, *args):
base_url = request.url_root[:-1]
method = request.args.get('method', default="get", type=str)
channel = request.args.get('channel', default="all", type=str)
redirect_url = request.args.get('redirect', default=None, type=str)
if method == "get":
origin_methods = self.fhdhr.origins.valid_origins
origin = request.args.get('origin', default=None, type=str)
if origin and origin not in origin_methods:
return "%s Invalid channels origin" % origin
FORMAT_DESCRIPTOR = "#EXTM3U"
RECORD_MARKER = "#EXTINF"
fakefile = StringIO()
xmltvurl = ('%s/api/xmltv' % base_url)
fakefile.write("%s url-tvg=\"%s\" x-tvg-url=\"%s\"\n" % (FORMAT_DESCRIPTOR, xmltvurl, xmltvurl))
channel_items = []
if origin:
if channel == "all":
fileName = "channels.m3u"
for fhdhr_id in [x["id"] for x in self.fhdhr.device.channels.get_channels(origin)]:
channel_obj = self.fhdhr.device.channels.get_channel_obj("id", fhdhr_id, origin)
if channel_obj.enabled:
channel_items.append(channel_obj)
elif str(channel) in [str(x) for x in self.fhdhr.device.channels.get_channel_list("number", origin)]:
channel_obj = self.fhdhr.device.channels.get_channel_obj("number", channel, origin)
fileName = "%s.m3u" % channel_obj.number
if channel_obj.enabled:
channel_items.append(channel_obj)
else:
return "Channel Disabled"
elif channel != "all" and str(channel) in [str(x) for x in self.fhdhr.device.channels.get_channel_list("id", origin)]:
channel_obj = self.fhdhr.device.channels.get_channel_obj("id", channel, origin)
fileName = "%s.m3u" % channel_obj.number
if channel_obj.enabled:
channel_items.append(channel_obj)
else:
return "Channel Disabled"
elif not origin and channel == "all":
fileName = "channels.m3u"
for origin in list(self.fhdhr.origins.origins_dict.keys()):
for fhdhr_id in [x["id"] for x in self.fhdhr.device.channels.get_channels(origin)]:
channel_obj = self.fhdhr.device.channels.get_channel_obj("id", fhdhr_id, origin)
if channel_obj.enabled:
channel_items.append(channel_obj)
elif not origin and channel != "all" and str(channel) in [str(x) for x in self.fhdhr.device.channels.get_channel_list("id")]:
channel_obj = self.fhdhr.device.channels.get_channel_obj("id", channel)
fileName = "%s.m3u" % channel_obj.number
if channel_obj.enabled:
channel_items.append(channel_obj)
else:
return "Channel Disabled"
else:
return "Invalid Channel"
channels_info = {}
for channel_obj in channel_items:
if self.fhdhr.config.dict["epg"]["images"] == "proxy" or not channel_obj.thumbnail:
logourl = ('%s/api/images?method=get&type=channel&id=%s' %
(base_url, str(channel_obj.dict['origin_id'])))
else:
logourl = channel_obj.thumbnail
channels_info[channel_obj.number] = {
"channelID": str(channel_obj.dict['origin_id']),
"tvg-chno": str(channel_obj.number),
"tvg-name": str(channel_obj.dict['name']),
"tvg-id": str(channel_obj.number),
"tvg-logo": logourl,
"group-title": channel_obj.origin,
"group-titleb": str(channel_obj.dict['name']),
"stream_url": "%s%s" % (base_url, channel_obj.api_stream_url)
}
# Sort the channels
sorted_channel_list = channel_sort(list(channels_info.keys()))
sorted_chan_guide = []
for channel in sorted_channel_list:
sorted_chan_guide.append(channels_info[channel])
for channel_item_dict in sorted_chan_guide:
m3ustring = "%s:0 " % (RECORD_MARKER)
for chan_key in list(channel_item_dict.keys()):
if not chan_key.startswith(tuple(["group-title", "stream_url"])):
m3ustring += "%s=\"%s\" " % (chan_key, channel_item_dict[chan_key])
m3ustring += "group-title=\"%s\",%s\n" % (channel_item_dict["group-title"], channel_item_dict["group-titleb"])
m3ustring += "%s\n" % channel_item_dict["stream_url"]
fakefile.write(m3ustring)
channels_m3u = fakefile.getvalue()
resp = Response(status=200, response=channels_m3u, mimetype='audio/x-mpegurl')
resp.headers["content-disposition"] = "attachment; filename=%s" % fileName
return resp
if redirect_url:
return redirect("%s?retmessage=%s" % (redirect_url, urllib.parse.quote("%s Success" % method)))
else:
return "%s Success" % method

fHDHR_web/api/plugins.py (new file, 30 lines)
@@ -0,0 +1,30 @@
from flask import Response
import json
class Plugins_JSON():
endpoints = ["/api/plugins"]
endpoint_name = "api_plugins"
endpoint_methods = ["GET", "POST"]
def __init__(self, fhdhr):
self.fhdhr = fhdhr
def __call__(self, *args):
return self.get(*args)
def get(self, *args):
pluginsjson = {}
for plugin in list(self.fhdhr.plugins.plugins.keys()):
pluginsjson[plugin] = {
"name": plugin,
"manifest": self.fhdhr.plugins.plugins[plugin].manifest
}
plugins_json = json.dumps(pluginsjson, indent=4)
return Response(status=200,
response=plugins_json,
mimetype='application/json')
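
Since this endpoint simply mirrors the loaded plugin manifests, a quick client sketch (address assumed) is enough to inspect what is installed:

# Illustrative listing of loaded plugins via /api/plugins (address assumed).
import requests

plugins = requests.get("http://localhost:5004/api/plugins").json()
for name, info in plugins.items():
    print(name, info["manifest"])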

fHDHR_web/api/root_url.py (new file, 16 lines)
@@ -0,0 +1,16 @@
from flask import redirect
class Root_URL():
endpoints = ["/"]
endpoint_name = "page_root_html"
endpoint_methods = ["GET", "POST"]
def __init__(self, fhdhr):
self.fhdhr = fhdhr
def __call__(self, *args):
return self.get(*args)
def get(self, *args):
return redirect("/index")

fHDHR_web/api/route_list.py (new file, 37 lines)
@@ -0,0 +1,37 @@
from flask import Response, request, redirect, session
import urllib.parse
import json
class Route_List():
endpoints = ["/api/routes"]
endpoint_name = "api_routes"
endpoint_methods = ["GET", "POST"]
def __init__(self, fhdhr):
self.fhdhr = fhdhr
def __call__(self, *args):
return self.get(*args)
def get(self, *args):
method = request.args.get('method', default="get", type=str)
redirect_url = request.args.get('redirect', default=None, type=str)
if method == "get":
return_json = json.dumps(session["route_list"], indent=4)
return Response(status=200,
response=return_json,
mimetype='application/json')
else:
return "%s Invalid Method" % method
if redirect_url:
return redirect("%s?retmessage=%s" % (redirect_url, urllib.parse.quote("%s Success" % method)))
else:
return "%s Success" % method

Some files were not shown because too many files have changed in this diff.