class OriginEPG():
    """EPG fetch/caching helpers for the Xumo origin.

    Reconstructed from a patch chunk: this diff also touches ``update_epg``
    and ``get_cached_item``, whose bodies are only partially visible in the
    hunk and are therefore not reproduced here.
    """

    def getDescription(self, description):
        """Return the most detailed description text available.

        description: dict that may contain 'large'/'medium'/'small'/'tiny'
        keys. Returns the first present in that preference order, or None
        when none of them is present.
        """
        # Prefer the most detailed variant the API supplied.
        for size in ("large", "medium", "small", "tiny"):
            if size in description:
                return description[size]
        return None

    def xumo_xmltime(self, tm):
        """Convert a Xumo millisecond epoch timestamp to an XMLTV time string.

        tm: epoch milliseconds (int/float).
        Returns 'YYYYMMDDHHMMSS +0000'.

        NOTE(review): datetime.fromtimestamp() converts to *local* time, yet
        the result is labelled '+0000' (UTC). On a non-UTC host the offset
        label is wrong -- confirm whether utcfromtimestamp was intended.
        """
        stamp = datetime.datetime.fromtimestamp(tm / 1000.0)
        return stamp.strftime('%Y%m%d%H%M%S') + " +0000"

    def get_cached_content(self, content_id):
        """Return asset metadata for content_id, using the DB cache when possible.

        Looks the asset up in the "content_cache" table first; on a miss,
        fetches it from the Xumo asset API and caches the parsed JSON.
        Returns the metadata dict, or None when the HTTP fetch fails.
        """
        cache_key = str(content_id)
        cacheitem = self.fhdhr.db.get_cacheitem_value(cache_key, "content_cache", "origin")
        if cacheitem:
            self.fhdhr.logger.info('FROM CACHE: ' + cache_key)
            return cacheitem

        content_url = ("%sassets/asset/%s.json"
                       "?f=title&f=providers&f=descriptions&f=runtime&f=availableSince"
                       % (self.base_api, content_id))
        self.fhdhr.logger.info('Fetching: ' + content_url)
        try:
            resp = self.fhdhr.web.session.get(content_url)
        except self.fhdhr.web.exceptions.HTTPError:
            # Best-effort: a failed fetch is logged and the caller gets None.
            self.fhdhr.logger.info('Got an error! Ignoring it.')
            return None
        result = resp.json()
        # BUGFIX: the fetched asset was written to the "offline_cache" table
        # but read back (above) from "content_cache", so the cache never hit
        # and every call re-fetched. Write to the table the lookup reads.
        self.fhdhr.db.set_cacheitem_value(cache_key, "content_cache", result, "origin")
        return result

    def get_cached(self, channel_id):
        """Refresh the 24 hourly broadcast lineups for channel_id, then return
        the cached lineup items belonging to that channel.
        """
        for hour_num in range(0, 24):
            lineup_url = "%schannels/channel/%s/broadcast.json?hour=%s" % (self.base_api, channel_id, hour_num)
            self.get_cached_item(channel_id, hour_num, lineup_url)
        cache_list = self.fhdhr.db.get_cacheitem_value("cache_list", "offline_cache", "origin") or []
        # NOTE(review): get_cached_item derives its cache keys from a
        # timestamp; whether those keys are prefixed with channel_id is not
        # visible in this chunk. If they are not, this filter matches nothing
        # -- confirm the key format against get_cached_item.
        return [self.fhdhr.db.get_cacheitem_value(x, "offline_cache", "origin")
                for x in cache_list if x.startswith(channel_id)]