This commit is contained in:
deathbybandaid 2020-12-06 12:38:52 -05:00
parent c801528e53
commit 85eb3009a0

View File

@ -1,3 +1,4 @@
import datetime
class OriginEPG():
@ -5,9 +6,14 @@ class OriginEPG():
def __init__(self, fhdhr):
self.fhdhr = fhdhr
self.base_api = 'https://valencia-app-mds.xumo.com/v2/'
def update_epg(self, fhdhr_channels):
programguide = {}
todaydate = datetime.datetime.utcnow().date()
self.remove_stale_cache(todaydate)
for fhdhr_id in list(fhdhr_channels.list.keys()):
c = fhdhr_channels.list[fhdhr_id].dict
@ -20,8 +26,56 @@ class OriginEPG():
"thumbnail": c["thumbnail"],
"listing": [],
}
cached_items = self.get_cached(c["origin_id"])
# for hour_num in range(1, 24): print(len(cached_items))
# print(hour_num)
return programguide
def get_cached(self, channel_id):
    """Ensure listings for all 24 hours of today are cached for *channel_id*,
    then return every cached item tracked in the "cache_list" index.

    Each hour is fetched (or served from cache) via get_cached_item(); the
    return value is the full list of cached payloads, not just this channel's.
    """
    for hour_num in range(0, 24):
        # BUG FIX: the original format string had only two %s placeholders
        # for three arguments (raises "not all arguments converted") and
        # never prepended the API base, so the URL was relative and broken.
        lineup_url = "%schannels/channel/%s/broadcast.json?hour=%s" % (self.base_api, channel_id, hour_num)
        self.get_cached_item(channel_id, hour_num, lineup_url)
    cache_list = self.fhdhr.db.get_cacheitem_value("cache_list", "offline_cache", "origin") or []
    return [self.fhdhr.db.get_cacheitem_value(x, "offline_cache", "origin") for x in cache_list]
def get_cached_item(self, channel_id, cache_key, url):
    """Return the cached listing for (channel_id, hour), fetching from *url*
    and caching the JSON payload on a miss.

    *cache_key* arrives as an hour number (0-23) and is rewritten into the
    string "<channel_id>_<epoch-seconds of that hour today>" — the exact
    shape remove_stale_cache() relies on when it splits on "_" and calls
    float() on the tail.

    Returns the listing payload, or None when the fetch fails.
    """
    # Hour-align today's timestamp via the same '%Y-%m-%dT%H:00:00' string
    # round-trip used by remove_stale_cache(), so both produce comparable
    # epoch values.
    # BUG FIX: the original passed a datetime object to strptime() (which
    # requires a string) and then concatenated the resulting float onto a
    # string — both TypeErrors at runtime.
    hour_stamp = datetime.datetime.today().replace(hour=cache_key).strftime('%Y-%m-%dT%H:00:00')
    hour_epoch = datetime.datetime.strptime(hour_stamp, '%Y-%m-%dT%H:%M:%S').timestamp()
    cache_key = str(channel_id) + "_" + str(hour_epoch)
    cacheitem = self.fhdhr.db.get_cacheitem_value(str(cache_key), "offline_cache", "origin")
    if cacheitem:
        self.fhdhr.logger.info('FROM CACHE: ' + str(cache_key))
        return cacheitem
    else:
        self.fhdhr.logger.info('Fetching: ' + url)
        try:
            resp = self.fhdhr.web.session.get(url)
        except self.fhdhr.web.exceptions.HTTPError:
            # Best-effort: a failed fetch is logged and skipped, never fatal.
            self.fhdhr.logger.info('Got an error! Ignoring it.')
            return
        # NOTE(review): if the session behaves like requests, HTTPError is
        # only raised after raise_for_status(); 4xx/5xx bodies may reach
        # json() here — confirm the fhdhr.web wrapper's behavior.
        result = resp.json()
        self.fhdhr.db.set_cacheitem_value(str(cache_key), "offline_cache", result, "origin")
        # Track the new key in the index so get_cached()/remove_stale_cache()
        # can enumerate it later.
        cache_list = self.fhdhr.db.get_cacheitem_value("cache_list", "offline_cache", "origin") or []
        cache_list.append(str(cache_key))
        self.fhdhr.db.set_cacheitem_value("cache_list", "offline_cache", cache_list, "origin")
        # Return the fresh payload so hit and miss paths agree.
        return result
def remove_stale_cache(self, todaydate):
    """Delete every cached item whose key timestamp predates *todaydate*.

    Since a date's %H renders as "00", the cutoff is midnight at the start
    of *todaydate*; key timestamps are the float tail after the last "_".
    """
    cutoff_text = todaydate.strftime('%Y-%m-%dT%H:00:00')
    cutoff_epoch = datetime.datetime.strptime(cutoff_text, '%Y-%m-%dT%H:%M:%S').timestamp()
    known_keys = self.fhdhr.db.get_cacheitem_value("cache_list", "offline_cache", "origin") or []
    surviving = []
    for key in known_keys:
        key_epoch = str(key).split("_")[-1]
        if float(key_epoch) < cutoff_epoch:
            # Stale: drop the payload; the index is rewritten below.
            self.fhdhr.db.delete_cacheitem_value(str(key), "offline_cache", "origin")
            self.fhdhr.logger.info('Removing stale cache: ' + str(key))
        else:
            surviving.append(key)
    self.fhdhr.db.set_cacheitem_value("cache_list", "offline_cache", surviving, "origin")
def clear_cache(self):
    """Wipe every cached item listed in the "cache_list" index, then the
    index entry itself."""
    indexed_keys = self.fhdhr.db.get_cacheitem_value("cache_list", "offline_cache", "origin") or []
    for key in indexed_keys:
        self.fhdhr.db.delete_cacheitem_value(key, "offline_cache", "origin")
        self.fhdhr.logger.info('Removing cache: ' + str(key))
    self.fhdhr.db.delete_cacheitem_value("cache_list", "offline_cache", "origin")