Simplify timing logic

This commit is contained in:
cryobry
2018-05-13 16:15:09 -07:00
parent e9801d33e5
commit db1900c7e7

View File

@@ -16,7 +16,8 @@ feedurls = ['http://linuxactionnews.com/rss', \
'http://feeds.soundcloud.com/users/soundcloud:users:146429914/sounds.rss', \
'http://billburr.libsyn.com/rss', \
'http://feeds.99percentinvisible.org/99percentinvisible', \
'http://rss.art19.com/tim-ferriss-show'] 'http://rss.art19.com/tim-ferriss-show', \
'http://feed.thisamericanlife.org/talpodcast']
# number of old episodes to keep
old_episodes_keep = 3
@@ -44,17 +45,6 @@ opener=urllib.request.build_opener()
opener.addheaders=[('User-Agent','Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/36.0.1941.0 Safari/537.36')]
urllib.request.install_opener(opener)
# create new timestamp
timestamp_file = os.path.join(download_root_dir, '.last_sync')
new_ts = int(time.time())
# get old timestamp
if os.path.exists(timestamp_file):
with open(timestamp_file) as f:
old_ts = int(f.readline())
else:
old_ts = 0
#################################
######### GET EPISODES ##########
#################################
@@ -81,47 +71,43 @@ for feed in feedurls:
# parse episodes
for episode in parsed['episodes']:
# get release time
# format release time to date format
release_time = int(episode['published'])
episode_date = datetime.datetime.fromtimestamp(release_time)
episode_date = episode_date.strftime("%y-%m-%d")
# debug
if debug == 1:
print(old_ts)
print(release_time)
print(episode_date)
if release_time > old_ts: # create filename based on episode date and/or episode title
# format release time to date format if episode_name_in_filename == 0:
episode_date = datetime.datetime.fromtimestamp(release_time) episode_title = episode_date
episode_date = episode_date.strftime("%y-%m-%d") else:
episode_title = episode_date + " " + episode['title']
# debug full_episode_path = os.path.splitext(os.path.join(podcast_download_dir, episode_title))[0]
if debug == 1:
print(episode_date)
# create filename based on episode date and/or episode title for enclosures in episode['enclosures']:
if episode_name_in_filename == 0: # get download url
episode_title = episode_date url = enclosures['url']
else: if debug == 1:
episode_title = episode_date + " " + episode['title'] print(url)
if enclosures['mime_type'] == 'audio/mpeg':
full_episode_path = os.path.splitext(os.path.join(podcast_download_dir, episode_title))[0] full_episode_path += '.mp3'
# download file and save to episode title
for enclosures in episode['enclosures']: if not os.path.exists(full_episode_path):
# get download url try:
url = enclosures['url'] print('Downloading ' + episode['title'])
if enclosures['mime_type'] == 'audio/mpeg': urllib.request.urlretrieve(url, full_episode_path)
full_episode_path += '.mp3' except urllib.error.HTTPError as e:
# download file and save to episode title print('HTTPError' + ': Could not download ' + podcast_title + '\n' + 'Reason: ' + e.reason + '\n' + 'URL: ' + url)
if not os.path.exists(full_episode_path): except urllib.error.URLError:
try: print('URLError')
print('Downloading ' + episode['title']) except urllib.error.ContentTooShortError:
urllib.request.urlretrieve(url, full_episode_path) print('Download failed, file corrupt!')
except urllib.error.HTTPError as e:
print('HTTPError' + ': Could not download ' + podcast_title + '\n' + 'Reason: ' + e.reason + '\n' + 'URL: ' + url)
except urllib.error.URLError:
print('URLError')
except urllib.error.ContentTooShortError:
print('Download failed, file corrupt!')
# remove old episodes
old_files = glob.glob(os.path.join(podcast_download_dir, '*'))
@@ -129,10 +115,6 @@ for feed in feedurls:
for file in old_files[:-old_episodes_keep]:
os.unlink(file)
# write new timestamp file
with open(timestamp_file, 'w') as outf:
outf.write(str(new_ts))
#################################
######## MANAGE HANDHELD ########
#################################