# podcastrr/app/services/podcast_updater.py
"""
Podcast updater service for Podcastrr.
"""
import logging
from datetime import datetime, timedelta
from flask import current_app
from app.models.database import db
from app.models.podcast import Podcast, Episode
from app.models.settings import Settings
from app.services.podcast_search import get_podcast_episodes
from app.services.podcast_downloader import download_episode
# Set up logging
logger = logging.getLogger(__name__)
def update_all_podcasts():
    """
    Refresh every podcast stored in the database.

    Iterates all podcasts, delegating the per-podcast work to
    ``update_podcast``; a failure on one podcast is logged and counted
    without aborting the rest of the run.

    Returns:
        dict: Aggregate counters for the run: ``podcasts_updated``,
        ``new_episodes``, ``episodes_downloaded``, and ``errors``.
    """
    totals = {
        'podcasts_updated': 0,
        'new_episodes': 0,
        'episodes_downloaded': 0,
        'errors': 0,
    }
    for podcast in Podcast.query.all():
        try:
            outcome = update_podcast(podcast.id)
            totals['podcasts_updated'] += 1
            totals['new_episodes'] += outcome['new_episodes']
            totals['episodes_downloaded'] += outcome['episodes_downloaded']
        except Exception as e:
            logger.error(f"Error updating podcast {podcast.title}: {str(e)}")
            totals['errors'] += 1
    return totals
def update_podcast(podcast_id):
    """
    Update a specific podcast.

    Fetches the podcast's feed, records ``last_checked``, inserts any
    episodes not already present (matched by GUID), and optionally
    auto-downloads new episodes when the podcast has ``auto_download``
    set. If the feed yields no episodes and the podcast has an iTunes
    ``external_id``, the feed URL is re-resolved via iTunes and retried.

    Args:
        podcast_id (int): ID of the podcast to update.

    Returns:
        dict: Statistics about the update process with keys
        ``new_episodes``, ``episodes_downloaded``, ``feed_status``
        (``'success'``, ``'no_episodes'``, or ``'error'``), and
        ``error`` (only present on failure).

    Raises:
        Exception: Re-raises any unexpected error after rolling back the
        session (the per-episode and feed-refresh errors are swallowed).
    """
    # 404s out of the request if the podcast doesn't exist (Flask-SQLAlchemy).
    podcast = Podcast.query.get_or_404(podcast_id)
    stats = {
        'new_episodes': 0,
        'episodes_downloaded': 0,
        'feed_status': 'success'
    }
    try:
        logger.info(f"Updating podcast: {podcast.title} (ID: {podcast.id})")
        logger.info(f"Feed URL: {podcast.feed_url}")
        # Get episodes from feed
        episodes = get_podcast_episodes(podcast.feed_url)
        # Update podcast last_checked timestamp
        podcast.last_checked = datetime.utcnow()
        if not episodes:
            logger.warning(f"No episodes found for podcast: {podcast.title}")
            stats['feed_status'] = 'no_episodes'
            # Check if we need to refresh the feed URL from iTunes
            if podcast.external_id:
                try:
                    # Local import — presumably avoids a circular import; confirm.
                    from app.services.podcast_search import search_podcasts
                    logger.info(f"Trying to refresh feed URL from iTunes for podcast ID: {podcast.external_id}")
                    podcast_data = search_podcasts(podcast_id=podcast.external_id)
                    # Only switch URLs when iTunes returns a different, non-empty one.
                    if podcast_data and podcast_data.get('feed_url') and podcast_data['feed_url'] != podcast.feed_url:
                        logger.info(f"Updated feed URL from {podcast.feed_url} to {podcast_data['feed_url']}")
                        podcast.feed_url = podcast_data['feed_url']
                        # Persist the corrected URL immediately, before re-fetching.
                        db.session.commit()
                        # Try again with the new feed URL
                        episodes = get_podcast_episodes(podcast.feed_url)
                        logger.info(f"Found {len(episodes)} episodes with updated feed URL")
                except Exception as e:
                    # Best-effort refresh: failure here leaves the old URL in place.
                    logger.error(f"Error refreshing feed URL: {str(e)}")
        # Process each episode
        for episode_data in episodes:
            # Skip episodes without required fields
            if not episode_data.get('guid'):
                logger.warning(f"Skipping episode without GUID: {episode_data.get('title', 'Unknown')}")
                continue
            if not episode_data.get('audio_url'):
                logger.warning(f"Skipping episode without audio URL: {episode_data.get('title', 'Unknown')}")
                continue
            # Check if episode already exists
            existing = Episode.query.filter_by(guid=episode_data['guid']).first()
            if not existing:
                # Create new episode
                try:
                    episode = Episode(
                        podcast_id=podcast.id,
                        title=episode_data.get('title', ''),
                        description=episode_data.get('description', ''),
                        audio_url=episode_data.get('audio_url', ''),
                        image_url=episode_data.get('image_url', podcast.image_url),  # Use podcast image if episode has none
                        published_date=episode_data.get('published_date'),
                        duration=episode_data.get('duration'),
                        file_size=episode_data.get('file_size'),
                        episode_number=episode_data.get('episode_number'),
                        guid=episode_data['guid'],
                        downloaded=False
                    )
                    db.session.add(episode)
                    stats['new_episodes'] += 1
                    logger.info(f"Added new episode: {episode.title}")
                    # Auto-download if enabled
                    if podcast.auto_download and episode.audio_url:
                        try:
                            # NOTE(review): episode is not yet committed here, so it
                            # has no DB id — confirm download_episode tolerates that.
                            download_episode(episode)
                            stats['episodes_downloaded'] += 1
                            logger.info(f"Auto-downloaded episode: {episode.title}")
                        except Exception as e:
                            # A failed download doesn't block the rest of the feed.
                            logger.error(f"Error auto-downloading episode {episode.title}: {str(e)}")
                except Exception as e:
                    logger.error(f"Error adding episode: {str(e)}")
        # Update podcast last_updated timestamp if new episodes were found
        if stats['new_episodes'] > 0:
            podcast.last_updated = datetime.utcnow()
        # Commit unconditionally so last_checked (set above) is always persisted.
        db.session.commit()
        logger.info(f"Podcast update completed: {stats}")
        return stats
    except Exception as e:
        # Roll back pending inserts/updates, annotate stats, and let the
        # caller decide how to handle the failure.
        db.session.rollback()
        logger.error(f"Error updating podcast {podcast.title}: {str(e)}")
        stats['feed_status'] = 'error'
        stats['error'] = str(e)
        raise
def schedule_updates():
    """
    Run a full podcast refresh on behalf of a scheduler.

    Intended to be invoked periodically (e.g. by APScheduler). Any
    failure is logged rather than propagated so the scheduler keeps
    running.
    """
    logger.info("Starting scheduled podcast updates")
    try:
        summary = update_all_podcasts()
    except Exception as e:
        logger.error(f"Error during scheduled update: {str(e)}")
    else:
        logger.info(f"Scheduled update completed: {summary}")
def clean_old_downloads():
    """
    Remove downloaded episodes that have aged out.

    Intended to be invoked periodically (e.g. by APScheduler). Any
    failure is logged rather than propagated so the scheduler keeps
    running.
    """
    from app.services.podcast_downloader import delete_old_episodes

    logger.info("Starting cleanup of old downloads")
    try:
        removed = delete_old_episodes()
    except Exception as e:
        logger.error(f"Error during cleanup: {str(e)}")
    else:
        logger.info(f"Deleted {removed} old episodes")