Bug fixes: batch AI summarization and quieter per-step logging

2026-01-26 13:24:40 +01:00
parent 29a7f12abe
commit 37eb03583c
11 changed files with 493 additions and 48 deletions


@@ -21,10 +21,6 @@ async def main():
setup_logger()
logger = get_logger()
logger.info("=" * 60)
logger.info("News Agent starting...")
logger.info("=" * 60)
try:
# Load configuration
config = get_config()
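
setup_logger() and get_logger() are project helpers whose bodies are not part of this diff. A minimal sketch of what they might look like, assuming a single module-level logger; the logger name and format below are illustrative, not taken from the repo:

import logging

_LOGGER_NAME = "news_agent"  # assumed name, not confirmed by this commit

def setup_logger(level: int = logging.INFO) -> None:
    # Configure one stream handler; calling this twice must not duplicate handlers.
    logger = logging.getLogger(_LOGGER_NAME)
    if logger.handlers:
        return
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
    logger.addHandler(handler)
    logger.setLevel(level)

def get_logger() -> logging.Logger:
    return logging.getLogger(_LOGGER_NAME)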
@@ -39,17 +35,18 @@ async def main():
# Initialize RSS fetcher
fetcher = RSSFetcher()
# Fetch articles from all sources
logger.info(f"Fetching from {len(config.rss_sources)} RSS sources...")
# Fetch articles from all sources (silently)
articles = await fetcher.fetch_all(config.rss_sources)
if not articles:
logger.warning("No articles fetched from any source")
await fetcher.close()
return
# Save articles to database (deduplication)
new_articles_count = await db.save_articles(articles)
# Log only the summary
logger.info(f"Total articles fetched from all sources: {len(articles)}")
logger.info(
f"Saved {new_articles_count} new articles (filtered {len(articles) - new_articles_count} duplicates)"
)
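
The summary log above reports how many of the fetched articles survived deduplication, so save_articles() has to return only the count of newly inserted rows. A hedged sketch of one way to do that, assuming an aiosqlite-backed store with a UNIQUE constraint on the article URL; the table and column names are assumptions, not from this commit:

import aiosqlite

async def save_articles(db_path: str, articles) -> int:
    # Insert articles, silently skipping URLs that are already stored,
    # and return how many rows were actually new.
    new_count = 0
    async with aiosqlite.connect(db_path) as db:
        for article in articles:
            # UNIQUE(url) on the table lets INSERT OR IGNORE drop duplicates.
            cursor = await db.execute(
                "INSERT OR IGNORE INTO articles (id, url, title, published) "
                "VALUES (?, ?, ?, ?)",
                (article.id, article.url, article.title, article.published),
            )
            new_count += cursor.rowcount  # 1 if inserted, 0 if it was a duplicate
        await db.commit()
    return new_count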
@@ -60,24 +57,19 @@ async def main():
unprocessed = await db.get_unprocessed_articles()
if not unprocessed:
logger.info("No new articles to process")
return
logger.info(f"Processing {len(unprocessed)} new articles with AI...")
# Initialize AI components
ai_client = OpenRouterClient()
filter_ai = ArticleFilter(ai_client)
summarizer = ArticleSummarizer(ai_client)
# Filter articles by relevance
logger.info("Filtering articles by relevance...")
# Filter articles by relevance (silently)
filtered_articles = await filter_ai.filter_articles(
unprocessed, max_articles=config.ai.filtering.max_articles
)
if not filtered_articles:
logger.warning("No relevant articles found after filtering")
# Mark all as processed but not included
for article in unprocessed:
await db.update_article_processing(
@@ -85,14 +77,15 @@ async def main():
)
return
logger.info(f"Selected {len(filtered_articles)} relevant articles")
# Summarize filtered articles (using batch processing for speed, silently)
# Extract just the articles for batch summarization
articles_to_summarize = [article for article, score in filtered_articles]
summaries_dict = await summarizer.summarize_batch(articles_to_summarize)
# Summarize filtered articles
logger.info("Generating AI summaries...")
# Create digest entries with summaries
digest_entries = []
for article, score in filtered_articles:
summary = await summarizer.summarize(article)
summary = summaries_dict[article.id]
# Update database
await db.update_article_processing(
@@ -116,8 +109,7 @@ async def main():
article.id, relevance_score=0.0, ai_summary="", included=False
)
# Generate email
logger.info("Generating email digest...")
# Generate email (silently)
generator = EmailGenerator()
date_str = datetime.now().strftime("%A, %B %d, %Y")
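
An earlier hunk in this file replaces the per-article summarizer.summarize() call inside the loop with a single summarize_batch() call whose result is keyed by article id. A minimal sketch of such a method, assuming the OpenRouter client exposes an awaitable completion call; the complete() method, the prompt, the article fields, and the concurrency limit are illustrative assumptions:

import asyncio

class ArticleSummarizer:
    def __init__(self, ai_client, max_concurrency: int = 5):
        self.ai_client = ai_client
        self._sem = asyncio.Semaphore(max_concurrency)  # assumed limit, not from the diff

    async def summarize(self, article) -> str:
        # Single-article path, kept so older call sites keep working.
        async with self._sem:
            return await self.ai_client.complete(f"Summarize this article:\n{article.content}")

    async def summarize_batch(self, articles) -> dict:
        # Run all summaries concurrently and key the results by article id,
        # matching the summaries_dict[article.id] lookup in the hunk above.
        summaries = await asyncio.gather(*(self.summarize(a) for a in articles))
        return {a.id: s for a, s in zip(articles, summaries)}

Compared with awaiting summarize() once per loop iteration, gathering the coroutines lets the per-article model calls overlap, which is the speed-up the "using batch processing for speed" comment refers to.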
@@ -127,16 +119,11 @@ async def main():
digest_entries, date_str, subject
)
# Send email
logger.info("Sending email...")
# Send email (silently)
sender = EmailSender()
success = sender.send(subject, html_content, text_content)
if success:
logger.info("=" * 60)
logger.info(f"Daily digest sent successfully with {len(digest_entries)} articles!")
logger.info("=" * 60)
else:
if not success:
logger.error("Failed to send email")
except Exception as e: