Google News aggregates current news articles from thousands of publishers ranked by relevance and recency. This data is valuable for media monitoring, topic trend analysis, content curation, and alerting systems. While RSS feeds cover individual publishers, the Google News search surface reflects what Google ranks highest for any given query. The Scavio API returns news results in structured JSON including title, source, publication date, and snippet. This tutorial shows how to fetch news for any topic and build a simple news digest.
Prerequisites
- Python 3.8 or higher
- requests library installed
- A Scavio API key
- Basic Python string formatting skills
Walkthrough
Step 1: Fetch news results for a topic
Query the Scavio endpoint with a news-style query. Prefix with site:news.google.com or use a news-specific query format to surface news articles.
def get_news(topic: str) -> list[dict]:
    """Fetch news-style search results for *topic* from the Scavio API.

    Sends a POST search request with "<topic> news" as the query and
    returns the list of news results, falling back to organic results
    when the response has no "news_results" key.

    Raises:
        requests.HTTPError: if the API responds with an error status.
    """
    response = requests.post(
        "https://api.scavio.dev/api/v1/search",
        headers={"x-api-key": API_KEY},
        json={"query": f"{topic} news", "country_code": "us"},
        timeout=30,  # fail fast instead of hanging on a stalled connection
    )
    response.raise_for_status()
    # Parse the body once instead of calling response.json() three times;
    # the original also evaluated the .get() default eagerly, re-parsing
    # the response even when "news_results" was present.
    data = response.json()
    if "news_results" in data:
        return data["news_results"]
    return data.get("organic_results", [])

# Step 2: Extract article metadata
Parse each news result for its title, source, date, snippet, and link.
def parse_article(article: dict) -> dict:
    """Project a raw news result down to the fields the digest uses.

    Missing fields come back as None rather than raising.
    """
    wanted = ("title", "source", "date", "snippet", "link")
    return {field: article.get(field) for field in wanted}

# Step 3: Filter by recency
Keep only articles published within the last 24 hours for breaking news monitoring.
from datetime import datetime, timedelta
def filter_recent(articles: list[dict], hours: int = 24) -> list[dict]:
    """Keep articles published within the last *hours* hours.

    Google News dates typically arrive as relative strings such as
    "2 hours ago" or "3 days ago" (see the sample output below).  An
    article whose date cannot be parsed is KEPT, so an unexpected
    format degrades to "include everything" rather than silently
    dropping news — this preserves the original best-effort fallback.

    The original implementation computed a cutoff but never used it,
    so it returned every article unfiltered; this version actually
    applies the recency window.

    Args:
        articles: parsed news results (each may carry a "date" string).
        hours: recency window; defaults to the last 24 hours.

    Returns:
        Articles no older than the cutoff, in their original order.
    """
    cutoff = timedelta(hours=hours)
    # Map a relative-date unit to the matching timedelta keyword.
    units = {"minute": "minutes", "hour": "hours", "day": "days", "week": "weeks"}
    recent = []
    for a in articles:
        parts = (a.get("date") or "").lower().split()
        # Expect exactly "<number> <unit>(s) ago"; anything else is kept.
        if len(parts) == 3 and parts[0].isdigit() and parts[2] == "ago":
            unit = parts[1].rstrip("s")  # "hours" -> "hour", "hour" -> "hour"
            if unit in units:
                age = timedelta(**{units[unit]: int(parts[0])})
                if age > cutoff:
                    continue  # older than the window — drop it
        recent.append(a)
    return recent

# Step 4: Build a news digest
Format the articles as a plain-text digest suitable for email or Slack delivery.
def build_digest(topic: str, articles: list[dict]) -> str:
    """Format up to ten articles as a plain-text digest.

    Suitable for email or Slack delivery.  Every field is read with
    .get() so a single malformed article cannot break the digest —
    the original indexed a['title'] directly and raised KeyError on a
    result with no title, while all other fields already had defaults.

    Args:
        topic: headline topic for the digest title line.
        articles: parsed news results; only the first ten are used.

    Returns:
        A newline-joined plain-text digest.
    """
    lines = [f"News Digest: {topic}\n" + "=" * 40]
    for a in articles[:10]:
        lines.append(f"\n{a.get('title', 'No title')}")
        lines.append(f"Source: {a.get('source', 'Unknown')} | {a.get('date', '')}")
        lines.append(a.get('snippet', ''))
    return "\n".join(lines)

# Python Example
import os
import requests

# Read the key from the environment, with a tutorial placeholder default.
API_KEY = os.environ.get("SCAVIO_API_KEY", "your_scavio_api_key")
ENDPOINT = "https://api.scavio.dev/api/v1/search"


def get_news(topic: str) -> list[dict]:
    """Fetch news results for *topic*; fall back to organic results.

    Raises:
        requests.HTTPError: if the API responds with an error status.
    """
    r = requests.post(
        ENDPOINT,
        headers={"x-api-key": API_KEY},
        json={"query": f"{topic} news", "country_code": "us"},
        timeout=30,  # fail fast instead of hanging on a dead connection
    )
    r.raise_for_status()
    data = r.json()
    # Check the key first: dict.get evaluates its default eagerly, so the
    # original built the organic_results fallback on every call.
    if "news_results" in data:
        return data["news_results"]
    return data.get("organic_results", [])


def digest(topic: str) -> str:
    """Build a plain-text digest of up to eight articles for *topic*."""
    articles = get_news(topic)
    lines = [f"=== {topic} News ==="]
    for a in articles[:8]:
        lines.append(f"\n{a.get('title', 'No title')}")
        lines.append(f"{a.get('source', '')} | {a.get('date', '')}")
        lines.append(a.get("snippet", ""))
    return "\n".join(lines)


if __name__ == "__main__":
    print(digest("artificial intelligence"))

# JavaScript Example
const API_KEY = process.env.SCAVIO_API_KEY || "your_scavio_api_key";
const ENDPOINT = "https://api.scavio.dev/api/v1/search";

/**
 * Fetch news results for a topic from the Scavio API.
 * Falls back to organic_results when news_results is absent.
 * @param {string} topic - search topic; " news" is appended to the query.
 * @returns {Promise<Array<object>>} list of article objects.
 * @throws {Error} when the API responds with a non-2xx status.
 */
async function getNews(topic) {
  const res = await fetch(ENDPOINT, {
    method: "POST",
    headers: { "x-api-key": API_KEY, "Content-Type": "application/json" },
    body: JSON.stringify({ query: `${topic} news`, country_code: "us" })
  });
  // fetch() does not reject on HTTP errors — surface them explicitly
  // instead of silently parsing an error body as results.
  if (!res.ok) {
    throw new Error(`Scavio API request failed: ${res.status}`);
  }
  const data = await res.json();
  return data.news_results || data.organic_results || [];
}

/** Print a short console digest of up to eight articles. */
async function main() {
  const articles = await getNews("artificial intelligence");
  articles.slice(0, 8).forEach(a => {
    console.log(`\n${a.title}`);
    console.log(`${a.source || ""} | ${a.date || ""}`);
    console.log(a.snippet || "");
  });
}

main().catch(console.error);

// Expected Output
{
"news_results": [
{
"title": "OpenAI Releases New Model Family in 2026",
"source": "TechCrunch",
"date": "2 hours ago",
"snippet": "OpenAI announced a new series of foundation models targeting...",
"link": "https://techcrunch.com/2026/04/openai-new-models"
}
]
}