Overview
Content teams spend hours researching what to write about. This workflow runs every morning, searches your niche topics on Google via Scavio, identifies trending angles by analyzing title patterns and recency, and produces a structured content brief with suggested headlines, key points to cover, and source URLs. It feeds directly into your editorial calendar so writers have briefs ready when they start their day. Researching 10 topics daily costs about $0.05 in credits.
Trigger
Cron 8 AM UTC daily
Schedule
Daily at 8:00 AM UTC
Workflow Steps
Load Topic Seeds
Read the list of niche topics and seed keywords from your editorial configuration file.
Search Trending Content
For each topic, call Scavio search on Google and collect the top 10 results with titles and snippets.
Analyze Title Patterns
Extract common patterns from top-ranking titles: listicles, how-tos, comparisons, and trending angles.
Generate Content Briefs
For each topic, produce a brief with a suggested headline, 5 key points, and 3 source URLs.
Export to Editorial Calendar
Write the briefs to a JSON file or push to your project management tool via webhook.
Python Implementation
# Stdlib + requests imports for the daily content-brief workflow.
import requests, os, json, re
from pathlib import Path
from datetime import date
from collections import Counter  # NOTE(review): unused in the visible code — confirm before removing

# Scavio API key must be present in the environment; SH is the shared header
# set for every API request.
API_KEY = os.environ["SCAVIO_API_KEY"]
SH = {"x-api-key": API_KEY, "Content-Type": "application/json"}

# Input topic list and output directory for generated briefs; the directory
# is created eagerly at import time (idempotent).
TOPICS_FILE = Path("content_topics.json")
BRIEFS_DIR = Path("content_briefs")
BRIEFS_DIR.mkdir(exist_ok=True)

# Regex sources used to classify top-ranking title styles; they are applied
# with re.IGNORECASE by detect_patterns.
PATTERNS = {
    "listicle": r"\d+\s+(best|top|ways|tips|tools|reasons)",
    "how_to": r"how to|step.by.step|guide|tutorial",
    "comparison": r"vs\.?|versus|compared|comparison|alternative",
    "news": r"2026|new|announce|launch|update",
}
def search_topic(topic: str, limit: int = 10) -> list:
    """Search Google for *topic* via the Scavio API.

    Args:
        topic: Seed keyword or phrase to search for.
        limit: Maximum number of organic results to return (default 10,
            matching the previous hard-coded cap).

    Returns:
        Up to *limit* organic-result dicts (title/snippet/url keys as
        returned by the API); empty list when the response has no
        "organic" key.

    Raises:
        requests.HTTPError: If the API responds with a non-2xx status.
    """
    resp = requests.post(
        "https://api.scavio.dev/api/v1/search",
        headers=SH,
        json={"query": topic, "platform": "google"},
        timeout=15,
    )
    resp.raise_for_status()
    # The API may omit "organic" entirely on empty result sets.
    return resp.json().get("organic", [])[:limit]
def detect_patterns(results: list, patterns=None) -> list:
    """Classify the title styles present in a list of search results.

    Args:
        results: Search-result dicts; each may carry a "title" key.
        patterns: Optional mapping of pattern name -> regex source string.
            Defaults to the module-level PATTERNS table.

    Returns:
        Names of every pattern that matches at least one individual title,
        in the mapping's iteration order.
    """
    if patterns is None:
        patterns = PATTERNS
    titles = [r.get("title", "") for r in results]
    detected = []
    for name, pattern in patterns.items():
        # Match each title on its own: the previous behavior joined all
        # titles into one space-separated string, which let a regex span
        # two adjacent titles and report a false positive (e.g. a title
        # ending "...top 5" next to one starting "best tools..." matched
        # the listicle pattern). IGNORECASE replaces the redundant
        # pre-lowering of the joined string.
        if any(re.search(pattern, title, re.IGNORECASE) for title in titles):
            detected.append(name)
    return detected
def generate_brief(topic: str, results: list) -> dict:
    """Build a content brief for *topic* from its search results.

    The brief carries a suggested angle (the first detected title
    pattern, or "informational" when none match), up to 5 deduplicated
    key points drawn from result snippets, and the top 3 results as
    sources.
    """
    detected = detect_patterns(results)

    # A key point is the first 15 words of a snippet; keep at most 5
    # distinct ones, in result order.
    key_points = []
    seen = set()
    for result in results:
        if len(key_points) >= 5:
            break
        candidate = " ".join(result.get("snippet", "").split()[:15])
        if not candidate or candidate in seen:
            continue
        seen.add(candidate)
        key_points.append(candidate)

    top_sources = [
        {"title": item.get("title", ""), "url": item.get("url", "")}
        for item in results[:3]
    ]
    return {
        "topic": topic,
        "suggested_angle": detected[0] if detected else "informational",
        "key_points": key_points,
        "sources": top_sources,
        "patterns_detected": detected,
    }
def run():
    """Daily entry point: search every configured topic and write today's
    briefs to a date-stamped JSON file under BRIEFS_DIR."""
    topic_list = json.loads(TOPICS_FILE.read_text())
    all_briefs = [generate_brief(t, search_topic(t)) for t in topic_list]

    target = BRIEFS_DIR / f"briefs_{date.today()}.json"
    target.write_text(json.dumps(all_briefs, indent=2))

    print(f"Generated {len(all_briefs)} content briefs for {date.today()}")
    for b in all_briefs:
        print(f" {b['topic']}: angle={b['suggested_angle']}, {len(b['key_points'])} points")
run()
JavaScript Implementation
// Shared Scavio request headers; the API key comes from the environment.
const SH = {'x-api-key': process.env.SCAVIO_API_KEY, 'Content-Type': 'application/json'};
const fs = await import('fs');
const BRIEFS_DIR = 'content_briefs';
// Best-effort mkdir: swallow the error when the directory already exists.
try { fs.mkdirSync(BRIEFS_DIR); } catch {}
const topics = JSON.parse(fs.readFileSync('content_topics.json', 'utf8'));
// Case-insensitive regexes used to classify top-ranking title styles.
const PATTERNS = {listicle:/\d+\s+(best|top|ways|tips|tools|reasons)/i, howTo:/how to|step.by.step|guide|tutorial/i, comparison:/vs\.?|versus|compared|comparison|alternative/i, news:/2026|new|announce|launch|update/i};
// Search Google for `topic` via the Scavio API and return up to `limit`
// organic results (default 10, matching the previous hard-coded cap).
// Throws on a non-2xx HTTP response — the original silently parsed error
// bodies, unlike the Python version's raise_for_status().
async function searchTopic(topic, limit = 10) {
  const r = await fetch('https://api.scavio.dev/api/v1/search', {method:'POST', headers:SH, body:JSON.stringify({query:topic, platform:'google'})});
  if (!r.ok) throw new Error(`Scavio search failed for "${topic}": HTTP ${r.status}`);
  return ((await r.json()).organic || []).slice(0, limit);
}
// Return the names of every pattern that matches at least one individual
// result title. Matching per-title (instead of against one space-joined
// string, as before) prevents a regex from spanning two adjacent titles
// and reporting a false positive (e.g. "...top 5" + "best tools..."
// previously matched the listicle pattern). `patterns` defaults to the
// module-level PATTERNS table.
function detectPatterns(results, patterns = PATTERNS) {
  const titles = results.map(r => r.title || '');
  return Object.entries(patterns)
    .filter(([, re]) => titles.some(t => re.test(t)))
    .map(([name]) => name);
}
// Assemble a content brief for `topic`: suggested angle (first detected
// title pattern, or 'informational' when none match), up to 5
// deduplicated key points drawn from snippets, and the top 3 results as
// sources.
function generateBrief(topic, results) {
  const detected = detectPatterns(results);

  // A key point is the first 15 words of a snippet; keep at most 5
  // distinct ones, in result order.
  const seen = new Set();
  const keyPoints = [];
  for (const result of results) {
    if (keyPoints.length >= 5) break;
    const candidate = (result.snippet || '').split(' ').slice(0, 15).join(' ');
    if (!candidate || seen.has(candidate)) continue;
    seen.add(candidate);
    keyPoints.push(candidate);
  }

  const sources = results.slice(0, 3).map(r => ({title: r.title || '', url: r.url || ''}));
  return {
    topic,
    suggestedAngle: detected[0] || 'informational',
    keyPoints,
    sources,
    patternsDetected: detected,
  };
}
// Main driver: search every topic sequentially and collect one brief each.
const briefs = [];
for (const topic of topics) {
  const results = await searchTopic(topic);
  briefs.push(generateBrief(topic, results));
}
// Write today's briefs to a date-stamped (YYYY-MM-DD) JSON file.
const today = new Date().toISOString().split('T')[0];
fs.writeFileSync(BRIEFS_DIR+'/briefs_'+today+'.json', JSON.stringify(briefs, null, 2));
console.log('Generated '+briefs.length+' content briefs');
briefs.forEach(b => console.log(' '+b.topic+': angle='+b.suggestedAngle+', '+b.keyPoints.length+' points'));
Platforms Used
Web search with knowledge graph, People Also Ask (PAA), and AI overviews