Overview
AI Overviews are reshaping organic traffic, but most rank trackers ignore them. This workflow runs every morning, searches your keyword list on Google via Scavio, and records whether your brand appears in the AI Overview, which position you hold in organic results, and whether you are cited as a source. Over time it builds a trend dataset so you can correlate content changes with AI Overview visibility. Tracking 100 keywords per day costs about $0.50 in credits.
Trigger
Cron 7 AM UTC daily
Schedule
Daily 7 AM
Workflow Steps
Load Keyword List
Read the list of target keywords and associated brand URLs from a local JSON file.
Search Each Keyword
For each keyword, call Scavio search on Google and capture the full SERP response.
Extract AI Overview Data
Parse the AI Overview section from each SERP. Record whether brand is mentioned, cited, or absent.
Record Organic Position
Find the brand URL in organic results and record its position for baseline comparison.
Append to Trend Database
Write the daily snapshot to a JSONL file or database table for time-series analysis.
Alert on Significant Changes
Compare against previous day. Alert if a keyword dropped out of AI Overviews or organic top 10.
Python Implementation
import json
import os
from datetime import date
from pathlib import Path
from urllib.parse import urlparse

import requests
API_KEY = os.environ["SCAVIO_API_KEY"]
SH = {"x-api-key": API_KEY, "Content-Type": "application/json"}
BRAND_DOMAIN = "yourdomain.com"
KEYWORDS_FILE = Path("keywords.json")
TREND_FILE = Path("ai_overview_trends.jsonl")
def search(keyword: str) -> dict:
resp = requests.post(
"https://api.scavio.dev/api/v1/search",
headers=SH,
json={"query": keyword, "platform": "google"},
timeout=15,
)
resp.raise_for_status()
return resp.json()
def analyze_serp(serp: dict) -> dict:
ai_ov = serp.get("ai_overview", {})
sources = ai_ov.get("sources", []) if ai_ov else []
in_overview = any(BRAND_DOMAIN in s.get("url", "") for s in sources)
organic = serp.get("organic", [])
org_pos = next((i + 1 for i, r in enumerate(organic) if BRAND_DOMAIN in r.get("url", "")), None)
return {"in_ai_overview": in_overview, "organic_position": org_pos}
def run():
keywords = json.loads(KEYWORDS_FILE.read_text())
prev_data = {}
if TREND_FILE.exists():
lines = TREND_FILE.read_text().strip().split("\n")
if lines and lines[-1]:
prev_data = {e["keyword"]: e for e in json.loads(lines[-1]).get("keywords", [])}
today_snapshot = {"date": str(date.today()), "keywords": []}
alerts = []
for kw in keywords:
serp = search(kw)
result = analyze_serp(serp)
entry = {"keyword": kw, **result}
today_snapshot["keywords"].append(entry)
prev = prev_data.get(kw, {})
if prev.get("in_ai_overview") and not result["in_ai_overview"]:
alerts.append(f"LOST AI Overview: {kw}")
if prev.get("organic_position") and prev["organic_position"] <= 10 and (result["organic_position"] is None or result["organic_position"] > 10):
alerts.append(f"Dropped out of top 10: {kw}")
with open(TREND_FILE, "a") as f:
f.write(json.dumps(today_snapshot) + "\n")
print(f"Tracked {len(keywords)} keywords on {date.today()}")
for a in alerts:
print(f" ALERT: {a}")
run()

JavaScript Implementation
const SH = {'x-api-key': process.env.SCAVIO_API_KEY, 'Content-Type': 'application/json'};
const fs = await import('fs');
const BRAND_DOMAIN = 'yourdomain.com';
const keywords = JSON.parse(fs.readFileSync('keywords.json', 'utf8'));
const TREND_FILE = 'ai_overview_trends.jsonl';
async function search(keyword) {
const r = await fetch('https://api.scavio.dev/api/v1/search', {method:'POST', headers:SH, body:JSON.stringify({query:keyword, platform:'google'})});
return r.json();
}
function analyzeSerp(serp) {
const sources = (serp.ai_overview || {}).sources || [];
const inOverview = sources.some(s => (s.url || '').includes(BRAND_DOMAIN));
const orgIdx = (serp.organic || []).findIndex(r => (r.url || '').includes(BRAND_DOMAIN));
return {inAiOverview: inOverview, organicPosition: orgIdx >= 0 ? orgIdx + 1 : null};
}
let prevData = {};
try {
const lines = fs.readFileSync(TREND_FILE, 'utf8').trim().split('\n');
const last = JSON.parse(lines[lines.length - 1]);
for (const e of last.keywords || []) prevData[e.keyword] = e;
} catch {}
const snapshot = {date: new Date().toISOString().split('T')[0], keywords: []};
const alerts = [];
for (const kw of keywords) {
const serp = await search(kw);
const result = analyzeSerp(serp);
snapshot.keywords.push({keyword:kw, ...result});
const prev = prevData[kw] || {};
if (prev.inAiOverview && !result.inAiOverview) alerts.push('LOST AI Overview: '+kw);
if (prev.organicPosition && prev.organicPosition <= 10 && (result.organicPosition === null || result.organicPosition > 10)) alerts.push('Dropped top 10: '+kw);
}
fs.appendFileSync(TREND_FILE, JSON.stringify(snapshot)+'\n');
console.log('Tracked '+keywords.length+' keywords');
alerts.forEach(a => console.log(' ALERT: '+a));

Platforms Used
Web search with knowledge graph, PAA, and AI overviews