Workflow

Agentic SEO Weekly Content Ops

Run a full weekly SEO content pipeline: gap analysis, briefs, and draft generation. Automate content ops with search-driven intelligence.

Overview

SEO content operations involve multiple steps that are usually handled by different team members across different tools. This workflow consolidates the entire weekly pipeline: Monday morning it runs a content gap analysis by searching your target keywords and checking where you rank, generates prioritized content briefs based on gaps, and drafts outlines with suggested headings and key points sourced from top-ranking pages. One full cycle across 20 keywords costs about 20-40 credits ($0.10-$0.20) depending on depth.

Trigger

Cron Monday 7 AM UTC

Schedule

Weekly Monday 7 AM

Workflow Steps

1

Load SEO Keyword Targets

Read your target keyword list with current URL mappings and priority scores from config.

2

Run Content Gap Analysis

For each keyword, search Google via Scavio. Check if your domain ranks and identify gaps where you have no content.

3

Analyze Top-Ranking Content

For gap keywords, extract heading patterns, word count indicators, and content format from top 5 results.

4

Generate Prioritized Briefs

Create content briefs ranked by keyword difficulty and business priority. Include suggested headings and sources.

5

Draft Content Outlines

For the top 5 briefs, generate detailed outlines with H2/H3 structure, key points, and reference URLs.

6

Export to Content Pipeline

Write briefs and outlines to JSON files and optionally push to your project management tool.

Python Implementation

Python
import requests, os, json, re
from pathlib import Path
from datetime import date

# Fail fast at startup if the API key is not configured.
API_KEY = os.environ["SCAVIO_API_KEY"]
# Shared headers for every Scavio API request.
SH = {"x-api-key": API_KEY, "Content-Type": "application/json"}
# Domain whose rankings are checked in search results; replace with your own.
MY_DOMAIN = "yourdomain.com"
# Input file: a JSON list of {"keyword": str, "priority": int} objects.
KEYWORDS_FILE = Path("seo_keywords.json")
# Weekly briefs/outlines are written here; created on first run.
OUTPUT_DIR = Path("content_ops")
OUTPUT_DIR.mkdir(exist_ok=True)

def search_keyword(keyword: str) -> list:
    """Search Google for *keyword* via Scavio and return the organic results.

    Raises requests.HTTPError on a non-2xx response.
    """
    payload = {"query": keyword, "platform": "google"}
    response = requests.post(
        "https://api.scavio.dev/api/v1/search",
        headers=SH,
        json=payload,
        timeout=15,
    )
    response.raise_for_status()
    body = response.json()
    return body.get("organic", [])

def gap_analysis(keywords: list, *, search_fn=None, domain=None, max_position: int = 10) -> list:
    """Find keywords where the tracked domain ranks poorly or not at all.

    Args:
        keywords: list of dicts with "keyword" and optional "priority"
            (defaults to 5, matching the original behavior).
        search_fn: callable(keyword) -> list of organic results. Defaults to
            search_keyword; injectable for testing/offline runs.
        domain: domain substring to look for in result URLs. Defaults to
            MY_DOMAIN.
        max_position: rankings worse than this (or absent) count as a gap.

    Returns:
        Gap records sorted by priority, highest first. Each record carries
        the keyword, its priority, the current 1-based position (None when
        unranked), and the top 5 results for downstream brief generation.
    """
    # Resolve injectable collaborators at call time so module globals are
    # only touched when the defaults are actually used.
    if search_fn is None:
        search_fn = search_keyword
    if domain is None:
        domain = MY_DOMAIN
    gaps = []
    for kw_config in keywords:
        keyword = kw_config["keyword"]
        priority = kw_config.get("priority", 5)
        results = search_fn(keyword)
        # First 1-based position whose URL contains our domain, else None.
        my_position = next(
            (pos for pos, r in enumerate(results, start=1) if domain in r.get("url", "")),
            None,
        )
        if my_position is None or my_position > max_position:
            gaps.append({
                "keyword": keyword,
                "priority": priority,
                "current_position": my_position,
                "top_results": results[:5],
            })
    gaps.sort(key=lambda g: g["priority"], reverse=True)
    return gaps

def generate_brief(gap: dict) -> dict:
    """Build a content brief for one gap keyword from its top results.

    Classifies each top-ranking title as "listicle" (contains a digit),
    "guide" (matches how-to/guide), or "informational", and suggests the
    most common format.

    Args:
        gap: record produced by gap_analysis ("keyword", "priority",
            "top_results").

    Returns:
        Brief dict with suggested format, reference headings, and up to
        three source title/url pairs.
    """
    top = gap["top_results"]
    headings = [r.get("title", "") for r in top]
    formats = []
    for h in headings:
        if re.search(r"\d+", h):
            formats.append("listicle")
        elif re.search(r"how to|guide", h, re.IGNORECASE):
            formats.append("guide")
        else:
            formats.append("informational")
    # BUG FIX: max(set(formats), ...) tie-broke on set iteration order, which
    # varies between runs under str hash randomization. Taking the max over
    # the list itself is deterministic: ties go to the earliest-seen format.
    suggested_format = max(formats, key=formats.count) if formats else "informational"
    return {
        "keyword": gap["keyword"],
        "priority": gap["priority"],
        "suggested_format": suggested_format,
        "reference_headings": headings,
        "sources": [{"title": r.get("title", ""), "url": r.get("url", "")} for r in top[:3]],
    }

def generate_outline(brief: dict) -> dict:
    """Expand a brief into a draft outline with H1/H2 structure.

    The outline always opens with Introduction and "What is ..." sections,
    mirrors up to three competitor headings, and closes with a Conclusion.
    """
    kw = brief["keyword"]
    titled = kw.title()
    sections = [
        {"h2": "Introduction", "points": [f"Why {kw} matters in 2026", "Who this guide is for"]},
        {"h2": f"What is {titled}", "points": ["Definition and context", "Key terminology"]},
    ]
    # Borrow angles from the top-ranking pages (at most three).
    sections.extend(
        {"h2": heading, "points": ["Expand on this angle", "Add unique data or perspective"]}
        for heading in brief["reference_headings"][:3]
    )
    sections.append({"h2": "Conclusion", "points": ["Key takeaways", "Next steps for the reader"]})
    return {
        "keyword": kw,
        "h1": f"Comprehensive Guide: {titled}",
        "sections": sections,
    }

def run():
    """Execute one weekly cycle: gap analysis -> briefs -> outlines -> JSON export."""
    targets = json.loads(KEYWORDS_FILE.read_text())
    gaps = gap_analysis(targets)
    # Brief only the 10 highest-priority gaps; outline only the top 5 briefs.
    briefs = [generate_brief(gap) for gap in gaps[:10]]
    outlines = [generate_outline(brief) for brief in briefs[:5]]

    today = date.today()
    report = {"gaps": len(gaps), "briefs": briefs, "outlines": outlines}
    report_path = OUTPUT_DIR / f"content_ops_{today}.json"
    report_path.write_text(json.dumps(report, indent=2))

    print(f"Content ops for {today}: {len(gaps)} gaps, {len(briefs)} briefs, {len(outlines)} outlines")
    for brief in briefs[:5]:
        print(f"  [{brief['priority']}] {brief['keyword']} ({brief['suggested_format']})")

run()

JavaScript Implementation

JavaScript
// Shared headers for every Scavio API request; key is read from the environment.
const SH = {'x-api-key': process.env.SCAVIO_API_KEY, 'Content-Type': 'application/json'};
const fs = await import('fs');

// Domain whose rankings are checked in search results; replace with your own.
const MY_DOMAIN = 'yourdomain.com';
// Input: a JSON list of {keyword, priority} objects.
const keywords = JSON.parse(fs.readFileSync('seo_keywords.json', 'utf8'));
const OUTPUT_DIR = 'content_ops';
// Create the output directory, ignoring the error if it already exists.
try { fs.mkdirSync(OUTPUT_DIR); } catch {}

/**
 * Search Google for a keyword via Scavio's unified endpoint.
 * @param {string} keyword
 * @returns {Promise<Array>} organic results (empty array when none)
 * @throws {Error} on a non-2xx HTTP response
 */
async function searchKeyword(keyword) {
  const r = await fetch('https://api.scavio.dev/api/v1/search', {
    method: 'POST',
    headers: SH,
    body: JSON.stringify({query: keyword, platform: 'google'}),
  });
  // BUG FIX: mirror the Python version's raise_for_status(). Previously a
  // failed response was parsed anyway and silently returned [] — making an
  // API outage indistinguishable from "domain has no rankings".
  if (!r.ok) throw new Error(`Scavio search failed: HTTP ${r.status}`);
  return (await r.json()).organic || [];
}

/**
 * Find keywords where the tracked domain ranks outside the top 10 (or not at all).
 *
 * All parameters are optional and default to the module-level config, so
 * existing `gapAnalysis()` callers are unchanged; tests can inject a stub
 * search function and a domain.
 *
 * @param {Array} kwList - [{keyword, priority}] records (default: global keywords)
 * @param {Function} searchFn - async keyword => organic results (default: searchKeyword)
 * @param {string} domain - domain substring to look for in result URLs
 * @returns {Promise<Array>} gap records sorted by priority, highest first
 */
async function gapAnalysis(kwList = keywords, searchFn = searchKeyword, domain = MY_DOMAIN) {
  const gaps = [];
  for (const kwConfig of kwList) {
    const results = await searchFn(kwConfig.keyword);
    // 0-based index of the first result whose URL contains our domain; -1 if absent.
    const myPos = results.findIndex(r => (r.url || '').includes(domain));
    if (myPos < 0 || myPos >= 10) {
      gaps.push({
        keyword: kwConfig.keyword,
        priority: kwConfig.priority || 5,
        currentPosition: myPos >= 0 ? myPos + 1 : null,
        topResults: results.slice(0, 5),
      });
    }
  }
  return gaps.sort((a, b) => b.priority - a.priority);
}

/**
 * Build a content brief for one gap keyword from its top-ranking results.
 * Titles are classified as listicle / guide / informational; the most
 * common classification becomes the suggested format.
 */
function generateBrief(gap) {
  const headings = [];
  for (const result of gap.topResults) headings.push(result.title || '');

  // Tally formats in first-seen order so ties resolve the same way the
  // stable Object.entries sort did.
  const tally = new Map();
  for (const title of headings) {
    let format = 'informational';
    if (/\d+/.test(title)) format = 'listicle';
    else if (/how to|guide/i.test(title)) format = 'guide';
    tally.set(format, (tally.get(format) || 0) + 1);
  }
  let suggestedFormat = 'informational';
  let best = 0;
  for (const [format, count] of tally) {
    if (count > best) { best = count; suggestedFormat = format; }
  }

  const sources = gap.topResults.slice(0, 3).map(r => ({title: r.title || '', url: r.url || ''}));
  return {keyword: gap.keyword, priority: gap.priority, suggestedFormat, referenceHeadings: headings, sources};
}

/**
 * Expand a brief into a draft outline: intro, definition, up to three
 * competitor-derived sections, and a conclusion.
 */
function generateOutline(brief) {
  const kw = brief.keyword;
  const sections = [];
  sections.push({h2: 'Introduction', points: ['Why ' + kw + ' matters in 2026', 'Who this guide is for']});
  sections.push({h2: 'What is ' + kw, points: ['Definition and context', 'Key terminology']});
  for (const heading of brief.referenceHeadings.slice(0, 3)) {
    sections.push({h2: heading, points: ['Expand on this angle', 'Add unique data']});
  }
  sections.push({h2: 'Conclusion', points: ['Key takeaways', 'Next steps']});
  return {keyword: kw, h1: 'Comprehensive Guide: ' + kw, sections};
}

// Weekly driver: analyze gaps, brief the top 10, outline the top 5, export.
const gaps = await gapAnalysis();
const briefs = gaps.slice(0, 10).map(gap => generateBrief(gap));
const outlines = briefs.slice(0, 5).map(brief => generateOutline(brief));

const today = new Date().toISOString().split('T')[0];
const report = JSON.stringify({gaps: gaps.length, briefs, outlines}, null, 2);
fs.writeFileSync(`${OUTPUT_DIR}/content_ops_${today}.json`, report);

console.log(`Content ops: ${gaps.length} gaps, ${briefs.length} briefs, ${outlines.length} outlines`);
for (const b of briefs.slice(0, 5)) {
  console.log(`  [${b.priority}] ${b.keyword} (${b.suggestedFormat})`);
}

Platforms Used

Google

Web search with knowledge graph, PAA, and AI overviews

Frequently Asked Questions

SEO content operations involve multiple steps that are usually handled by different team members across different tools. This workflow consolidates the entire weekly pipeline: Monday morning it runs a content gap analysis by searching your target keywords and checking where you rank, generates prioritized content briefs based on gaps, and drafts outlines with suggested headings and key points sourced from top-ranking pages. One full cycle across 20 keywords costs about 20-40 credits ($0.10-$0.20) depending on depth.

This workflow runs on a cron trigger scheduled for Monday at 7 AM UTC — once per week.

This workflow uses the following Scavio platform: Google. Each platform is called via the same unified API endpoint.

Yes. Scavio's free tier includes 250 credits per month with no credit card required. That is enough to test and validate this workflow before scaling it.

Agentic SEO Weekly Content Ops

Run a full weekly SEO content pipeline: gap analysis, briefs, and draft generation. Automate content ops with search-driven intelligence.