Effective prospect research requires fresh data: company news, technology stack, hiring patterns, and recent funding. Manual research per prospect takes 15-30 minutes. This tutorial builds an automated pipeline that uses the Scavio API to search for prospect signals, extract key facts from SERP snippets, and compile a structured prospect profile. A quick three-search snapshot costs about $0.015; the full four-search profile built in Step 3 costs about $0.020. Either takes under 5 seconds to generate.
Prerequisites
- Python 3.9+ installed
- requests library installed
- A Scavio API key from scavio.dev
- A list of prospect company names
Walkthrough
Step 1: Search for company background and news
Run targeted searches to pull recent news, company description, and key signals about each prospect.
import os, requests, time, re
# API key is read from the environment; this raises KeyError immediately if unset,
# which is preferable to failing later with an opaque 401.
SCAVIO_KEY = os.environ['SCAVIO_API_KEY']
# Shared headers for every request: key auth plus JSON body content type.
H = {'x-api-key': SCAVIO_KEY, 'Content-Type': 'application/json'}
# Single search endpoint used by all helpers below.
URL = 'https://api.scavio.dev/api/v1/search'
def search_company(company: str, query_suffix: str, num: int = 5) -> list:
    """Run one Scavio SERP search for `company` plus a query suffix.

    Args:
        company: Prospect company name, prepended to the query.
        query_suffix: Search terms appended after the company name.
        num: Number of organic results to request (default 5).

    Returns:
        The list of organic result dicts, or [] when the response has none.
    """
    resp = requests.post(
        URL,
        headers=H,
        json={'query': f'{company} {query_suffix}', 'country_code': 'us', 'num_results': num},
        # requests has no default timeout; without one a stalled connection
        # hangs the whole research pipeline indefinitely.
        timeout=30,
    )
    return resp.json().get('organic_results', [])
def get_company_news(company: str) -> list:
    """Return recent news hits for `company` as {title, snippet, url} dicts.

    Uses .get() for every field (not just 'snippet' as before) so a partial
    SERP result missing 'title' or 'link' yields an empty string instead of
    raising KeyError and aborting the whole profile.
    """
    results = search_company(company, 'news 2026')
    return [
        {'title': r.get('title', ''), 'snippet': r.get('snippet', ''), 'url': r.get('link', '')}
        for r in results
    ]
def get_tech_stack(company: str) -> list:
    """Infer a prospect's technology stack from engineering-blog snippets.

    Returns the subset of a known-technology list that appears in the
    combined snippet text, preserving the list's original order.
    """
    results = search_company(company, 'technology stack engineering blog')
    all_text = ' '.join(r.get('snippet', '') for r in results).lower()
    techs = ['python', 'react', 'kubernetes', 'aws', 'gcp', 'azure', 'terraform',
             'postgresql', 'mongodb', 'redis', 'docker', 'typescript', 'go', 'rust']
    # Match on word boundaries: a bare substring test fires false positives
    # for short names ('go' in "google", 'aws' in "flaws", 'rust' in "trust").
    return [t for t in techs if re.search(rf'\b{re.escape(t)}\b', all_text)]
news = get_company_news('Stripe')
print(f'Stripe news: {len(news)} articles')
for n in news[:3]:
print(f' {n["title"][:60]}')Step 2: Extract hiring and growth signals
Search for hiring activity and funding news as indicators of company growth stage and budget availability.
def get_hiring_signals(company: str) -> dict:
    """Estimate hiring activity from job/careers search snippets.

    Returns:
        dict with:
        - active_hiring: True when any hiring-related results surfaced
        - job_signals: raw count of role-keyword mentions
        - role_types: distinct role keywords found
        - sources: up to 3 source URLs for spot-checking
    """
    results = search_company(company, 'hiring jobs careers 2026')
    all_text = ' '.join(r.get('snippet', '') for r in results).lower()
    roles = re.findall(r'(engineer|developer|manager|director|vp|head of)', all_text)
    return {
        'active_hiring': bool(results),
        'job_signals': len(roles),
        # sorted() makes role_types deterministic; bare set iteration order
        # varies between runs, which made profiles non-reproducible.
        'role_types': sorted(set(roles)),
        # .get keeps a partial result (missing 'link') from raising KeyError.
        'sources': [r.get('link', '') for r in results[:3]],
    }
def get_funding_signals(company: str) -> dict:
    """Look for recent funding mentions and pull out the dollar amounts."""
    results = search_company(company, 'funding raised valuation 2026')
    combined = ' '.join(r.get('snippet', '') for r in results)
    # Captures figures like "$6.5 billion" or "$20B" from the snippet text.
    amount_pattern = re.compile(r'\$(\d+(?:\.\d+)?\s*(?:million|billion|M|B))')
    amounts = amount_pattern.findall(combined)
    snippet_previews = [r.get('snippet', '')[:100] for r in results[:2]]
    return {
        'recent_funding': bool(amounts),
        'amounts_mentioned': amounts[:3],
        'snippets': snippet_previews,
    }
hiring = get_hiring_signals('Stripe')
print(f'Hiring: {hiring["job_signals"]} role mentions, types: {hiring["role_types"]}')Step 3: Compile the prospect profile
Combine all signals into a structured prospect profile that a sales rep can review in under 30 seconds.
def research_prospect(company: str) -> dict:
    """Compile a structured prospect profile from four targeted searches.

    Runs news, tech-stack, hiring, and funding searches (4 credits,
    ~$0.020 total), prints a rep-readable summary, and returns the
    profile dict for downstream use (CRM import, scoring, etc.).
    """
    news = get_company_news(company)
    time.sleep(0.3)  # brief pause between searches to stay under rate limits
    tech = get_tech_stack(company)
    time.sleep(0.3)
    hiring = get_hiring_signals(company)
    time.sleep(0.3)
    funding = get_funding_signals(company)
    profile = {
        'company': company,
        'top_news': [n['title'] for n in news[:3]],
        'tech_stack': tech,
        'hiring_active': hiring['active_hiring'],
        'role_types': hiring['role_types'],
        'recent_funding': funding['recent_funding'],
        'funding_amounts': funding['amounts_mentioned'],
        'credits_used': 4,
        'cost': 0.020,
    }
    print(f'Prospect Profile: {company}')
    print(f' News: {len(news)} articles')
    for n in profile['top_news']:
        print(f' - {n[:55]}')
    print(f' Tech: {", ".join(tech) if tech else "Unknown"}')
    print(f' Hiring: {"Active" if hiring["active_hiring"] else "No signals"} ({hiring["job_signals"]} mentions)')
    print(f' Funding: {", ".join(funding["amounts_mentioned"]) if funding["amounts_mentioned"] else "None found"}')
    # :.3f keeps the trailing zero so the cost prints as "$0.020" per the
    # documented output — a bare f-string renders the float 0.020 as "0.02".
    print(f' Cost: ${profile["cost"]:.3f}')
    return profile
research_prospect('Stripe')Python Example
import os, requests, time
# Standalone example setup: key from the environment (KeyError if unset)
# plus the shared auth/JSON headers used for every search request.
SCAVIO_KEY = os.environ['SCAVIO_API_KEY']
H = {'x-api-key': SCAVIO_KEY, 'Content-Type': 'application/json'}
def research_prospect(company):
    """Print a quick three-search research snapshot for `company`.

    Fires news, tech-stack, and hiring searches (3 credits, ~$0.015)
    and prints the top two result titles per search.
    """
    # Iterating suffixes (rather than full queries and re-splitting on the
    # company name) gives the same queries and labels without the fragile
    # `q.split(company)[1]`, which raises IndexError if the name is absent.
    suffixes = ['news 2026', 'technology stack', 'hiring careers']
    for suffix in suffixes:
        resp = requests.post(
            'https://api.scavio.dev/api/v1/search',
            headers=H,
            json={'query': f'{company} {suffix}', 'country_code': 'us', 'num_results': 3},
            # requests has no default timeout; avoid hanging the loop forever.
            timeout=30,
        )
        results = resp.json().get('organic_results', [])
        print(f' [{suffix}]')
        for r in results[:2]:
            print(f' {r["title"][:55]}')
        time.sleep(0.3)  # brief pause between searches to respect rate limits
    print(' Cost: $0.015 (3 searches)')
for co in ['Stripe', 'Datadog']:
print(f'\n{co}:')
research_prospect(co)JavaScript Example
const SCAVIO_KEY = process.env.SCAVIO_API_KEY;
async function researchProspect(company) {
const queries = [`${company} news 2026`, `${company} technology stack`, `${company} hiring`];
for (const q of queries) {
const resp = await fetch('https://api.scavio.dev/api/v1/search', {
method: 'POST',
headers: { 'x-api-key': SCAVIO_KEY, 'Content-Type': 'application/json' },
body: JSON.stringify({ query: q, country_code: 'us', num_results: 3 })
});
const results = (await resp.json()).organic_results || [];
console.log(` [${q.replace(company, '').trim()}]`);
results.slice(0, 2).forEach(r => console.log(` ${r.title.slice(0, 55)}`));
}
console.log(' Cost: $0.015');
}
researchProspect('Stripe');Expected Output
Prospect Profile: Stripe
News: 5 articles
- Stripe Launches AI-Powered Fraud Detection in 2026
- Stripe Revenue Surpasses $20B Annual Run Rate
- Stripe Expands to 15 New Markets in Asia Pacific
Tech: python, react, kubernetes, aws, postgresql, redis
Hiring: Active (8 mentions)
Funding: $6.5 billion
Cost: $0.020