Reddit's public search is powerful but its official JSON endpoints are rate-limited, unpaged, and occasionally missing. For monitoring agents, research pipelines, and RAG systems that need fresh community data, a search API that handles the scrape layer is the difference between a weekend project and a production pipeline. This tutorial walks through authenticating, sending a Reddit search request, and iterating cursor pages to collect posts in Python.
Prerequisites
- Python 3.8 or higher installed
- requests library installed (pip install requests)
- A Scavio API key from scavio.dev
- A query you want to search (keyword or subreddit-scoped phrase)
Walkthrough
Step 1: Install the requests library
requests is the only dependency needed for this tutorial.
pip install requests

Step 2: Set your API key
Keep credentials out of source by reading from an environment variable.
import os
API_KEY = os.environ["SCAVIO_API_KEY"]

Step 3: Send the Reddit search request
POST to /api/v1/reddit/search with your query and optional sort. Reddit requests take 5-15 seconds, so set a longer client timeout.
import requests
response = requests.post(
"https://api.scavio.dev/api/v1/reddit/search",
headers={"Authorization": f"Bearer {API_KEY}"},
json={"query": "best python web frameworks 2026", "sort": "new"},
timeout=30,
)
data = response.json()

Step 4: Iterate posts and follow the cursor
Posts live under data.posts. When data.nextCursor is not null, pass it as cursor to fetch the next page.
for post in data["data"]["posts"]:
    print(f"r/{post['subreddit']} -- {post['title']}")

next_cursor = data["data"].get("nextCursor")
if next_cursor:
    # call again with {"query": ..., "cursor": next_cursor}
    pass

Python Example
import os
import requests
API_KEY = os.environ["SCAVIO_API_KEY"]
ENDPOINT = "https://api.scavio.dev/api/v1/reddit/search"
def search_reddit(query: str, sort: str = "relevance"):
posts, cursor = [], None
while True:
body = {"query": query, "sort": sort}
if cursor:
body["cursor"] = cursor
r = requests.post(
ENDPOINT,
headers={"Authorization": f"Bearer {API_KEY}"},
json=body,
timeout=30,
)
r.raise_for_status()
data = r.json()["data"]
posts.extend(data["posts"])
cursor = data.get("nextCursor")
if not cursor or len(posts) >= 50:
break
return posts
results = search_reddit("fastapi vs django 2026", sort="new")
for p in results[:10]:
    print(f"{p['score']:>6} r/{p['subreddit']} {p['title']}")

JavaScript Example
const API_KEY = process.env.SCAVIO_API_KEY;
const ENDPOINT = "https://api.scavio.dev/api/v1/reddit/search";
async function searchReddit(query, sort = "relevance") {
const posts = [];
let cursor;
while (true) {
const body = { query, sort };
if (cursor) body.cursor = cursor;
const r = await fetch(ENDPOINT, {
method: "POST",
headers: {
Authorization: `Bearer ${API_KEY}`,
"Content-Type": "application/json",
},
body: JSON.stringify(body),
});
const { data } = await r.json();
posts.push(...data.posts);
cursor = data.nextCursor;
if (!cursor || posts.length >= 50) break;
}
return posts;
}
const posts = await searchReddit("fastapi vs django 2026", "new");
posts.slice(0, 10).forEach((p) =>
console.log(`r/${p.subreddit} -- ${p.title}`)
);

Expected Output
{
"data": {
"searchQuery": "fastapi vs django 2026",
"totalResults": 14,
"nextCursor": "eyJjYW5kaWRhdGVzX3JldH...",
"posts": [
{
"position": 0,
"id": "t3_1smb9du",
"title": "FastAPI vs Django in 2026",
"subreddit": "Python",
"author": "python_dev",
"timestamp": "2026-04-15T16:34:40+0000",
"nsfw": false
}
]
}
}