Use AutoStack's curated prompt and resource library directly from your agents, workflows, and apps. Free to use. No API key needed. CORS enabled — call from anywhere.
GET https://aiautostack.app/api/v1/status — Returns API health, version, available endpoints, and platform stats (total prompts, resources, ecosystem size).
curl https://aiautostack.app/api/v1/status
{
"status": "ok",
"version": "1.0.0",
"stats": {
"prompts": 38,
"resources": 46,
"ecosystem_prompts": "160000+",
"ecosystem_resources": "1000000+"
}
}

// n8n: HTTP Request node → GET
// URL: https://aiautostack.app/api/v1/prompts?category=technique&limit=10
// Then use the prompt text in your LLM node
import requests

# Fetch the best chain-of-thought (CoT) prompt from the AutoStack library.
# A timeout is set so the script cannot hang forever on a stalled connection.
res = requests.get(
    "https://aiautostack.app/api/v1/prompts",
    params={"category": "technique", "q": "chain of thought"},
    timeout=10,
)
res.raise_for_status()  # fail loudly on HTTP errors instead of parsing an error body

prompts = res.json().get("prompts", [])
if prompts:
    print(prompts[0]["title"])  # e.g. "Chain-of-Thought Master Template"
else:
    print("No prompts found")
from crewai_tools import BaseTool
import requests


class AutoStackTool(BaseTool):
    """CrewAI tool that searches the AutoStack prompt library."""

    name: str = "AutoStack Prompt Hub"
    description: str = "Search AutoStack for the best prompts"

    def _run(self, query: str) -> str:
        """Return the title of the top prompt matching *query*.

        Returns "No results" when the search comes back empty instead of
        raising IndexError, matching the LangChain variant of this tool.
        """
        r = requests.get(
            "https://aiautostack.app/api/v1/search",
            params={"q": query, "type": "prompts"},
            timeout=10,  # never let the agent hang on a stalled connection
        )
        r.raise_for_status()  # surface HTTP errors instead of parsing an error body
        prompts = r.json().get("results", {}).get("prompts", [])
        return prompts[0]["title"] if prompts else "No results"
from langchain.tools import tool


@tool
def get_autostack_prompt(query: str) -> str:
    """Search AutoStack for the best prompts for a task"""
    # Local import keeps the snippet self-contained, as in the original example.
    import requests

    r = requests.get(
        "https://aiautostack.app/api/v1/search",
        params={"q": query, "type": "prompts"},
        timeout=10,  # never let the agent hang on a stalled connection
    )
    r.raise_for_status()  # surface HTTP errors instead of parsing an error body
    prompts = r.json().get("results", {}).get("prompts", [])
    return prompts[0]["title"] if prompts else "No results"
import requests

# Get real-time AI news from the AutoStack feeds API.
news = requests.get("https://aiautostack.app/api/v1/feeds/news", timeout=10)
news.raise_for_status()  # fail loudly on HTTP errors instead of parsing an error body
for article in news.json().get("articles", [])[:5]:
    print(f"[{article['source']}] {article['title']}")

# Get trending AI GitHub repos.
repos = requests.get("https://aiautostack.app/api/v1/feeds/github", timeout=10)
repos.raise_for_status()
for repo in repos.json().get("repos", [])[:5]:
    print(f"⭐ {repo['stars']} - {repo['name']}")