Documentation Index
Fetch the complete documentation index at: https://docs.cycls.com/llms.txt
Use this file to discover all available pages before exploring further.
This guide shows you how to build an agent that can search the web using Exa and synthesize the results using OpenAI. This is perfect for building research agents or bots that need real-time information.
Prerequisites
- cycls installed
- OpenAI API key
- Exa API key (available from the Exa dashboard)
Step 1: Define the Search Function
First, we'll define a Python function that uses the exa-py client to search the web.
def search_web(query):
    """Search the web for *query* and return the top hits as plain text."""
    from exa_py import Exa
    import os

    exa = Exa(api_key=os.getenv("EXA_API_KEY"))
    # Fetch the top 3 matches, including extracted page text.
    response = exa.search_and_contents(
        query,
        num_results=3,
        text=True
    )
    # Render each hit as a compact text record the LLM can read.
    formatted = [
        f"Title: {hit.title}\nURL: {hit.url}\nContent: {hit.text[:1000]}..."
        for hit in response.results
    ]
    return "\n\n".join(formatted)
Step 2: Define the Tool Schema
Tell OpenAI about our search tool so it knows when to use it.
# Tool schema: describes search_web to the model so it knows when to call it.
tools = [
    {
        "type": "function",
        "name": "search_web",
        "description": "Search the web for real-time information.",
        "parameters": {
            "type": "object",
            "properties": {
                "query": {"type": "string", "description": "The search query"},
            },
            "required": ["query"],
        },
    },
]
Step 3: Create the Agent
import cycls
import json
import os


# pip: packages installed into the app environment; copy: ship the .env file
# so both API keys are available at runtime.
@cycls.app(pip=["openai", "exa-py"], copy=[".env"])
async def app(context):
    """Answer the conversation in context.messages, searching the web when needed.

    Flow: ask the model once with the search tool available; if it requests a
    function call, run the search, append the result to the history, and ask
    again so the model can synthesize a final answer. Yields the answer text.
    """
    from openai import OpenAI
    from exa_py import Exa

    client = OpenAI()

    # --- Search tool ---------------------------------------------------------
    def search_web(query):
        """Return the top 3 Exa results for *query* as LLM-readable text."""
        exa = Exa(api_key=os.getenv("EXA_API_KEY"))
        response = exa.search_and_contents(query, num_results=3, text=True)
        results = []
        for r in response.results:
            # r.text can be None when no content was extracted; guard before
            # slicing so one empty hit doesn't crash the whole request.
            snippet = (r.text or "")[:1000]
            results.append(f"Title: {r.title}\nURL: {r.url}\nContent: {snippet}...")
        return "\n\n".join(results)

    # --- Tool schema: tells the model when/how to call search_web ------------
    tools = [{
        "type": "function",
        "name": "search_web",
        "description": "Search the web for real-time information.",
        "parameters": {
            "type": "object",
            "properties": {
                "query": {"type": "string", "description": "The search query"}
            },
            "required": ["query"]
        }
    }]

    # First call: the model either answers directly or requests a tool call.
    response = client.responses.create(
        model="gpt-4o",
        input=context.messages,
        tools=tools
    )

    # Keep the model's output (including any function_call items) in history.
    context.messages.extend(response.output)

    # Execute any requested function calls and feed their output back.
    tool_called = False
    for item in response.output:
        if item.type == "function_call":
            tool_called = True
            if item.name == "search_web":
                args = json.loads(item.arguments)
                result = search_web(args["query"])
                context.messages.append({
                    "type": "function_call_output",
                    "call_id": item.call_id,
                    "output": result
                })

    # Second call only if a tool ran: let the model synthesize an answer from
    # the search results now present in the message history.
    if tool_called:
        final = client.responses.create(
            model="gpt-4o",
            input=context.messages
        )
        yield final.output_text
    else:
        yield response.output_text


app.local()
Full Code
Create an app.py file with the following content:
import cycls
import json
import os


# pip: packages installed into the app environment; copy: ship the .env file
# so both API keys are available at runtime.
@cycls.app(pip=["openai", "exa-py"], copy=[".env"])
async def app(context):
    """Answer the conversation in context.messages, searching the web when needed.

    Flow: ask the model once with the search tool available; if it requests a
    function call, run the search, append the result to the history, and ask
    again so the model can synthesize a final answer. Yields the answer text.
    """
    from openai import OpenAI
    from exa_py import Exa

    client = OpenAI()

    # --- Search tool ---------------------------------------------------------
    def search_web(query):
        """Return the top 3 Exa results for *query* as LLM-readable text."""
        exa = Exa(api_key=os.getenv("EXA_API_KEY"))
        response = exa.search_and_contents(query, num_results=3, text=True)
        results = []
        for r in response.results:
            # r.text can be None when no content was extracted; guard before
            # slicing so one empty hit doesn't crash the whole request.
            snippet = (r.text or "")[:1000]
            results.append(f"Title: {r.title}\nURL: {r.url}\nContent: {snippet}...")
        return "\n\n".join(results)

    # --- Tool schema: tells the model when/how to call search_web ------------
    tools = [{
        "type": "function",
        "name": "search_web",
        "description": "Search the web for real-time information.",
        "parameters": {
            "type": "object",
            "properties": {
                "query": {"type": "string", "description": "The search query"}
            },
            "required": ["query"]
        }
    }]

    # First call: the model either answers directly or requests a tool call.
    response = client.responses.create(
        model="gpt-4o",
        input=context.messages,
        tools=tools
    )

    # Keep the model's output (including any function_call items) in history.
    context.messages.extend(response.output)

    # Execute any requested function calls and feed their output back.
    tool_called = False
    for item in response.output:
        if item.type == "function_call":
            tool_called = True
            if item.name == "search_web":
                args = json.loads(item.arguments)
                result = search_web(args["query"])
                context.messages.append({
                    "type": "function_call_output",
                    "call_id": item.call_id,
                    "output": result
                })

    # Second call only if a tool ran: let the model synthesize an answer from
    # the search results now present in the message history.
    if tool_called:
        final = client.responses.create(
            model="gpt-4o",
            input=context.messages
        )
        yield final.output_text
    else:
        yield response.output_text


app.local()
Configuration
Make sure your .env file contains both API keys:
OPENAI_API_KEY=sk-...
EXA_API_KEY=...