from composio_openai import ComposioToolSet, Action
from openai import OpenAI
# OpenAI client; picks up the OPENAI_API_KEY environment variable by default.
client = OpenAI()
# Composio toolset — bridges Composio actions into OpenAI-compatible tool schemas.
toolset = ComposioToolSet()
# Fetch the Tavily web-search action as an OpenAI tool definition.
tools = toolset.get_tools(actions=[Action.TAVILY_TAVILY_SEARCH])
# Define the market research task with specific parameters
# Market-research request. NOTE(review): this dict is not passed to the Tavily
# API directly — it is stringified into the user prompt (str(task) below), and
# the LLM decides which of these fields to forward as tool-call arguments.
task = {
    "query": "Analyze the competitive landscape of AI-powered customer service solutions in 2024",
    # "advanced" requests Tavily's deeper (slower, higher-quality) search mode.
    "search_depth": "advanced",
    # Ask Tavily to include a synthesized answer alongside raw results.
    "include_answer": True,
    "max_results": 10,
    # Restrict results to relevant industry/analyst sources.
    "include_domains": [
        "techcrunch.com",
        "venturebeat.com",
        "forbes.com",
        "gartner.com",
        "marketsandmarkets.com"
    ],
}
# First pass: send the task to the model and let it decide whether to
# invoke the Tavily search tool (tool_choice="auto").
messages = [{"role": "user", "content": str(task)}]
response = client.chat.completions.create(
    model="gpt-4",
    messages=messages,
    tools=tools,
    tool_choice="auto",
)

assistant_message = response.choices[0].message
if not assistant_message.tool_calls:
    # The model answered from its own knowledge without calling the tool.
    print("LLM responded directly (no tool used):", assistant_message.content)
else:
    # Execute every tool call the model requested via Composio.
    execution_result = toolset.handle_tool_calls(response)
    print("Execution Result:", execution_result)

    # Extend the conversation: the assistant turn first, then one "tool"
    # message per call so the model can see each result it asked for.
    messages.append(assistant_message)
    for call, outcome in zip(assistant_message.tool_calls, execution_result):
        messages.append({
            "role": "tool",
            "content": str(outcome),
            "tool_call_id": call.id,
        })

    # Second pass: have the model summarize the search results.
    final_response = client.chat.completions.create(
        model="gpt-4",
        messages=messages,
    )
    print("\nMarket Research Summary:")
    print(final_response.choices[0].message.content)