diff --git a/agent.py b/agent.py
index cc5a58a..2fad851 100644
--- a/agent.py
+++ b/agent.py
@@ -3,7 +3,7 @@
 from openai import OpenAI
 from tools import discover_tools
 
-class OpenRouterAgent:
+class GroqAgent:
     def __init__(self, config_path="config.yaml", silent=False):
         # Load configuration
         with open(config_path, 'r') as f:
@@ -12,16 +12,16 @@ def __init__(self, config_path="config.yaml", silent=False):
         # Silent mode for orchestrator (suppresses debug output)
         self.silent = silent
 
-        # Initialize OpenAI client with OpenRouter
+        # Initialize OpenAI client with Groq
         self.client = OpenAI(
-            base_url=self.config['openrouter']['base_url'],
-            api_key=self.config['openrouter']['api_key']
+            base_url=self.config['groq']['base_url'],
+            api_key=self.config['groq']['api_key']
         )
 
         # Discover tools dynamically
         self.discovered_tools = discover_tools(self.config, silent=self.silent)
 
-        # Build OpenRouter tools array
+        # Build Groq tools array
         self.tools = [tool.to_openrouter_schema() for tool in self.discovered_tools.values()]
 
         # Build tool mapping
@@ -29,10 +29,10 @@ def __init__(self, config_path="config.yaml", silent=False):
 
     def call_llm(self, messages):
-        """Make OpenRouter API call with tools"""
+        """Make Groq API call with tools"""
         try:
             response = self.client.chat.completions.create(
-                model=self.config['openrouter']['model'],
+                model=self.config['groq']['model'],
                 messages=messages,
                 tools=self.tools
             )
@@ -100,11 +100,13 @@ def run(self, user_input: str):
             # Add the response to messages
             assistant_message = response.choices[0].message
-            messages.append({
+            message_to_append = {
                 "role": "assistant",
-                "content": assistant_message.content,
-                "tool_calls": assistant_message.tool_calls
-            })
+                "content": assistant_message.content
+            }
+            if assistant_message.tool_calls:
+                message_to_append['tool_calls'] = assistant_message.tool_calls
+            messages.append(message_to_append)
 
             # Capture assistant content for full response
             if assistant_message.content:
diff --git a/config.yaml b/config.yaml
index a463bac..64e39db 100644
--- a/config.yaml
+++ b/config.yaml
@@ -1,12 +1,12 @@
-# OpenRouter API settings
-openrouter:
+# Groq API settings
+groq:
   api_key: "YOUR KEY"
-  base_url: "https://openrouter.ai/api/v1"
+  base_url: "https://api.groq.com/openai/v1"
 
   # IMPORTANT: When selecting a model, ensure it has a high context window (200k+ tokens recommended)
   # The orchestrator can generate large amounts of results from multiple agents that need to be
   # processed together during synthesis. Low context window models may fail or truncate results.
-  model: "moonshotai/kimi-k2"
+  model: "llama3-70b-8192"
 
 # System prompt for the agent
 system_prompt: |
diff --git a/main.py b/main.py
index 1d2d9fc..5d5fa56 100644
--- a/main.py
+++ b/main.py
@@ -1,21 +1,21 @@
-from agent import OpenRouterAgent
+from agent import GroqAgent
 
 def main():
-    """Main entry point for the OpenRouter agent"""
-    print("OpenRouter Agent with DuckDuckGo Search")
+    """Main entry point for the Groq agent"""
+    print("Groq Agent with DuckDuckGo Search")
     print("Type 'quit', 'exit', or 'bye' to exit")
     print("-" * 50)
 
     try:
-        agent = OpenRouterAgent()
+        agent = GroqAgent()
         print("Agent initialized successfully!")
-        print(f"Using model: {agent.config['openrouter']['model']}")
-        print("Note: Make sure to set your OpenRouter API key in config.yaml")
+        print(f"Using model: {agent.config['groq']['model']}")
+        print("Note: Make sure to set your Groq API key in config.yaml")
         print("-" * 50)
     except Exception as e:
         print(f"Error initializing agent: {e}")
         print("Make sure you have:")
-        print("1. Set your OpenRouter API key in config.yaml")
+        print("1. Set your Groq API key in config.yaml")
         print("2. Installed all dependencies with: pip install -r requirements.txt")
         return
diff --git a/orchestrator.py b/orchestrator.py
index 7dba0c7..e47e3ad 100644
--- a/orchestrator.py
+++ b/orchestrator.py
@@ -4,7 +4,7 @@ import threading
 from concurrent.futures import ThreadPoolExecutor, as_completed
 from typing import List, Dict, Any
 
-from agent import OpenRouterAgent
+from agent import GroqAgent
 
 class TaskOrchestrator:
     def __init__(self, config_path="config.yaml", silent=False):
@@ -26,7 +26,7 @@ def decompose_task(self, user_input: str, num_agents: int) -> List[str]:
         """Use AI to dynamically generate different questions based on user input"""
 
         # Create question generation agent
-        question_agent = OpenRouterAgent(silent=True)
+        question_agent = GroqAgent(silent=True)
 
         # Get question generation prompt from config
         prompt_template = self.config['orchestrator']['question_generation_prompt']
@@ -77,7 +77,7 @@ def run_agent_parallel(self, agent_id: int, subtask: str) -> Dict[str, Any]:
             self.update_agent_progress(agent_id, "PROCESSING...")
 
             # Use simple agent like in main.py
-            agent = OpenRouterAgent(silent=True)
+            agent = GroqAgent(silent=True)
 
             start_time = time.time()
             response = agent.run(subtask)
@@ -128,7 +128,7 @@ def _aggregate_consensus(self, responses: List[str], _results: List[Dict[str, An
             return responses[0]
 
         # Create synthesis agent to combine all responses
-        synthesis_agent = OpenRouterAgent(silent=True)
+        synthesis_agent = GroqAgent(silent=True)
 
         # Build agent responses section
         agent_responses_text = ""
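
For context on the agent.py hunk above: the assistant message now only carries a "tool_calls" key when the model actually requested tool calls, presumably because some OpenAI-compatible endpoints reject a null tool_calls field. A minimal sketch of that logic follows; the helper name build_assistant_message is illustrative only (the diff builds the dict inline inside run()), and assistant_message is assumed to be a chat.completions message object from the OpenAI-compatible client.

# Sketch of the assistant-message handling introduced in agent.py (helper name is hypothetical).
def build_assistant_message(assistant_message) -> dict:
    message_to_append = {
        "role": "assistant",
        "content": assistant_message.content,
    }
    # Attach tool_calls only when the model actually requested tools;
    # the assumption is that a literal "tool_calls": None can be rejected
    # by strict OpenAI-compatible backends.
    if assistant_message.tool_calls:
        message_to_append["tool_calls"] = assistant_message.tool_calls
    return message_to_append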
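After applying the patch, one way to confirm that the rename and the new groq: config section line up is to exercise the same calls main.py makes. This is a hypothetical smoke test, not part of the diff, and it assumes a valid Groq API key has been filled into config.yaml.

# Hypothetical smoke test mirroring main.py's usage of the renamed class.
from agent import GroqAgent

agent = GroqAgent()                                  # reads config.yaml by default
print(agent.config["groq"]["model"])                 # should print the configured model
print(agent.run("What is the capital of France?"))   # exercises the tool-calling loop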