# NOTE(review): the lines previously here were web-scrape artifacts from the
# Hugging Face Spaces file viewer ("Runtime error" banner, line-number gutter,
# file size, commit hash) — not part of the source; removed so the file parses.
"""Central AI Hub for coordinating all AI agents and operations."""
import os
import uuid
import asyncio
from loguru import logger
from utils.llm_orchestrator import LLMOrchestrator
from ctransformers import AutoModelForCausalLM
import torch
from huggingface_hub import hf_hub_download
class CentralAIHub:
    """Central coordinator for LLM initialization, agent registry and task delegation."""

    def __init__(self, api_key=None, model_path=None):
        """Initialize the Central AI Hub.

        Args:
            api_key: Optional API key for remote services (stored, not used here).
            model_path: Optional path to a local GGUF model. When None, the model
                is downloaded from the Hugging Face Hub during start().
        """
        self.api_key = api_key
        self.model_path = model_path
        # Local cache directory for downloaded model weights.
        self.cache_dir = os.path.join(os.getcwd(), ".cache")
        os.makedirs(self.cache_dir, exist_ok=True)
        self.llm = None
        self.llm_orchestrator = None
        # Task type -> agent class name handling that type.
        self.agents = {
            'code_analysis': 'CodeAnalysisAgent',
            'code_generation': 'CodeGenerationAgent',
            'error_fixing': 'ErrorFixingAgent'
        }
        self.tasks = {}          # task_id -> {'status', 'task', 'agent', 'result'}
        self.active_agents = {}  # agent class name -> initialized flag
        # Keep strong references to background asyncio tasks so they are not
        # garbage-collected mid-flight and can be cancelled on shutdown.
        self._background_tasks = {}
        self._initialized = False
        self.max_retries = 3
        self.retry_delay = 2

    async def _initialize_llm_client(self):
        """Initialize the LLM client from a local path or the Hugging Face Hub.

        Returns:
            bool: True on success, False on any failure (error is logged).
        """
        try:
            if self.model_path:
                # Load a user-supplied local model.
                logger.info(f"Loading local model from {self.model_path}")
                self.llm = AutoModelForCausalLM.from_pretrained(
                    self.model_path,
                    model_type="qwen",
                    cache_dir=self.cache_dir,
                    local_files_only=True
                )
                logger.info("Local model loaded successfully")
            else:
                model_name = "Qwen/Qwen2.5-14B-Instruct-GGUF"
                model_filename = "Qwen2.5-14B_Uncensored_Instruct-Q8_0.gguf"
                # BUG FIX: hf_hub_download stores files in its own repo layout
                # under cache_dir (models--.../snapshots/...), NOT at
                # cache_dir/filename as the old code assumed — so the load
                # after a fresh download failed. Use the path it returns; it
                # also skips the download when the file is already cached.
                cached_model_path = hf_hub_download(
                    repo_id=model_name,
                    filename=model_filename,
                    cache_dir=self.cache_dir,
                )
                logger.info(f"Model available at {cached_model_path}")
                self.llm = AutoModelForCausalLM.from_pretrained(
                    cached_model_path,
                    model_type="qwen",
                    local_files_only=True
                )
                logger.info("Model loaded successfully")
            self.llm_orchestrator = LLMOrchestrator(self.llm)
            return True
        except Exception as e:
            # Best-effort contract: callers check the boolean, we only log.
            logger.error(f"Failed to initialize LLM client: {e}")
            return False

    async def start(self):
        """Start the hub: initialize the LLM first, then every registered agent.

        Raises:
            RuntimeError: if the LLM client cannot be initialized.
            Exception: re-raised from any agent that fails to initialize,
                halting startup.
        """
        if self._initialized:
            return  # idempotent: already started
        logger.info("Starting Central AI Hub...")
        # Agents are only brought up after a successful LLM connection.
        if not await self._initialize_llm_client():
            raise RuntimeError("Failed to initialize LLM client.")
        for agent_type, agent_class in self.agents.items():
            try:
                await self.initialize_agent(agent_class)
                logger.info(f"Initialized {agent_class}")
            except Exception as e:
                logger.error(f"Failed to initialize agent {agent_class}: {e}")
                raise  # halt startup on any agent failure
        self._initialized = True
        logger.info("Central AI Hub initialization complete.")

    async def delegate_task(self, task):
        """Delegate a task dict to the appropriate agent and process it asynchronously.

        Args:
            task: Mapping describing the task; its 'type' key selects the agent.

        Returns:
            str: A newly generated task id usable with get_task_status().

        Raises:
            RuntimeError: if task is falsy or no agent handles its type.
        """
        if not task:
            raise RuntimeError("Task cannot be None")
        task_id = str(uuid.uuid4())
        agent_type = await self.select_agent(task)
        if not agent_type:
            # .get avoids a confusing KeyError when 'type' is missing entirely.
            raise RuntimeError(
                f"No suitable agent found for task type: {task.get('type')}"
            )
        self.tasks[task_id] = {
            'status': 'active',
            'task': task,
            'agent': agent_type,
            'result': None
        }
        # Keep a reference so the task is not garbage-collected; drop it once done.
        bg = asyncio.create_task(self._process_task(task_id))
        self._background_tasks[task_id] = bg
        bg.add_done_callback(lambda _t: self._background_tasks.pop(task_id, None))
        return task_id

    async def _process_task(self, task_id):
        """Process a delegated task asynchronously, updating its status/result."""
        task_info = self.tasks[task_id]
        try:
            # Simulate task processing.
            await asyncio.sleep(2)  # simulated work
            task_info['status'] = 'completed'
            task_info['result'] = "Task processed successfully"
            logger.info(f"Task {task_id} completed")
        except Exception as e:
            task_info['status'] = 'failed'
            task_info['error'] = str(e)
            logger.error(f"Error processing task {task_id}: {str(e)}")

    async def get_task_status(self, task_id):
        """Return the bookkeeping dict for task_id, or {'status': 'not_found'}."""
        return self.tasks.get(task_id, {'status': 'not_found'})

    async def select_agent(self, task):
        """Return the agent class name for the task's 'type', or None if unknown."""
        # .get on the task too: a task without 'type' simply selects no agent.
        return self.agents.get(task.get('type'))

    async def initialize_agent(self, agent_id):
        """Mark an agent as active.

        Raises:
            ValueError: if agent_id is not a registered agent class name.
        """
        if agent_id not in self.agents.values():
            raise ValueError(f"Agent {agent_id} not found")
        self.active_agents[agent_id] = True

    async def shutdown(self):
        """Shutdown the hub: clear agents and cancel any in-flight tasks."""
        logger.info("Shutting down Central AI Hub...")
        # Clean up active agents.
        self.active_agents.clear()
        # Mark still-active bookkeeping entries as cancelled...
        for task_id, task in self.tasks.items():
            if task['status'] == 'active':
                task['status'] = 'cancelled'
        # ...and actually cancel the underlying asyncio tasks (the old code
        # only flipped the status string and left coroutines running).
        for bg in self._background_tasks.values():
            bg.cancel()
        self._background_tasks.clear()
        self._initialized = False