#!/usr/bin/env python3
"""
Test script to verify API keys are working correctly
Run this before launching the main app to ensure all APIs are accessible
"""
import os
from dotenv import load_dotenv
import openai
import numpy as np
import soundfile as sf
import io

# Load environment variables
load_dotenv()

openai_api_key = os.environ.get("OPENAI_API_KEY")
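
# All keys are read from the environment (a local .env file works via python-dotenv):
# MISTRAL_API_KEY, GEMINI_API_KEY, GROQ_API_KEY, OPENAI_API_KEY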

def test_mistral():
    """Test Mistral API"""
    try:
        from mistralai import Mistral

        api_key = os.environ.get("MISTRAL_API_KEY")
        if not api_key:
            print("❌ MISTRAL_API_KEY not found in environment")
            return False

        client = Mistral(api_key=api_key)
        response = client.chat.complete(
            model="mistral-large-latest",
            messages=[{"role": "user", "content": "Say 'Hello' in French"}]
        )
        print(f"✅ Mistral API working: {response.choices[0].message.content[:50]}...")
        return True
    except Exception as e:
        print(f"❌ Mistral API error: {str(e)}")
        return False

def test_gemini():
    """Test Gemini API"""
    try:
        import google.generativeai as genai

        api_key = os.environ.get("GEMINI_API_KEY")
        if not api_key:
            print("⚠️ GEMINI_API_KEY not found (optional fallback)")
            return False

        genai.configure(api_key=api_key)
        model = genai.GenerativeModel("models/gemini-1.5-flash-latest")
        response = model.generate_content("Say 'Hello' in French")
        print(f"✅ Gemini API working: {response.text[:50]}...")
        return True
    except Exception as e:
        print(f"⚠️ Gemini API error (fallback): {str(e)}")
        return False

def test_groq():
    """Test Groq API"""
    try:
        from groq import Groq

        api_key = os.environ.get("GROQ_API_KEY")
        if not api_key:
            print("❌ GROQ_API_KEY not found in environment")
            return False

        client = Groq(api_key=api_key)
        # Test with a simple completion
        response = client.chat.completions.create(
            messages=[
                {
                    "role": "user",
                    "content": "Explain the importance of fast language models",
                }
            ],
            model="llama-3.3-70b-versatile",
        )
        print(f"✅ Groq API working: {response.choices[0].message.content[:50]}...")
        return True
    except Exception as e:
        print(f"❌ Groq API error: {str(e)}")
        return False

def test_openai_whisper():
    """Test OpenAI Whisper API (STT)"""
    if not openai_api_key:
        print("⚠️ OPENAI_API_KEY not found (OpenAI Whisper fallback not available)")
        return False
    try:
        # Generate a 0.5s dummy silent audio clip (16kHz mono)
        sr = 16000
        duration = 0.5
        audio = np.zeros(int(sr * duration), dtype=np.float32)
        buf = io.BytesIO()
        sf.write(buf, audio, sr, format='WAV')
        buf.seek(0)

        openai.api_key = openai_api_key
        response = openai.audio.transcriptions.create(
            model="whisper-1",
            file=("audio.wav", buf),
            language="fr"
        )
        print(f"✅ OpenAI Whisper API working: {response.text}")
        return True
    except Exception as e:
        print(f"❌ OpenAI Whisper API error: {str(e)}")
        return False
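
# A run counts as successful when an LLM key (Mistral, or Gemini as fallback) and a
# speech-to-text key (Groq or OpenAI Whisper) both pass their checks; see the summary below.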

def main():
    print("🔑 Testing API Keys...\n")

    mistral_ok = test_mistral()
    gemini_ok = test_gemini()
    groq_ok = test_groq()
    openai_ok = test_openai_whisper()

    print("\n📊 Summary:")
    if mistral_ok and (groq_ok or openai_ok):
        print("✅ All required APIs are working! You can run the app.")
    elif not mistral_ok and gemini_ok and (groq_ok or openai_ok):
        print("✅ Gemini fallback and Groq/OpenAI Whisper are working. The app will use Gemini for the LLM.")
    else:
        print("❌ Some required APIs are not working. Please check your API keys.")
        if not groq_ok and not openai_ok:
            print("   - Groq or OpenAI Whisper is required for speech-to-text")
        if not mistral_ok and not gemini_ok:
            print("   - Either Mistral or Gemini is required for the language model")


if __name__ == "__main__":
    main()