#!/usr/bin/env python3
"""
Test with hardcoded working models that don't require authentication
"""
import requests
def test_free_inference_alternatives():
"""Test free inference alternatives that work without authentication"""
print("π Testing inference alternatives that work without auth")
print("=" * 60)
# Test 1: Try some models that might work without auth
free_models = [
"gpt2",
"distilgpt2",
"microsoft/DialoGPT-small"
]
for model in free_models:
print(f"\nπ€ Testing {model}")
url = f"https://api-inference.huggingface.co/models/{model}"
payload = {
"inputs": "Hello, how are you today?",
"parameters": {
"max_length": 50,
"temperature": 0.7
}
}
try:
response = requests.post(url, json=payload, timeout=30)
print(f"Status: {response.status_code}")
if response.status_code == 200:
result = response.json()
print(f"β
Success: {result}")
return model
elif response.status_code == 503:
print("β³ Model loading, might work later")
else:
print(f"β Error: {response.text}")
except Exception as e:
print(f"β Exception: {e}")
return None
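

# --- Hedged sketch (not part of the original test): the same request made with a
# HuggingFace token. The Inference API endpoint and the "Authorization: Bearer"
# header are standard; the function name and the use of the HF_TOKEN environment
# variable follow the recommendations printed later in this script and are
# otherwise assumptions.
def test_with_token(model="gpt2"):
    """Illustrative only: call the Inference API with an Authorization header."""
    import os  # local import so the sketch stays self-contained
    token = os.environ.get("HF_TOKEN")
    if not token:
        print("⚠️ HF_TOKEN not set - skipping authenticated test")
        return None
    url = f"https://api-inference.huggingface.co/models/{model}"
    headers = {"Authorization": f"Bearer {token}"}
    payload = {
        "inputs": "Hello, how are you today?",
        "parameters": {"max_length": 50, "temperature": 0.7}
    }
    response = requests.post(url, headers=headers, json=payload, timeout=30)
    print(f"Authenticated status: {response.status_code}")
    return response.json() if response.status_code == 200 else None
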
def test_alternative_apis():
"""Test completely different free APIs"""
print("\n" + "=" * 60)
print("TESTING ALTERNATIVE FREE APIs")
print("=" * 60)
# Note: These are examples, many might require their own API keys
alternatives = [
"OpenAI GPT (requires key)",
"Anthropic Claude (requires key)",
"Google Gemini (requires key)",
"Local Ollama (if installed)",
"Groq (free tier available)"
]
for alt in alternatives:
print(f"π {alt}")
print("\nπ‘ Recommendation: Get a free HuggingFace token from https://huggingface.co/settings/tokens")
if __name__ == "__main__":
    working_model = test_free_inference_alternatives()
    test_alternative_apis()

    print("\n" + "=" * 60)
    print("SOLUTION RECOMMENDATIONS")
    print("=" * 60)

    if working_model:
        print(f"✅ Found working model: {working_model}")
        print("🔧 You can update your backend to use this model")
    else:
        print("❌ No models work without authentication")

    print("\n🎯 IMMEDIATE SOLUTIONS:")
    print("1. Get free HuggingFace token: https://huggingface.co/settings/tokens")
    print("2. Set HF_TOKEN environment variable in your HuggingFace Space")
    print("3. Your Space might already have proper auth - the issue is local testing")
    print("4. Use the deployed Space API instead of local testing")

    print("\n🔍 DEBUGGING STEPS:")
    print("1. Check if your deployed Space has HF_TOKEN in Settings > Variables")
    print("2. Test the deployed API directly (it should work)")
    print("3. For local development, get your own HF token")