# Source: Hugging Face Spaces page (Space status: "Sleeping") — header text left over from a web scrape.
"""Download a quantized OpenHermes-2 Mistral 7B (GGUF) model with ctransformers
and run a one-prompt smoke test.

Side effects: downloads the model file from the Hugging Face Hub on first run
(several GB), then loads it into memory and generates one completion.
"""
import os
from ctransformers import AutoModelForCausalLM

# Define the model repository and file
model_repo = "TheBloke/OpenHermes-2-Mistral-7B-GGUF"
model_file = "openhermes-2-mistral-7b.Q4_K_M.gguf"


def main() -> None:
    """Download/load the model, then print the response to a test prompt."""
    # Download the model using ctransformers
    print(f"Downloading {model_file} from {model_repo}...")
    model = AutoModelForCausalLM.from_pretrained(
        model_repo,
        model_file=model_file,
        model_type="mistral",
        # Uncomment the following line if you have a CUDA-capable GPU
        # gpu_layers=50
    )
    print("Model downloaded and loaded successfully.")

    # Test the model with a simple prompt
    prompt = "AI is going to"
    response = model(prompt)
    print(f"Prompt: {prompt}\nResponse: {response}")


# Guard the entry point so importing this module does not trigger the
# multi-gigabyte download and inference at import time.
if __name__ == "__main__":
    main()