Spaces:
Sleeping
Sleeping
File size: 2,247 Bytes
de4bc49 710ef9f de4bc49 710ef9f de4bc49 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 |
import os
from transformers import AutoTokenizer
from peft import AutoPeftModelForQuestionAnswering
from huggingface_hub import login
def main():
    """Push the LoRA/PEFT fine-tuned QA model in ./model_output to the HF Hub.

    Reads the access token from the ``roberta_token`` environment variable
    (a Space secret), logs in, loads the trained adapter + tokenizer from
    ``./model_output``, and pushes both to the target repo. If a
    ``training_info.json`` artifact exists alongside the model it is
    uploaded as well. All failures are reported to stdout; nothing raises.
    """
    # Get token from environment (using the existing roberta_token secret)
    hf_token = os.environ.get("roberta_token")
    if not hf_token:
        print("[ERROR] roberta_token not found in environment!")
        print("Make sure roberta_token is set in your Space secrets.")
        return

    try:
        print("[*] Logging into Hugging Face Hub...")
        login(token=hf_token)
        print("[OK] Login successful!")

        print("[*] Loading trained model from ./model_output...")
        # Check if the training output directory exists before loading
        if not os.path.exists("./model_output"):
            print("[ERROR] ./model_output directory not found!")
            print("Make sure you've run training first.")
            return

        # Load the already-trained PEFT model and its tokenizer
        model = AutoPeftModelForQuestionAnswering.from_pretrained("./model_output")
        tokenizer = AutoTokenizer.from_pretrained("./model_output")
        print("[OK] Model loaded successfully!")

        # Push both artifacts to the Hub under the public repo
        model_name = "AvocadoMuffin/roberta-cuad-qa"
        print(f"[*] Pushing model to Hub: {model_name}")
        model.push_to_hub(model_name, private=False)
        tokenizer.push_to_hub(model_name, private=False)
        print(f"[OK] SUCCESS! Model pushed to: https://huggingface.co/{model_name}")

        # Also push training info if it exists (best-effort extra artifact)
        training_info_path = "./model_output/training_info.json"
        if os.path.exists(training_info_path):
            from huggingface_hub import upload_file

            upload_file(
                path_or_fileobj=training_info_path,
                path_in_repo="training_info.json",
                repo_id=model_name,
                repo_type="model",
            )
            print("[OK] Training info also uploaded!")
    except Exception as e:
        # Broad catch is deliberate: this is a top-level script boundary and
        # we want a friendly diagnostic instead of a traceback in the Space log.
        print(f"[ERROR] Error: {str(e)}")
        print("Common issues:")
        print("- Invalid token")
        print("- Model name already exists (try a different name)")
        print("- Network issues")
# Script entry point (trailing scraper artifact removed from the call line).
if __name__ == "__main__":
    main()