# Auramind / mobile/export_mobile.py
# Uploaded by ibrahim256 via huggingface_hub (commit 5b26276, verified)
#!/usr/bin/env python3
"""
Export AuraMind models for mobile deployment
Creates optimized .ptl files for PyTorch Mobile
"""
import json
import os
from datetime import datetime, timezone

import torch
from torch.utils.mobile_optimizer import optimize_for_mobile
from transformers import AutoTokenizer, AutoModelForCausalLM
def export_for_mobile(model_name: str, variant: str) -> str:
    """Export one AuraMind model variant as a PyTorch Mobile lite model.

    Downloads the model and tokenizer, traces the model with TorchScript,
    optimizes the trace for mobile, and writes two files to the current
    directory: ``auramind_{variant}_mobile.ptl`` and
    ``auramind_{variant}_metadata.json``.

    Args:
        model_name: Hugging Face Hub repo id (or local path) of the model.
        variant: Variant label (e.g. "270m") used only in output filenames.

    Returns:
        Path of the saved ``.ptl`` mobile model file.
    """
    print(f"Exporting {model_name} ({variant}) for mobile...")

    # Load tokenizer and model on CPU in fp16 to keep the export lightweight.
    tokenizer = AutoTokenizer.from_pretrained(model_name)
    model = AutoModelForCausalLM.from_pretrained(
        model_name,
        torch_dtype=torch.float16,
        device_map="cpu",
        low_cpu_mem_usage=True,
        # torchscript=True makes forward() return tuples instead of
        # ModelOutput dicts, which torch.jit.trace requires.
        torchscript=True,
    )
    model.eval()

    # Representative input used to record the traced graph.
    example_text = "[Assistant Mode] Help me with my tasks"
    example_input = tokenizer(
        example_text,
        return_tensors="pt",
        max_length=512,
        truncation=True,
    )["input_ids"]

    # Trace without autograd bookkeeping — the export never backprops.
    with torch.no_grad():
        traced_model = torch.jit.trace(model, example_input)

    # FIX: optimize_for_mobile lives in torch.utils.mobile_optimizer;
    # torch.jit.optimize_for_mobile does not exist.
    optimized_model = optimize_for_mobile(traced_model)

    # Save in the lite-interpreter format consumed by PyTorch Mobile.
    output_path = f"auramind_{variant}_mobile.ptl"
    optimized_model._save_for_lite_interpreter(output_path)
    print(f"✅ Mobile model saved: {output_path}")

    # Sidecar metadata so the mobile app can validate the asset.
    metadata = {
        "model_name": model_name,
        "variant": variant,
        "tokenizer_vocab_size": tokenizer.vocab_size,
        "max_length": 512,
        # FIX: was torch.jit.get_jit_operator_version(), which is not a
        # date (nor a public API) — record an actual UTC timestamp.
        "export_date": datetime.now(timezone.utc).isoformat(),
        "pytorch_version": torch.__version__,
    }
    with open(f"auramind_{variant}_metadata.json", "w") as f:
        json.dump(metadata, f, indent=2)

    return output_path
if __name__ == "__main__":
    # Export every published AuraMind size for mobile deployment.
    for size in ("270m", "180m", "90m"):
        export_for_mobile("zail-ai/Auramind", size)
    print("\n✅ All mobile exports completed!")