Not sure, but this is a very interesting topic. I tried encoding the base code in Hebrew and Arabic, and also programmed it for a DSL line for data transmission?
#7 opened by ebearden
from transformers import AutoTokenizer, AutoModelForCausalLM

def generate_text(prompt, language):
    try:
        # Load the multilingual tokenizer and model.
        # Note: bert-base-multilingual-cased is an encoder-only masked LM, so
        # generation quality with it will be poor (see the sketch below for a
        # decoder-only alternative).
        tokenizer = AutoTokenizer.from_pretrained("bert-base-multilingual-cased")
        model = AutoModelForCausalLM.from_pretrained("bert-base-multilingual-cased")
        # Tokenize the prompt and generate a continuation
        inputs = tokenizer(prompt, return_tensors="pt")
        outputs = model.generate(**inputs, max_new_tokens=100)
        return tokenizer.decode(outputs[0], skip_special_tokens=True)
    except Exception as e:
        print(f"Error generating text: {e}")
        return None
# Define prompts in different languages
prompts = {
    "Hebrew": "מהי הנגזרת של x^2?",    # "What is the derivative of x^2?"
    "Arabic": "ما هو المشتق من x^2؟",   # "What is the derivative of x^2?"
    "English": "What is the derivative of x^2?",
}
# Generate text for each prompt
for language, prompt in prompts.items():
    print(f"Language: {language}")
    print(f"Prompt: {prompt}")
    print(f"Response: {generate_text(prompt, language)}")
    print()
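One caveat on the model choice: bert-base-multilingual-cased is an encoder-only masked language model, so generate() will not produce meaningful continuations from it. Below is a minimal sketch of the same idea with a decoder-only multilingual checkpoint; "bigscience/bloom-560m" is only a placeholder here, and you should confirm that whichever checkpoint you pick was actually trained on Hebrew and Arabic (BLOOM's corpus includes Arabic but, as far as I know, not Hebrew).

from transformers import AutoTokenizer, AutoModelForCausalLM

# Sketch only: swap in a decoder-only multilingual checkpoint of your choice.
# "bigscience/bloom-560m" is a placeholder; verify its language coverage first.
model_name = "bigscience/bloom-560m"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForCausalLM.from_pretrained(model_name)

def generate_text(prompt):
    inputs = tokenizer(prompt, return_tensors="pt")
    outputs = model.generate(**inputs, max_new_tokens=100, do_sample=False)
    return tokenizer.decode(outputs[0], skip_special_tokens=True)

print(generate_text("What is the derivative of x^2?"))

Loading the tokenizer and model once, outside generate_text, also avoids re-initializing them on every call, which the loop above currently does once per prompt.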