Update app.py
app.py
CHANGED
@@ -24,13 +24,25 @@ model = AutoModelForCausalLM.from_pretrained("codeparrot/codeparrot-small-text-t
 def create_docstring(gen_prompt):
     return "\"\"\"\n" + gen_prompt + "\n\"\"\"\n\n"
 
+def validate_inputs(gen_prompt, max_tokens, temperature, seed):
+    # Validate the user-provided generation settings
+    if not gen_prompt:
+        raise ValueError("English instructions cannot be empty.")
+    if max_tokens <= 0 or max_tokens > 256:
+        raise ValueError("Number of tokens to generate must be between 1 and 256.")
+    if temperature < 0 or temperature > 2.5:
+        raise ValueError("Temperature must be between 0 and 2.5.")
+    if seed < 0 or seed > 1000:
+        raise ValueError("Random seed must be between 0 and 1000.")
+
 def generate_code(gen_prompt, max_tokens, temperature=0.6, seed=42):
-
-
-
-
+    validate_inputs(gen_prompt, max_tokens, temperature, seed)
+
+    # Rest of the code generation logic here
+
     return generated_text
 
+
 def save_to_text_file(output_text):
     with open("generated_code.txt", "w") as file:
         file.write(output_text)
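The four lines removed from generate_code (old lines 28-31) are collapsed in this view, and the new body only carries a placeholder comment, so the actual generation call is not part of the diff. Below is a minimal sketch of what that body could look like, assuming the truncated checkpoint name in the hunk header is codeparrot/codeparrot-small-text-to-code, that a matching AutoTokenizer is loaded alongside the model, and that the prompt is wrapped with the existing create_docstring helper; none of this is confirmed by the diff itself.

# Sketch only: the real body of generate_code is not captured in the diff above.
from transformers import AutoTokenizer, AutoModelForCausalLM, set_seed

checkpoint = "codeparrot/codeparrot-small-text-to-code"  # assumed expansion of the truncated name
tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForCausalLM.from_pretrained(checkpoint)

def generate_code(gen_prompt, max_tokens, temperature=0.6, seed=42):
    validate_inputs(gen_prompt, max_tokens, temperature, seed)

    set_seed(seed)                         # reproducible sampling for a given seed
    prompt = create_docstring(gen_prompt)  # assumed: wrap the English instructions as a docstring prompt
    inputs = tokenizer(prompt, return_tensors="pt")
    outputs = model.generate(
        **inputs,
        max_new_tokens=max_tokens,
        do_sample=True,
        temperature=temperature,
        pad_token_id=tokenizer.eos_token_id,  # silence the missing-pad-token warning on GPT-2-style models
    )
    generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
    return generated_text

Whatever the real body is, the committed change makes the function fail fast: a call such as generate_code("", 64) now raises ValueError from validate_inputs before the model is ever invoked.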