File size: 1,924 Bytes
1a3fc34
60a2af1
1a3fc34
d02548d
 
 
60a2af1
 
 
1a3fc34
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
60a2af1
 
 
 
 
 
 
 
 
 
 
 
1a3fc34
60a2af1
 
d02548d
60a2af1
 
 
 
d02548d
60a2af1
 
 
 
 
d02548d
60a2af1
 
d02548d
60a2af1
 
75eb629
60a2af1
 
 
d02548d
60a2af1
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
import os
import json
import requests
from lexer import lexer
from parser import Parser
from semantico import AnalizadorSemantico
from codigo_intermedio import GeneradorIntermedio
from sugerencias_nlp import procesar_comentarios

# Hugging Face API token, read from the environment; defaults to "" so the
# script still runs (requests will simply be unauthenticated / rate-limited).
HF_TOKEN = os.environ.get("HF_TOKEN", "")
HEADERS = {"Authorization": f"Bearer {HF_TOKEN}"}
# Hosted inference endpoint for the gpt2 text-generation model.
API_URL = "https://api-inference.huggingface.co/models/gpt2"

def sugerencia_nlp_error(error_msg):
    """Return a model-generated suggestion string for a semantic error.

    Sends *error_msg* to the Hugging Face inference API (gpt2) and returns
    the generated continuation. Always returns a string: on any network
    failure, non-200 status, or unexpected response shape it returns a
    "(sin sugerencia: ...)" placeholder instead of raising, so one failed
    call cannot abort the whole analysis run.
    """
    payload = {
        "inputs": f"ERROR: {error_msg}\nSUGERENCIA:",
        "parameters": {
            "max_new_tokens": 30,
            "temperature": 0.8,
            "return_full_text": False
        }
    }
    try:
        # Bound the request: without a timeout a hung API stalls the program.
        response = requests.post(API_URL, headers=HEADERS, json=payload,
                                 timeout=30)
    except requests.RequestException as exc:
        return f"(sin sugerencia: {exc})"
    if response.status_code == 200:
        try:
            return response.json()[0]["generated_text"].strip()
        except (ValueError, KeyError, IndexError, TypeError):
            # 200 but malformed body (e.g. error JSON) — degrade gracefully.
            return "(sin sugerencia: respuesta inesperada)"
    return f"(sin sugerencia: {response.status_code})"

def main():
    """Run the full pipeline: lex, parse, semantic analysis, NLP suggestions,
    JSON report, and intermediate-code generation.

    Reads source from ``entrada.txt`` and writes ``analisis.json`` and
    ``codigo_intermedio.txt`` in the current directory.
    """
    with open("entrada.txt", "r", encoding="utf-8") as f:
        codigo = f.read()

    # Front end: tokens -> AST -> semantic results.
    tokens = lexer(codigo)
    parser = Parser(tokens)
    ast = parser.parse()
    semantico = AnalizadorSemantico(ast)
    resultado = semantico.analizar()

    # Attach an NLP suggestion to each semantic error (one API call each).
    errores_ext = [
        {"mensaje": err, "sugerencia": sugerencia_nlp_error(err)}
        for err in resultado["errores_semanticos"]
    ]

    # procesar_comentarios yields (comentario, sugerencia) pairs.
    comentarios_ext = [
        {"comentario": c, "sugerencia": s}
        for c, s in procesar_comentarios(codigo)
    ]

    analisis_completo = {
        "variables_declaradas": resultado["variables_declaradas"],
        "errores_semanticos": errores_ext,
        "comentarios": comentarios_ext
    }

    with open("analisis.json", "w", encoding="utf-8") as f:
        # ensure_ascii=False keeps Spanish accented characters readable
        # in the report instead of \uXXXX escapes.
        json.dump(analisis_completo, f, indent=2, ensure_ascii=False)

    # Back end: emit three-address-style intermediate code, one op per line.
    generador = GeneradorIntermedio()
    intermedio = generador.generar(ast)

    with open("codigo_intermedio.txt", "w", encoding="utf-8") as f:
        for linea in intermedio:
            f.write(linea + "\n")

# Run the pipeline only when executed as a script, not when imported.
if __name__ == "__main__":
    main()