import os

from fastapi import FastAPI, Header, HTTPException, Depends
from pydantic import BaseModel
import spacy

from text_humanizer import TextHumanizer, download_nltk_resources
from aitext_detector import AdvancedAITextDetector

API_KEY = os.environ.get("API_KEY", "dev-key")
PORT = int(os.environ.get("PORT", 7860))

app = FastAPI()

# Heavy components are created on startup; keep module-level handles.
humanizer = None
detector = None


# =========================
# Request / Response Models
# =========================
class HumanizeReq(BaseModel):
    text: str
    use_passive: bool = False
    use_synonyms: bool = False


class DetectReq(BaseModel):
    text: str


# =========================
# API Key verification
# =========================
def verify_key(x_api_key: str = Header(None)):
    """Reject any request whose x-api-key header does not match API_KEY."""
    if x_api_key != API_KEY:
        raise HTTPException(status_code=403, detail="Forbidden")
    return True


# =========================
# Routes
# =========================
@app.get("/")
def greet_json():
    return {"Hello": "World!"}


@app.on_event("startup")
def startup():
    # Fetch NLTK data and make sure the spaCy model is available
    # before constructing the humanizer and detector.
    download_nltk_resources()
    try:
        spacy.load("en_core_web_sm")
    except OSError:
        spacy.cli.download("en_core_web_sm")
    global humanizer, detector
    humanizer = TextHumanizer()
    detector = AdvancedAITextDetector()


@app.post("/humanize")
def humanize(req: HumanizeReq, _=Depends(verify_key)):
    """Rewrite the text so it reads as more human-written."""
    return {
        "humanized": humanizer.humanize_text(
            req.text, req.use_passive, req.use_synonyms
        )
    }


@app.post("/detect")
def detect(req: DetectReq, _=Depends(verify_key)):
    """Detect whether the text is AI-generated or human-written."""
    return detector.detect_ai_text(req.text)


# if __name__ == "__main__":
#     import uvicorn
#     uvicorn.run(app, host="0.0.0.0", port=PORT)
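

# ---------------------------------------------------------------------------
# Example client usage (a minimal sketch, not part of the application code).
# It assumes the service is reachable at http://localhost:7860 and that the
# x-api-key header matches the API_KEY environment variable ("dev-key" by
# default); adjust the base URL and key for your deployment.
# ---------------------------------------------------------------------------
# import requests
#
# BASE_URL = "http://localhost:7860"   # assumed local deployment
# HEADERS = {"x-api-key": "dev-key"}   # must match API_KEY on the server
#
# # Rewrite a passage with passive voice and synonym substitution enabled.
# resp = requests.post(
#     f"{BASE_URL}/humanize",
#     json={
#         "text": "The model generated this text.",
#         "use_passive": True,
#         "use_synonyms": True,
#     },
#     headers=HEADERS,
# )
# print(resp.json()["humanized"])
#
# # Score the same passage for AI-generated likelihood.
# resp = requests.post(
#     f"{BASE_URL}/detect",
#     json={"text": "The model generated this text."},
#     headers=HEADERS,
# )
# print(resp.json())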