Jay-Rajput committed on
Commit ff38dbb · 1 Parent(s): c356965
Files changed (3)
  1. app.py +50 -72
  2. app_old.py +72 -0
  3. requirements.txt +3 -1
app.py CHANGED
@@ -1,72 +1,50 @@
- import os
- from fastapi import FastAPI, Header, HTTPException, Depends
- from pydantic import BaseModel
- from text_humanizer import TextHumanizer, download_nltk_resources
- from aitext_detector import AdvancedAITextDetector
- import spacy
-
- API_KEY = os.environ.get("API_KEY", "dev-key")
- PORT = int(os.environ.get("PORT", 7860))
-
- app = FastAPI()
- humanizer = None
- detector = None
-
- # =========================
- # Request / Response Models
- # =========================
- class HumanizeReq(BaseModel):
-     text: str
-     use_passive: bool = False
-     use_synonyms: bool = False
-
- class DetectReq(BaseModel):
-     text: str
-
- # =========================
- # API Key verification
- # =========================
- def verify_key(x_api_key: str = Header(None)):
-     if x_api_key != API_KEY:
-         raise HTTPException(status_code=403, detail="Forbidden")
-     return True
-
- # =========================
- # Routes
- # =========================
- @app.get("/")
- def greet_json():
-     return {"Hello": "World!"}
-
- @app.on_event("startup")
- def startup():
-     download_nltk_resources()
-     try:
-         spacy.load("en_core_web_sm")
-     except OSError:
-         spacy.cli.download("en_core_web_sm")
-
-     global humanizer, detector
-     humanizer = TextHumanizer()
-     detector = AdvancedAITextDetector()
-
- @app.post("/humanize")
- def humanize(req: HumanizeReq, _=Depends(verify_key)):
-     return {
-         "humanized": humanizer.humanize_text(
-             req.text,
-             req.use_passive,
-             req.use_synonyms
-         )
-     }
-
- @app.post("/detect")
- def detect(req: DetectReq, _=Depends(verify_key)):
-     """
-     Detect whether the text is AI-generated or human-written.
-     """
-     return detector.detect_ai_text(req.text)
-
- # if __name__ == "__main__":
- #     import uvicorn
- #     uvicorn.run(app, host="0.0.0.0", port=PORT)
+ import gradio as gr
+ from transformers import AutoTokenizer, AutoModelForCausalLM, pipeline
+
+ MODEL_NAME = "mistralai/Mistral-7B-Instruct-v0.2"
+
+ tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
+ model = AutoModelForCausalLM.from_pretrained(
+     MODEL_NAME,
+     device_map="auto",
+     torch_dtype="auto"
+ )
+
+ generator = pipeline(
+     "text-generation",
+     model=model,
+     tokenizer=tokenizer,
+     max_length=512,
+     temperature=0.7,
+     top_p=0.9,
+     repetition_penalty=1.1
+ )
+
+ def humanize_text(text):
+     if not text.strip():
+         return "⚠️ Please enter some text."
+
+     prompt = f"""Rewrite the following text to sound natural, fluent, and human-like.
+ Preserve meaning, names, and numbers. Avoid robotic tone.
+ Use contractions, natural sentence flow, and varied structure.
+ Do not explain, only rewrite.
+
+ Input: \"\"\"{text}\"\"\"
+ Rewritten:"""
+
+     output = generator(prompt, num_return_sequences=1)[0]["generated_text"]
+     # Strip off prompt echo if model repeats
+     if "Rewritten:" in output:
+         output = output.split("Rewritten:")[-1].strip()
+     return output
+
+ demo = gr.Interface(
+     fn=humanize_text,
+     inputs=gr.Textbox(lines=6, placeholder="Paste your text here..."),
+     outputs=gr.Textbox(label="Humanized Output"),
+     title="AI Humanizer",
+     description="Drop text and get a more natural, human-like version. Powered by Mistral-7B-Instruct."
+ )
+
+ if __name__ == "__main__":
+     demo.launch()
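
A minimal client sketch for the new Gradio app, assuming it is deployed as a Space; the Space id below is hypothetical, and "/predict" is gr.Interface's default endpoint name:

from gradio_client import Client

client = Client("owner/ai-humanizer")  # hypothetical Space id; replace with the real one
result = client.predict(
    "The meeting has been rescheduled to Monday at 10 AM.",  # maps to the single Textbox input
    api_name="/predict",                                     # default endpoint for gr.Interface
)
print(result)
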
app_old.py ADDED
@@ -0,0 +1,72 @@
+ import os
+ from fastapi import FastAPI, Header, HTTPException, Depends
+ from pydantic import BaseModel
+ from text_humanizer import TextHumanizer, download_nltk_resources
+ from aitext_detector import AdvancedAITextDetector
+ import spacy
+
+ API_KEY = os.environ.get("API_KEY", "dev-key")
+ PORT = int(os.environ.get("PORT", 7860))
+
+ app = FastAPI()
+ humanizer = None
+ detector = None
+
+ # =========================
+ # Request / Response Models
+ # =========================
+ class HumanizeReq(BaseModel):
+     text: str
+     use_passive: bool = False
+     use_synonyms: bool = False
+
+ class DetectReq(BaseModel):
+     text: str
+
+ # =========================
+ # API Key verification
+ # =========================
+ def verify_key(x_api_key: str = Header(None)):
+     if x_api_key != API_KEY:
+         raise HTTPException(status_code=403, detail="Forbidden")
+     return True
+
+ # =========================
+ # Routes
+ # =========================
+ @app.get("/")
+ def greet_json():
+     return {"Hello": "World!"}
+
+ @app.on_event("startup")
+ def startup():
+     download_nltk_resources()
+     try:
+         spacy.load("en_core_web_sm")
+     except OSError:
+         spacy.cli.download("en_core_web_sm")
+
+     global humanizer, detector
+     humanizer = TextHumanizer()
+     detector = AdvancedAITextDetector()
+
+ @app.post("/humanize")
+ def humanize(req: HumanizeReq, _=Depends(verify_key)):
+     return {
+         "humanized": humanizer.humanize_text(
+             req.text,
+             req.use_passive,
+             req.use_synonyms
+         )
+     }
+
+ @app.post("/detect")
+ def detect(req: DetectReq, _=Depends(verify_key)):
+     """
+     Detect whether the text is AI-generated or human-written.
+     """
+     return detector.detect_ai_text(req.text)
+
+ # if __name__ == "__main__":
+ #     import uvicorn
+ #     uvicorn.run(app, host="0.0.0.0", port=PORT)
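
A hedged usage sketch for the FastAPI service kept in app_old.py, assuming it runs locally on the default PORT (7860) with the default API_KEY ("dev-key"); FastAPI exposes the x_api_key parameter as the "x-api-key" request header, and the requests package is assumed to be installed:

import requests

BASE_URL = "http://localhost:7860"   # default PORT in app_old.py
HEADERS = {"x-api-key": "dev-key"}   # default API_KEY; checked by verify_key()

resp = requests.post(
    f"{BASE_URL}/humanize",
    json={"text": "The report was completed by the team.", "use_synonyms": True},
    headers=HEADERS,
    timeout=30,
)
resp.raise_for_status()
print(resp.json()["humanized"])
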
requirements.txt CHANGED
@@ -1,7 +1,9 @@
  fastapi
  uvicorn[standard]
  torch
- transformers
+ transformers>=4.40.0
+ accelerate
+ gradio
  numpy
  scipy
  spacy
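
An optional environment-check sketch for the updated dependencies: device_map="auto" in the new app.py relies on accelerate, and a 7B model realistically needs a GPU. The package names match requirements.txt; everything else is illustrative:

import importlib.util

import torch

# Confirm the packages added or pinned in this commit resolve in the current environment.
for pkg in ("transformers", "accelerate", "gradio"):
    status = "ok" if importlib.util.find_spec(pkg) else "MISSING"
    print(f"{pkg}: {status}")

# Without CUDA, device_map="auto" falls back to CPU, which is very slow for a 7B model.
print("CUDA available:", torch.cuda.is_available())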