mirxakamran893 committed on
Commit 55e3b14 · verified · 1 Parent(s): 0cc667e

Update app.py

Files changed (1)
  1. app.py +12 -20
app.py CHANGED
@@ -4,11 +4,11 @@ import os
 import faiss
 import numpy as np
 import json
-from fastapi import FastAPI, Request
+from fastapi import FastAPI
 from pydantic import BaseModel
 from sentence_transformers import SentenceTransformer
 
-# ✅ Load vector data
+# Load data
 with open("texts.json", "r", encoding="utf-8") as f:
     texts = json.load(f)
 
@@ -18,13 +18,11 @@ embed_model = SentenceTransformer("all-MiniLM-L6-v2")
 API_KEY = os.environ.get("OPENROUTER_API_KEY")
 MODEL = "qwen/qwen-2.5-coder-32b-instruct:free"
 
-# ✅ Semantic search
 def get_context(query, top_k=5):
     query_vec = embed_model.encode([query])
     D, I = index.search(np.array(query_vec), top_k)
     return "\n".join([texts[i] for i in I[0]])
 
-# ✅ Chatbot response
 def chat_fn(message, history):
     headers = {
         "Authorization": f"Bearer {API_KEY}",
@@ -32,7 +30,7 @@ def chat_fn(message, history):
     }
 
     context = get_context(message)
-    messages = [{"role": "system", "content": f"You are CODEX Assistant by Mirxa Kamran. Use this context:\n{context}"}]
+    messages = [{"role": "system", "content": f"You are CODEX Assistant. Use this:\n{context}"}]
 
     for user, assistant in history:
         messages.append({"role": "user", "content": user})
@@ -51,31 +49,25 @@ def chat_fn(message, history):
 
     return reply
 
-# ✅ Gradio UI
-demo = gr.ChatInterface(
+# ✅ Gradio Interface
+gradio_ui = gr.ChatInterface(
     fn=chat_fn,
-    title="💻 CODEX Assistant by Mirxa Kamran",
-    description="Chat with a context-aware AI code assistant.",
+    title="💻 CODEX Assistant",
+    description="Ask me coding or technical questions!",
     theme="soft"
 )
 
-# ✅ FastAPI app
+# ✅ FastAPI App
 app = FastAPI()
 
-# ✅ Mount Gradio on root path
-app = gr.mount_gradio_app(app, demo, path="/")
+# ✅ Mount Gradio at `/`
+app = gr.mount_gradio_app(app, gradio_ui, path="/")
 
-# ✅ FastAPI POST API endpoint
+# ✅ FastAPI API Endpoint
 class ChatRequest(BaseModel):
     message: str
     history: list = []
 
 @app.post("/chat")
 def api_chat(req: ChatRequest):
-    reply = chat_fn(req.message, req.history)
-    return {"response": reply}
-
-# ✅ Run manually in local dev or on Spaces
-if __name__ == "__main__":
-    import uvicorn
-    uvicorn.run(app, host="0.0.0.0", port=7860)
+    return {"response": chat_fn(req.message, req.history)}