Niansuh committed
Commit 3dbe7a0 · verified · 1 Parent(s): 679118a

Update api/routes.py

Files changed (1)
  1. api/routes.py +13 -18
api/routes.py CHANGED
@@ -1,11 +1,10 @@
-# api/routes.py
-
 import json
 from fastapi import APIRouter, Depends, HTTPException, Request, Response
-from fastapi.responses import StreamingResponse, JSONResponse
+from fastapi.responses import StreamingResponse
 from api.auth import verify_app_secret
-from api.models import ChatRequest, ImageResponseModel, ChatCompletionResponse
-from api.utils import process_gizai_stream_response, process_gizai_non_stream_response, GizAI
+from api.config import ALLOWED_MODELS
+from api.models import ChatRequest
+from api.utils import process_non_streaming_response, process_streaming_response
 from api.logger import setup_logger

 logger = setup_logger(__name__)
@@ -14,7 +13,7 @@ router = APIRouter()

 @router.options("/v1/chat/completions")
 @router.options("/api/v1/chat/completions")
-async def gizai_chat_completions_options():
+async def chat_completions_options():
     return Response(
         status_code=200,
         headers={
@@ -26,33 +25,29 @@ async def gizai_chat_completions_options():

 @router.get("/v1/models")
 @router.get("/api/v1/models")
-async def list_gizai_models():
-    return {"object": "list", "data": GizAI.models}
+async def list_models():
+    return {"object": "list", "data": ALLOWED_MODELS}

 @router.post("/v1/chat/completions")
 @router.post("/api/v1/chat/completions")
-async def gizai_chat_completions(
+async def chat_completions(
     request: ChatRequest, app_secret: str = Depends(verify_app_secret)
 ):
-    logger.info("Entering GizAI chat_completions route")
+    logger.info("Entering chat_completions route")
     logger.info(f"Processing chat completion request for model: {request.model}")

-    model = GizAI.get_model(request.model)
-    if model not in GizAI.models:
+    if request.model not in [model["id"] for model in ALLOWED_MODELS]:
         raise HTTPException(
             status_code=400,
-            detail=f"Model {request.model} is not supported. Supported models are: {', '.join(GizAI.models)}",
+            detail=f"Model {request.model} is not allowed. Allowed models are: {', '.join(model['id'] for model in ALLOWED_MODELS)}",
         )

     if request.stream:
-        if GizAI.is_image_model(model):
-            raise HTTPException(status_code=400, detail="Image generation does not support streaming.")
         logger.info("Streaming response")
-        return StreamingResponse(process_gizai_stream_response(request, model), media_type="text/event-stream")
+        return StreamingResponse(process_streaming_response(request), media_type="text/event-stream")
     else:
         logger.info("Non-streaming response")
-        response = await process_gizai_non_stream_response(request, model)
-        return response
+        return await process_non_streaming_response(request)

 @router.route('/')
 @router.route('/healthz')
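
For context, the updated route validates request.model against ALLOWED_MODELS from api/config.py, which is not part of this diff. A minimal sketch of the shape that config would need, based only on how the route uses it (a list of dicts read by their "id" key and returned as the "data" field of the model listing); the ids and extra fields below are placeholders, not the repository's real values:

# api/config.py — hypothetical sketch; only the "id" key is required by the route
ALLOWED_MODELS = [
    {"id": "example-model-small", "object": "model", "owned_by": "example"},
    {"id": "example-model-large", "object": "model", "owned_by": "example"},
]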
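
The route also hands the request to process_streaming_response and process_non_streaming_response from api/utils.py, likewise outside this diff. A rough sketch of the interfaces the route code implies: the streaming helper is called synchronously and must return something StreamingResponse can iterate (an async generator of SSE chunks), while the non-streaming helper is awaited and its result returned directly. The bodies here are illustrative stand-ins, not the project's implementation:

# Hypothetical stand-ins for the helpers imported from api/utils.py.
import json
from typing import Any, AsyncGenerator

from api.models import ChatRequest


async def process_streaming_response(request: ChatRequest) -> AsyncGenerator[str, None]:
    # StreamingResponse iterates this async generator and forwards each
    # Server-Sent Events chunk to the client as it is produced.
    for token in ("Hello", ", ", "world"):  # placeholder tokens
        chunk = {"choices": [{"delta": {"content": token}}]}
        yield f"data: {json.dumps(chunk)}\n\n"
    yield "data: [DONE]\n\n"


async def process_non_streaming_response(request: ChatRequest) -> dict[str, Any]:
    # The route awaits this and returns the dict directly, so FastAPI
    # serializes it as a JSON response body.
    return {"choices": [{"message": {"role": "assistant", "content": "Hello, world"}}]}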
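
A hedged usage example against the updated endpoints. The base URL, the Bearer-token header (how verify_app_secret reads the secret lives in api/auth.py and is not shown here), and the request fields other than model and stream are assumptions based on the OpenAI-compatible shape of the routes:

import os

import requests

BASE_URL = "http://localhost:8000"  # assumed local address of the app
HEADERS = {"Authorization": f"Bearer {os.environ['APP_SECRET']}"}  # assumed auth scheme

# List the model ids the server will accept.
print(requests.get(f"{BASE_URL}/v1/models", headers=HEADERS).json())

payload = {
    "model": "example-model-small",  # must match an id in ALLOWED_MODELS
    "messages": [{"role": "user", "content": "Hello"}],
    "stream": True,
}

# Stream the completion; each SSE line is printed as it arrives.
with requests.post(f"{BASE_URL}/v1/chat/completions", headers=HEADERS,
                   json=payload, stream=True) as resp:
    for line in resp.iter_lines():
        if line:
            print(line.decode())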