StevenChen16 committed
Commit 1c274b4 · 1 Parent(s): 865ad03

Move the spaces.GPU decorator onto the class RAGChatbot

Files changed (1)
  1. app.py +3 -2
app.py CHANGED
@@ -14,6 +14,7 @@ def create_embedding_model(model_name):
         model_kwargs={'trust_remote_code': True}
     )
 
+@spaces.GPU(duration=120)
 class RAGChatbot:
     def __init__(self):
         # First create embeddings directly
@@ -79,7 +80,7 @@ class RAGChatbot:
     Now, please guide me step by step to describe the legal issues I am facing, according to the above requirements.
     '''
 
-    @spaces.GPU
+    # @spaces.GPU
     def init_models(self):
         """Initialize the LLM model"""
         print("Initializing LLM model...")
@@ -129,7 +130,7 @@ class RAGChatbot:
         docs = retriever.invoke(query)
         return "\n".join(doc.page_content for doc in docs)
 
-    @spaces.GPU(duration=120)
+    # @spaces.GPU(duration=120)
     def generate_response(self, message, history, temperature=0.6, max_new_tokens=4096):
         """Generate streaming response with RAG context"""
         # Get relevant context
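For reference, the layout this commit moves away from decorated each GPU-bound method individually. Below is a minimal sketch of that per-method pattern, assuming a Hugging Face ZeroGPU Space where the spaces package is installed; the method bodies are elided with ... and are not part of the original diff:

import spaces


class RAGChatbot:
    @spaces.GPU  # request GPU access for the duration of this call
    def init_models(self):
        """Initialize the LLM model"""
        ...

    @spaces.GPU(duration=120)  # allow up to 120 seconds of GPU time per call
    def generate_response(self, message, history, temperature=0.6, max_new_tokens=4096):
        """Generate streaming response with RAG context"""
        ...

The commit instead places @spaces.GPU(duration=120) directly above class RAGChatbot and comments out the two per-method decorators, as shown in the diff above.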