Update app.py
app.py
CHANGED
@@ -11,8 +11,7 @@ import io
 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)
 
-#
-
+# Prompts
 CONVERSATION_PROMPT = """You are LOSS DOG, a professional profile builder. Your goal is to have natural conversations
 with users to gather information about their professional background across 9 categories:
 
@@ -137,42 +136,47 @@ Return the data in this exact structure:
 }
 
 IMPORTANT: Return ONLY the JSON. Do not add any explanation text."""
+
 class ProfileBuilder:
     def __init__(self):
-        self.conversation_history = []
         self.client = None
         self.pdf_text = None
 
     def _initialize_client(self, api_key: str) -> None:
         """Initialize OpenAI client if not already initialized"""
-        if not
-
-
-        self.client = AsyncOpenAI(api_key=api_key)
+        if not api_key.startswith("sk-"):
+            raise ValueError("Invalid API key format")
+        self.client = AsyncOpenAI(api_key=api_key)
 
-    async def process_message(self, message: str, api_key: str) -> Dict[str, Any]:
-        """Process a chat message"""
+    async def process_message(self, message: str, history: List[List[str]], api_key: str) -> Dict[str, Any]:
+        """Process a chat message using conversation history from Gradio's state"""
         try:
             # Initialize client if needed
             self._initialize_client(api_key)
-
-            #
-
+
+            # Convert Gradio history format to OpenAI message format
+            conversation_history = []
+            for human, assistant in history:
+                conversation_history.extend([
+                    {"role": "user", "content": human},
+                    {"role": "assistant", "content": assistant}
+                ])
+
+            # Add current message
+            conversation_history.append({"role": "user", "content": message})
 
             # Get AI response
             completion = await self.client.chat.completions.create(
-                model="gpt-
+                model="gpt-4-0125-preview",
                 messages=[
                     {"role": "system", "content": CONVERSATION_PROMPT},
-                    *
+                    *conversation_history
                 ],
                 temperature=0.7
             )
 
-            # Extract
+            # Extract response
             ai_message = completion.choices[0].message.content
-            self.conversation_history.append({"role": "assistant", "content": ai_message})
-
             return {"response": ai_message}
 
         except Exception as e:
@@ -187,7 +191,7 @@ class ProfileBuilder:
             text = ""
             for page in pdf_reader.pages:
                 text += page.extract_text()
-            self.pdf_text = text
+            self.pdf_text = text
             return text
         except Exception as e:
             logger.error(f"PDF extraction error: {str(e)}")
@@ -204,7 +208,7 @@ class ProfileBuilder:
 
             # Process with AI
             completion = await self.client.chat.completions.create(
-                model="gpt-
+                model="gpt-4-0125-preview",
                 messages=[
                     {"role": "system", "content": EXTRACTION_PROMPT},
                     {"role": "user", "content": f"Extract profile information from this resume:\n\n{resume_text}"}
@@ -231,18 +235,14 @@ class ProfileBuilder:
             logger.error(f"PDF processing error: {str(e)}")
             return {"error": str(e)}
 
-    async def generate_profile(self) -> tuple[Dict[str, Any], Optional[str]]:
+    async def generate_profile(self, history: List[List[str]], api_key: str) -> tuple[Dict[str, Any], Optional[str]]:
         """Generate profile from conversation or PDF"""
         try:
-
-                raise ValueError("OpenAI client not initialized")
+            self._initialize_client(api_key)
 
             # Determine source and prepare content
-            if
-                content = "\n".join(
-                    f"{msg['role']}: {msg['content']}"
-                    for msg in self.conversation_history
-                )
+            if history:
+                content = "\n".join(f"User: {msg[0]}\nAssistant: {msg[1]}" for msg in history)
                 source = "conversation"
             elif self.pdf_text:
                 content = self.pdf_text
@@ -252,7 +252,7 @@ class ProfileBuilder:
 
             # Get AI extraction
             completion = await self.client.chat.completions.create(
-                model="gpt-
+                model="gpt-4-0125-preview",
                 messages=[
                     {"role": "system", "content": EXTRACTION_PROMPT},
                     {"role": "user", "content": f"Extract profile information from this {source}:\n\n{content}"}
@@ -340,7 +340,7 @@ def create_gradio_interface():
         if not message.strip():
             return history, None, None
 
-        result = await builder.process_message(message, key)
+        result = await builder.process_message(message, history, key)
 
         if "error" in result:
             return history, {"error": result["error"]}, None
@@ -367,8 +367,8 @@ def create_gradio_interface():
         except Exception as e:
             return {"error": str(e)}, None
 
-    async def on_generate():
-        profile, filename = await builder.generate_profile()
+    async def on_generate(history: List[List[str]], key: str):
+        profile, filename = await builder.generate_profile(history, key)
         if "error" in profile:
             return {"error": profile["error"]}, None
         return profile["profile_data"], filename
@@ -377,13 +377,13 @@ def create_gradio_interface():
     msg.submit(
         on_message,
         inputs=[msg, chatbot, api_key],
-        outputs=[chatbot, profile_output,
+        outputs=[chatbot, profile_output, download_btn]
     )
 
     send.click(
         on_message,
         inputs=[msg, chatbot, api_key],
-        outputs=[chatbot, profile_output,
+        outputs=[chatbot, profile_output, download_btn]
    )
 
     process_pdf_btn.click(
@@ -394,6 +394,7 @@ def create_gradio_interface():
 
     generate_btn.click(
         on_generate,
+        inputs=[chatbot, api_key],
         outputs=[profile_output, download_btn]
     )
 
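The main behavioral change in this commit is that ProfileBuilder no longer keeps its own conversation_history; each handler receives the Gradio chatbot history and rebuilds the OpenAI message list per request. Below is a minimal sketch of that conversion, assuming the classic Gradio history format of [user, assistant] pairs; the helper name and the sample turns are hypothetical and for illustration only.

from typing import Any, Dict, List

def to_openai_messages(history: List[List[str]], message: str, system_prompt: str) -> List[Dict[str, Any]]:
    """Flatten Gradio-style [[user, assistant], ...] history plus the new user
    message into the messages list expected by chat.completions.create."""
    messages: List[Dict[str, Any]] = [{"role": "system", "content": system_prompt}]
    for human, assistant in history:
        messages.append({"role": "user", "content": human})
        messages.append({"role": "assistant", "content": assistant})
    messages.append({"role": "user", "content": message})
    return messages

# Hypothetical sample turns, mirroring what the chatbot component would hold.
history = [["Hi", "Hello! Tell me about your background."],
           ["I'm a data engineer", "Great -- at which company?"]]
print(to_openai_messages(history, "Acme Corp, since 2021",
                         "You are LOSS DOG, a professional profile builder."))

Keeping the history in the UI state rather than on the builder instance means each request is self-contained, which is why process_message and generate_profile now take history (and the API key) as arguments.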
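On the wiring side, generate_btn.click now lists inputs=[chatbot, api_key]: Gradio passes each listed component's current value to the handler positionally, which is how on_generate receives the chat history and the key. A stripped-down sketch of that pattern follows; the component names are reused from the diff, but the handler body is simplified and this is not the app's actual layout.

import gradio as gr

async def on_generate(history, key):
    # history arrives as the Chatbot value (a list of [user, assistant] pairs);
    # key arrives as the Textbox value. A real handler would call the builder here.
    return {"turns": len(history or []), "api_key_provided": bool(key)}

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    api_key = gr.Textbox(label="OpenAI API Key", type="password")
    profile_output = gr.JSON()
    generate_btn = gr.Button("Generate Profile")
    generate_btn.click(on_generate, inputs=[chatbot, api_key], outputs=[profile_output])

# demo.launch()  # uncomment to serve the sketch locally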