Cleaned up requirements
Files changed:
- configuration.py +8 -2
- functions/gradio.py +3 -3
- functions/job_call.py +9 -18
- requirements.txt +2 -2
configuration.py
CHANGED
@@ -1,6 +1,7 @@
 """Global configuration for the Resumate application."""
 
 import os
+from openai import OpenAI
 from smolagents import OpenAIServerModel
 
 DEFAULT_GITHUB_PROFILE = "https://github.com/gperdrizet"
@@ -10,9 +11,14 @@ DEFAULT_GITHUB_PROFILE = "https://github.com/gperdrizet"
 # max_tokens=8000
 # )
 
+SUMMARIZER_MODEL = OpenAI(
+    base_url="https://gperdrizet--vllm-openai-compatible-summarization-serve.modal.run/v1",
+    api_key=os.environ['MODAL_TOKEN_SECRET']
+)
+
 AGENT_MODEL = OpenAIServerModel(
-    model_id="
-    api_base="https://gperdrizet--
+    model_id="deepseek-R1-qwen-32B",
+    api_base="https://gperdrizet--deepseek-R1-qwen-32B-serve.modal.run/v1",
     api_key=os.environ["MODAL_TOKEN_SECRET"],
 )
 
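A quick way to sanity-check the new SUMMARIZER_MODEL client is sketched below. It assumes MODAL_TOKEN_SECRET is exported and the Modal endpoint is reachable; the test prompt is invented for illustration, but the model-listing and chat-completion calls are the standard openai client methods the rest of the commit relies on.

    """Smoke-test sketch for SUMMARIZER_MODEL (assumes MODAL_TOKEN_SECRET is set)."""

    from configuration import SUMMARIZER_MODEL

    # The Modal endpoint is OpenAI-compatible, so pick the first served model
    model_id = SUMMARIZER_MODEL.models.list().data[0].id

    # Hypothetical prompt, purely to confirm the endpoint answers
    response = SUMMARIZER_MODEL.chat.completions.create(
        model=model_id,
        messages=[{"role": "user", "content": "Summarize: Senior Python developer, remote, 5+ years."}],
    )

    print(response.choices[0].message.content)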
functions/gradio.py
CHANGED
@@ -148,7 +148,7 @@ def process_inputs(linkedin_pdf, github_url, job_post_text, user_instructions):
     else:
         result += "ℹ️ No job post provided, attempting to use default\n"
         logger.info("No job post text provided, trying default")
-
+
         # Try to load default job call
         default_job = load_default_job_call()
         if default_job:
@@ -230,8 +230,8 @@ def get_processed_data(linkedin_pdf, github_url, job_post_text, instructions):
         if default_job:
             job_post_text = default_job
         else:
-            # No job post provided and no default available
-            logger.info("No job post provided and no default available
+            # No job post provided and no default available
+            logger.info("No job post provided and no default available")
             job_post_text = None
 
     processed_data = {
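Both hunks touch the same fallback behavior: when the user supplies no job post, the app tries a bundled default before giving up. A condensed sketch of that control flow follows; the wrapper function name is hypothetical, but the log messages and helper name come from the diff.

    import logging

    logger = logging.getLogger(__name__)

    def resolve_job_post_text(job_post_text, load_default_job_call):
        """Sketch of the fallback used in process_inputs / get_processed_data (not the app's actual function)."""
        if job_post_text:
            return job_post_text

        logger.info("No job post text provided, trying default")

        # Try to load default job call
        default_job = load_default_job_call()
        if default_job:
            return default_job

        # No job post provided and no default available
        logger.info("No job post provided and no default available")
        return None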
functions/job_call.py
CHANGED
@@ -1,12 +1,10 @@
 '''Functions for summarizing and formatting job calls.'''
 
-import os
 import json
 import logging
 from pathlib import Path
 from datetime import datetime
-from openai import OpenAI
-from configuration import JOB_CALL_EXTRACTION_PROMPT
+from configuration import JOB_CALL_EXTRACTION_PROMPT, SUMMARIZER_MODEL
 
 # pylint: disable=broad-exception-caught
 
@@ -59,14 +57,16 @@ def summarize_job_call(job_call: str) -> str:
 
     logger.info("Summarizing job call (%d characters)", len(job_call))
 
-    client = OpenAI(api_key=os.environ['MODAL_TOKEN_SECRET'])
+    # client = OpenAI(
+    #     api_key=os.environ['MODAL_TOKEN_SECRET']
+    # )
 
-    client.base_url = (
-        'https://gperdrizet--deepseek-R1-qwen-32B-serve.modal.run/v1'
-    )
+    # client.base_url = (
+    #     'https://gperdrizet--deepseek-R1-qwen-32B-serve.modal.run/v1'
+    # )
 
     # Default to first available model
-    model = client.models.list().data[0]
+    model = SUMMARIZER_MODEL.models.list().data[0]
     model_id = model.id
 
     messages = [
@@ -79,19 +79,10 @@ def summarize_job_call(job_call: str) -> str:
     completion_args = {
         'model': model_id,
         'messages': messages,
-        # "frequency_penalty": args.frequency_penalty,
-        # "max_tokens": 128,
-        # "n": args.n,
-        # "presence_penalty": args.presence_penalty,
-        # "seed": args.seed,
-        # "stop": args.stop,
-        # "stream": args.stream,
-        # "temperature": args.temperature,
-        # "top_p": args.top_p,
     }
 
     try:
-        response = client.chat.completions.create(**completion_args)
+        response = SUMMARIZER_MODEL.chat.completions.create(**completion_args)
 
     except Exception as e:
         response = None
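With this change summarize_job_call no longer builds its own OpenAI client; it reuses SUMMARIZER_MODEL from configuration.py. A condensed sketch of the resulting call path follows. The message layout is an assumption (the real messages list is built in untouched parts of the file); the model-selection and completion calls match the diff.

    import logging
    from typing import Optional

    from configuration import JOB_CALL_EXTRACTION_PROMPT, SUMMARIZER_MODEL

    logger = logging.getLogger(__name__)

    def summarize_job_call_sketch(job_call: str) -> Optional[str]:
        """Condensed version of the updated summarize_job_call flow (illustrative only)."""
        logger.info("Summarizing job call (%d characters)", len(job_call))

        # Default to first available model on the shared summarization endpoint
        model_id = SUMMARIZER_MODEL.models.list().data[0].id

        # Assumed message layout: extraction prompt as system, raw job call as user
        messages = [
            {"role": "system", "content": JOB_CALL_EXTRACTION_PROMPT},
            {"role": "user", "content": job_call},
        ]

        try:
            response = SUMMARIZER_MODEL.chat.completions.create(
                model=model_id,
                messages=messages,
            )
        except Exception:  # pylint: disable=broad-exception-caught
            return None

        return response.choices[0].message.content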
requirements.txt
CHANGED
@@ -1,6 +1,6 @@
 gradio==5.35.0
 modal
+openai
 PyPDF2
 requests
-smolagents[openai]
-openai
+smolagents[openai]
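This edit only reorders the list alphabetically; openai and smolagents[openai] were already present. A small sketch to confirm the declared set resolves after pip install -r requirements.txt (distribution names taken from the file; smolagents[openai] installs as smolagents):

    """Sketch: report installed versions of the declared dependencies."""

    from importlib.metadata import PackageNotFoundError, version

    for package in ("gradio", "modal", "openai", "PyPDF2", "requests", "smolagents"):
        try:
            print(f"{package}=={version(package)}")
        except PackageNotFoundError:
            print(f"{package} is not installed")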
|