Spaces:
Running
Running
Commit
·
b7c271c
1
Parent(s):
700db2b
All files
Browse files- .gitignore +173 -0
- app.py +110 -0
- requirements.txt +4 -0
- utils/constants.py +26 -0
- utils/utils.py +106 -0
.gitignore
ADDED
@@ -0,0 +1,173 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
*.pdf
|
2 |
+
|
3 |
+
# Byte-compiled / optimized / DLL files
|
4 |
+
__pycache__/
|
5 |
+
*.py[cod]
|
6 |
+
*$py.class
|
7 |
+
|
8 |
+
# C extensions
|
9 |
+
*.so
|
10 |
+
|
11 |
+
# Distribution / packaging
|
12 |
+
.Python
|
13 |
+
build/
|
14 |
+
develop-eggs/
|
15 |
+
dist/
|
16 |
+
downloads/
|
17 |
+
eggs/
|
18 |
+
.eggs/
|
19 |
+
lib/
|
20 |
+
lib64/
|
21 |
+
parts/
|
22 |
+
sdist/
|
23 |
+
var/
|
24 |
+
wheels/
|
25 |
+
share/python-wheels/
|
26 |
+
*.egg-info/
|
27 |
+
.installed.cfg
|
28 |
+
*.egg
|
29 |
+
MANIFEST
|
30 |
+
|
31 |
+
# PyInstaller
|
32 |
+
# Usually these files are written by a python script from a template
|
33 |
+
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
34 |
+
*.manifest
|
35 |
+
*.spec
|
36 |
+
|
37 |
+
# Installer logs
|
38 |
+
pip-log.txt
|
39 |
+
pip-delete-this-directory.txt
|
40 |
+
|
41 |
+
# Unit test / coverage reports
|
42 |
+
htmlcov/
|
43 |
+
.tox/
|
44 |
+
.nox/
|
45 |
+
.coverage
|
46 |
+
.coverage.*
|
47 |
+
.cache
|
48 |
+
nosetests.xml
|
49 |
+
coverage.xml
|
50 |
+
*.cover
|
51 |
+
*.py,cover
|
52 |
+
.hypothesis/
|
53 |
+
.pytest_cache/
|
54 |
+
cover/
|
55 |
+
|
56 |
+
# Translations
|
57 |
+
*.mo
|
58 |
+
*.pot
|
59 |
+
|
60 |
+
# Django stuff:
|
61 |
+
*.log
|
62 |
+
local_settings.py
|
63 |
+
db.sqlite3
|
64 |
+
db.sqlite3-journal
|
65 |
+
|
66 |
+
# Flask stuff:
|
67 |
+
instance/
|
68 |
+
.webassets-cache
|
69 |
+
|
70 |
+
# Scrapy stuff:
|
71 |
+
.scrapy
|
72 |
+
|
73 |
+
# Sphinx documentation
|
74 |
+
docs/_build/
|
75 |
+
|
76 |
+
# PyBuilder
|
77 |
+
.pybuilder/
|
78 |
+
target/
|
79 |
+
|
80 |
+
# Jupyter Notebook
|
81 |
+
.ipynb_checkpoints
|
82 |
+
|
83 |
+
# IPython
|
84 |
+
profile_default/
|
85 |
+
ipython_config.py
|
86 |
+
|
87 |
+
# pyenv
|
88 |
+
# For a library or package, you might want to ignore these files since the code is
|
89 |
+
# intended to run in multiple environments; otherwise, check them in:
|
90 |
+
# .python-version
|
91 |
+
|
92 |
+
# pipenv
|
93 |
+
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
94 |
+
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
95 |
+
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
96 |
+
# install all needed dependencies.
|
97 |
+
#Pipfile.lock
|
98 |
+
|
99 |
+
# UV
|
100 |
+
# Similar to Pipfile.lock, it is generally recommended to include uv.lock in version control.
|
101 |
+
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
102 |
+
# commonly ignored for libraries.
|
103 |
+
#uv.lock
|
104 |
+
|
105 |
+
# poetry
|
106 |
+
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
107 |
+
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
108 |
+
# commonly ignored for libraries.
|
109 |
+
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
110 |
+
#poetry.lock
|
111 |
+
|
112 |
+
# pdm
|
113 |
+
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
114 |
+
#pdm.lock
|
115 |
+
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
116 |
+
# in version control.
|
117 |
+
# https://pdm.fming.dev/latest/usage/project/#working-with-version-control
|
118 |
+
.pdm.toml
|
119 |
+
.pdm-python
|
120 |
+
.pdm-build/
|
121 |
+
|
122 |
+
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
123 |
+
__pypackages__/
|
124 |
+
|
125 |
+
# Celery stuff
|
126 |
+
celerybeat-schedule
|
127 |
+
celerybeat.pid
|
128 |
+
|
129 |
+
# SageMath parsed files
|
130 |
+
*.sage.py
|
131 |
+
|
132 |
+
# Environments
|
133 |
+
.env
|
134 |
+
.venv
|
135 |
+
env/
|
136 |
+
venv/
|
137 |
+
ENV/
|
138 |
+
env.bak/
|
139 |
+
venv.bak/
|
140 |
+
|
141 |
+
# Spyder project settings
|
142 |
+
.spyderproject
|
143 |
+
.spyproject
|
144 |
+
|
145 |
+
# Rope project settings
|
146 |
+
.ropeproject
|
147 |
+
|
148 |
+
# mkdocs documentation
|
149 |
+
/site
|
150 |
+
|
151 |
+
# mypy
|
152 |
+
.mypy_cache/
|
153 |
+
.dmypy.json
|
154 |
+
dmypy.json
|
155 |
+
|
156 |
+
# Pyre type checker
|
157 |
+
.pyre/
|
158 |
+
|
159 |
+
# pytype static type analyzer
|
160 |
+
.pytype/
|
161 |
+
|
162 |
+
# Cython debug symbols
|
163 |
+
cython_debug/
|
164 |
+
|
165 |
+
# PyCharm
|
166 |
+
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
167 |
+
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
168 |
+
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
169 |
+
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
170 |
+
#.idea/
|
171 |
+
|
172 |
+
# PyPI configuration file
|
173 |
+
.pypirc
|
app.py
ADDED
@@ -0,0 +1,110 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os

import streamlit as st

from utils.constants import model_family_mapping, model_name_mapping
from utils.utils import PitchPerfect, pdf_loader

st.set_page_config(
    page_title="Pitch Perfect",
    page_icon="📝",
    layout="wide",
)


def initialize_session_state():
    """Seed the session-state keys this app reads before any widget sets them."""
    if "api_configured" not in st.session_state:
        st.session_state.api_configured = False
    if "pitch_perfect" not in st.session_state:
        st.session_state.pitch_perfect = None


initialize_session_state()

with st.sidebar:
    st.title("Model API Configuration")

    # Display names MUST match the keys of model_name_mapping /
    # model_family_mapping in utils/constants.py, otherwise .get() below
    # silently returns None and the client is built with model=None.
    model_options = [
        "GPT-4o mini",
        "GPT-4o",
        "o1",
        "o3-mini",
        "Deepseek-V3",
        "Deepseek-r1",
        "Mistral Small 24B",
        "LLaMa 3.3 70B",
        # BUG FIX: was "DeepSeek R1 Distill", which has no entry in either mapping.
        "DeepSeek-R1-Distill-Qwen-32B",
        "Mistral 7B v0.3",
    ]
    selected_model = st.selectbox("Select which LLM to use", model_options, key="selected_model")
    model_name = model_name_mapping.get(selected_model)
    model_family = model_family_mapping.get(selected_model)

    # OpenAI-family models need an OpenAI key; everything else is served
    # through Hugging Face (Together-hosted or HF inference).
    if model_family == "gpt":
        token = st.text_input("OpenAI API Key", type="password", key="openai_key")
    else:
        token = st.text_input("Hugging Face Token", type="password", key="hf_token")

    if st.button("Initialize with the provided keys"):
        if not token:
            # Fail early with a clear message instead of a provider error later.
            st.error("Please enter an API key/token before initializing.")
            st.session_state.api_configured = False
        else:
            try:
                st.session_state.pitch_perfect = PitchPerfect(
                    model=model_name, model_family=model_family, token=token
                )
                st.session_state.api_configured = True
                st.success("Successfully configured the API clients with provided keys!")
            except Exception as e:
                st.error(f"Error initializing API clients: {str(e)}")
                st.session_state.api_configured = False

    # cv_data is always bound so the generate button below can safely test it.
    # (Previously it was only assigned inside the upload branch, so clicking
    # "Generate Cover Letter" without uploading a CV raised NameError.)
    cv_data = None
    if st.session_state.api_configured:
        upload_cv = st.file_uploader("Upload CV in PDF format", type=["pdf"])
        if upload_cv is not None:
            st.success(f"File uploaded successfully: {upload_cv.name}")

            # PyPDFLoader needs a path on disk, so persist the upload temporarily.
            temp_file = "./temp.pdf"
            with open(temp_file, "wb") as file:
                file.write(upload_cv.getvalue())

            cv_data = pdf_loader(temp_file)
            os.remove(temp_file)  # text is extracted; the PDF copy is no longer needed

if not st.session_state.api_configured:
    st.warning("Please configure the models in the sidebar to proceed")
    st.stop()

st.title("Pitch Perfect")
st.subheader("A cutting-edge app that crafts the perfect cover letter, tailored to land your dream job effortlessly!")

col1, col2 = st.columns(2)

with col1:
    job_title = st.text_input("Job Title", key="job_title")

with col2:
    company_name = st.text_input("Company Name", key="company_name")

job_description = st.text_area("Please paste the entire job description here:")

if st.button("Generate Cover Letter"):
    if cv_data is None:
        st.error("Please upload your CV in the sidebar before generating a cover letter.")
    else:
        with st.spinner("Generating Cover Letter....."):
            client = st.session_state.pitch_perfect
            cover_letter, reason = client.generate_cover_letter(
                job_title=job_title,
                company=company_name,
                job_desc=job_description,
                cv_data=cv_data,
            )

        st.success("Cover Letter Generated")
        st.markdown(cover_letter)
        with st.expander("Model Reasoning:"):
            st.write(reason)
|
requirements.txt
ADDED
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
1 |
+
huggingface_hub==0.28.1
|
2 |
+
langchain_community==0.3.17
|
3 |
+
openai==1.61.1
|
4 |
+
streamlit==1.42.0
|
utils/constants.py
ADDED
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# System prompt shared by every model backend.
system_prompt = (
    "You're an Intelligent Custom Cover Letter writer. Your task is to generate "
    "a very professional and perfect cover letter based on all the details you get."
)

# UI display name -> provider-specific model identifier.
model_name_mapping = {
    "GPT-4o mini": "gpt-4o-mini",
    "GPT-4o": "gpt-4o",
    "o1": "o1",
    "o3-mini": "o3-mini",
    "Deepseek-V3": "deepseek-ai/DeepSeek-V3",
    "Deepseek-r1": "deepseek-ai/DeepSeek-R1",
    "Mistral Small 24B": "mistralai/Mistral-Small-24B-Instruct-2501",
    "LLaMa 3.3 70B": "meta-llama/Llama-3.3-70B-Instruct",
    "DeepSeek-R1-Distill-Qwen-32B": "deepseek-ai/DeepSeek-R1-Distill-Qwen-32B",
    "Mistral 7B v0.3": "mistralai/Mistral-7B-Instruct-v0.3",
}

# UI display name -> backend family: "gpt" (OpenAI), "together" (HF via
# Together provider), or "hf" (plain HF inference).
model_family_mapping = {
    "GPT-4o mini": "gpt",
    "GPT-4o": "gpt",
    "o1": "gpt",
    "o3-mini": "gpt",
    "Deepseek-V3": "together",
    "Deepseek-r1": "together",
    "Mistral Small 24B": "together",
    "LLaMa 3.3 70B": "together",
    "DeepSeek-R1-Distill-Qwen-32B": "hf",
    "Mistral 7B v0.3": "hf",
}
|
utils/utils.py
ADDED
@@ -0,0 +1,106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from langchain_community.document_loaders import PyPDFLoader
|
2 |
+
from huggingface_hub import InferenceClient
|
3 |
+
from openai import OpenAI
|
4 |
+
from utils.constants import system_prompt
|
5 |
+
|
6 |
+
def pdf_loader(file):
    """Extract and concatenate the text of every page of the given PDF path."""
    loader = PyPDFLoader(file)
    # lazy_load() yields one Document per page; join their raw text in order.
    return "".join(page.page_content for page in loader.lazy_load())
|
15 |
+
|
16 |
+
class PitchPerfect:
    """Generate tailored cover letters from a CV and a job description.

    Routes chat-completion calls to OpenAI or to Hugging Face's
    InferenceClient (Together-hosted or plain HF inference) depending on
    the selected model family.
    """

    def __init__(self, model, model_family, token, system_prompt = system_prompt):
        """Build the backend client.

        Args:
            model: provider-specific model identifier (see utils.constants).
            model_family: "gpt", "together", or anything else for HF inference.
            token: API key for the chosen provider.
            system_prompt: system message; defaults to the module-level prompt.
        """
        self.system_prompt = system_prompt
        self.model = model

        if model_family == "gpt":
            self.client = OpenAI(api_key = token)
        elif model_family == "together":
            self.client = InferenceClient(provider = "together", api_key = token)
        else:
            self.client = InferenceClient(provider = "hf-inference", api_key = token)

    def prepare_user_prompt(self, job_title, company, job_desc, cv_data, word_limit):
        """Assemble the user message: job details + CV text + formatting instructions."""
        user_prompt = (
            f"Job Title: {job_title}\n"
            f"Company Name: {company}\n"
            f"Job Description: {job_desc}\n\n"
            f"CV Data: {cv_data}\n\n"
            f"""
Instructions:
Based on the job description and the CV Data, write a concise cover letter in under {word_limit} words, following this format and structure and using a positive and humble tone.

Include in Your Cover Letter:
Start with: "Hi [hiring manager's name],"

Introduce yourself as a graduate from [name of your university], looking for a role as a [Job Title]. Add 2 lines of brief introduction of my profile.

Begin a new paragraph with: "Here are reasons that make me a great fit for the role:".

List at least 4 reasons based on the requirements from the job description, where these should be in the order that are in the job description. Explain how my skills align with the role's requirements (Include impactful numbers and results from my CV to show alignment)

Conclude with: "Please find my CV attached below. I look forward to hearing from you."
End with: "Best wishes, [Your Name]"


Example Cover Letter:

Hi [Hiring Manager's Name],

I am a recent MSc Management graduate from [University Name], looking for a role as a [Job Title].

Here are the reasons that make me a great fit for the role:

[First reason, based on experience or passion related to the job. Include the quantifiable impact I made]
[Second reason, highlighting skills or achievements. Include the quantifiable impact I made]
[Third reason, highlighting skills or achievements. Include the quantifiable impact I made]
[Fourth reason, highlighting skills or achievements. Include the quantifiable impact I made]
Please find my CV attached below. I look forward to hearing from you.

Best wishes,
[Your Name]

"""
        )

        return user_prompt

    def generate_cover_letter(self, job_title, company, job_desc, cv_data, word_limit = 400, temp = 0.7, top_p = 0.9):
        """Call the configured chat model and return (cover_letter, reasoning).

        Returns:
            Tuple of (cover letter text, model reasoning text). Reasoning is a
            placeholder string for models that do not emit <think> blocks.
        """
        user_prompt = self.prepare_user_prompt(job_title, company, job_desc, cv_data, word_limit)

        messages = [
            {"role" : "system", "content" : self.system_prompt},
            {"role" : "user", "content" : user_prompt}
        ]

        response = self.client.chat.completions.create(
            model = self.model,
            messages = messages,
            temperature = temp,
            top_p = top_p
        )

        cover_letter = response.choices[0].message.content

        # Reasoning models (e.g. DeepSeek-R1) wrap their chain of thought in
        # <think>...</think> before the actual answer.
        if "<think>" in cover_letter:
            reason, closed, remainder = cover_letter.partition("</think>")
            reason = reason.replace("<think>", "")
            # BUG FIX: the old split("</think>")[1] raised IndexError when the
            # closing tag was missing (e.g. truncated output). Fall back to the
            # cleaned text instead of crashing.
            cover_letter = remainder if closed else reason
        else:
            reason = "This model doesn't offer reasoning."

        return cover_letter, reason
|
103 |
+
|
104 |
+
|
105 |
+
|
106 |
+
|