Spaces:
Runtime error
Runtime error
Commit
·
82ccde1
0
Parent(s):
Initial commit
Browse files- .devcontainer/devcontainer.json +32 -0
- .gitignore +3 -0
- .streamlit/config.toml +7 -0
- AI_Powered_Resume_Analyzer.ipynb +384 -0
- LICENSE.md +11 -0
- LinkedIn_scraper_with_Selenium.ipynb +1452 -0
- README.md +129 -0
- Thumbnail.jpg +0 -0
- __pycache__/chat.cpython-311.pyc +0 -0
- __pycache__/config.cpython-311.pyc +0 -0
- app.py +523 -0
- config.py +0 -0
- packages.txt +1 -0
- requirements.txt +11 -0
.devcontainer/devcontainer.json
ADDED
@@ -0,0 +1,32 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"name": "Python 3",
|
3 |
+
"image": "mcr.microsoft.com/devcontainers/python:1-3.11-bullseye",
|
4 |
+
"customizations": {
|
5 |
+
"codespaces": {
|
6 |
+
"openFiles": [
|
7 |
+
"README.md",
|
8 |
+
"app.py"
|
9 |
+
]
|
10 |
+
},
|
11 |
+
"vscode": {
|
12 |
+
"settings": {},
|
13 |
+
"extensions": [
|
14 |
+
"ms-python.python",
|
15 |
+
"ms-python.vscode-pylance"
|
16 |
+
]
|
17 |
+
}
|
18 |
+
},
|
19 |
+
"updateContentCommand": "[ -f packages.txt ] && sudo apt update && sudo apt upgrade -y && sudo xargs apt install -y <packages.txt; [ -f requirements.txt ] && pip3 install --user -r requirements.txt; pip3 install --user streamlit; echo '✅ Packages installed and Requirements met'",
|
20 |
+
"postAttachCommand": {
|
21 |
+
"server": "streamlit run app.py --server.enableCORS false --server.enableXsrfProtection false"
|
22 |
+
},
|
23 |
+
"portsAttributes": {
|
24 |
+
"8501": {
|
25 |
+
"label": "Application",
|
26 |
+
"onAutoForward": "openPreview"
|
27 |
+
}
|
28 |
+
},
|
29 |
+
"forwardPorts": [
|
30 |
+
8501
|
31 |
+
]
|
32 |
+
}
|
.gitignore
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
.env
|
2 |
+
.streamlit/secrets.toml
|
3 |
+
chat.py
|
.streamlit/config.toml
ADDED
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
[theme]
|
2 |
+
base="dark"
|
3 |
+
primaryColor="#FF4B4B"
|
4 |
+
backgroundColor="#0E1117"
|
5 |
+
secondaryBackgroundColor="#262730"
|
6 |
+
textColor="#FAFAFA"
|
7 |
+
font="sans serif"
|
AI_Powered_Resume_Analyzer.ipynb
ADDED
@@ -0,0 +1,384 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"cells": [
|
3 |
+
{
|
4 |
+
"cell_type": "code",
|
5 |
+
"execution_count": 67,
|
6 |
+
"metadata": {},
|
7 |
+
"outputs": [],
|
8 |
+
"source": [
|
9 |
+
"from PyPDF2 import PdfReader\n",
|
10 |
+
"from langchain.text_splitter import RecursiveCharacterTextSplitter\n",
|
11 |
+
"from langchain.embeddings.openai import OpenAIEmbeddings\n",
|
12 |
+
"from langchain.vectorstores import FAISS\n",
|
13 |
+
"from langchain.chat_models import ChatOpenAI\n",
|
14 |
+
"from langchain.chains.question_answering import load_qa_chain\n",
|
15 |
+
"import warnings\n",
|
16 |
+
"warnings.filterwarnings('ignore')"
|
17 |
+
]
|
18 |
+
},
|
19 |
+
{
|
20 |
+
"cell_type": "code",
|
21 |
+
"execution_count": 62,
|
22 |
+
"metadata": {},
|
23 |
+
"outputs": [
|
24 |
+
{
|
25 |
+
"name": "stdout",
|
26 |
+
"output_type": "stream",
|
27 |
+
"text": [
|
28 |
+
"<PyPDF2._reader.PdfReader object at 0x00000154154007D0>\n"
|
29 |
+
]
|
30 |
+
}
|
31 |
+
],
|
32 |
+
"source": [
|
33 |
+
"pdf = \"/content/resume.pdf\"\n",
|
34 |
+
"pdf_reader = PdfReader(pdf)\n",
|
35 |
+
"print(pdf_reader)"
|
36 |
+
]
|
37 |
+
},
|
38 |
+
{
|
39 |
+
"cell_type": "code",
|
40 |
+
"execution_count": null,
|
41 |
+
"metadata": {},
|
42 |
+
"outputs": [],
|
43 |
+
"source": [
|
44 |
+
"# extrat text from each page separately\n",
|
45 |
+
"text = \"\"\n",
|
46 |
+
"for page in pdf_reader.pages:\n",
|
47 |
+
" text += page.extract_text()\n",
|
48 |
+
"\n",
|
49 |
+
"print(text)"
|
50 |
+
]
|
51 |
+
},
|
52 |
+
{
|
53 |
+
"cell_type": "code",
|
54 |
+
"execution_count": 64,
|
55 |
+
"metadata": {},
|
56 |
+
"outputs": [
|
57 |
+
{
|
58 |
+
"data": {
|
59 |
+
"text/plain": [
|
60 |
+
"['GOPINATH ASOKAN \\nData Science Enthusiast \\nPassionate data science enthusiast with a strong foundation in diverse industries. Equipped with 5+ years\\nof industry experience, highly skilled in problem-solving, and project management. Eager to seamlessly\\nmerge analytical skills with artistic expertise for impactful insights and innovation. Excited to apply data-\\ndriven strategies to challenges, contribute proactively and effectively to the field, and drive innovation. \\[email protected] \\nlinkedin.com/in/gopiashokan \\ngithub.com/gopiashokan \\nWORK EXPERIENCE \\nSenior Process Executive - Operations \\nMahendra Next Wealth IT India Pvt Ltd \\n05/2019 - 12/2022\\n, \\n \\nNamakkal',\n",
|
61 |
+
" \"linkedin.com/in/gopiashokan \\ngithub.com/gopiashokan \\nWORK EXPERIENCE \\nSenior Process Executive - Operations \\nMahendra Next Wealth IT India Pvt Ltd \\n05/2019 - 12/2022\\n, \\n \\nNamakkal \\nProficiently executed image editing tasks for bigbasket's\\nproduct images, encompassing renaming, retouching, \\ncolor\\ncorrection, content cropping, and photo manipulation. \\nExpertly designed captivating banners and creatives for\\nadvertisements, skillfully integrating combo packs, multi-\\npacks, and hero images into Bigbasket's product pages. \\nContributed to taxonomy by mapping tax codes, manually\\nidentified competitor products, and verified AI-generated\\noutputs for accuracy, assisting in AI improvement efforts.\",\n",
|
62 |
+
" 'Contributed to taxonomy by mapping tax codes, manually\\nidentified competitor products, and verified AI-generated\\noutputs for accuracy, assisting in AI improvement efforts. \\nAssociate Engineer - Quality \\nRudra Blades and Edges Pvt Ltd \\n07/2018 - 12/2018\\n, \\n \\nChennai \\nPerformed continuous and comprehensive material analysis\\nto ensure structural integrity and precise alignment with\\ncustomer specifications as maintaining quality standards. \\nConsistently maintained high quality standards at critical\\nwelding, grinding, and precision parallelism stations, by\\nensuring unwavering quality in the production process. \\nProficiently managed and coordinated material dispatch',\n",
|
63 |
+
" 'welding, grinding, and precision parallelism stations, by\\nensuring unwavering quality in the production process. \\nProficiently managed and coordinated material dispatch\\nwhile meeting both regular order requirements and the\\npriority to ensure prompt and reliable customer service. \\nGraduate Engineer Trainee - Quality \\nLear Automotive India Pvt Ltd \\n07/2016 - 07/2017\\n, \\n \\nChennai \\nEfficiently managed productive customer meetings while\\nskillfully addressing challenging issues through \\ndetailed\\ncomprehensive Minutes of Meeting documentation. \\nMaintained stock alignment, meticulously validating the\\nperpetual and wall-to-wall inventory in physical and QAD',\n",
|
64 |
+
" 'detailed\\ncomprehensive Minutes of Meeting documentation. \\nMaintained stock alignment, meticulously validating the\\nperpetual and wall-to-wall inventory in physical and QAD\\nsoftware systems to ensure inventory precision & accuracy. \\nImplemented Q-Point, ICA, & PCA for quality enhancement\\nand has managed up-to-date Quality Notice documentation\\nwith 8D reports in the SQTS system for the issue resolution. \\nEDUCATION \\nMaster Data Science \\nGUVI Geeks Network Pvt Ltd \\n2023\\n, \\n \\nChennai \\nB.E. in Mechanical Engineering \\nKnowledge Institute of Technology \\n2012 - 2016\\n, \\n \\nSalem \\nSKILLS \\nPython \\nPostgreSQL \\nMongoDB \\nTableau \\nPowerBI \\nMachine Learning \\nDeep Learning \\nNLP \\nLLM \\nOpenAI \\nSelenium \\nAirflow',\n",
|
65 |
+
" 'Knowledge Institute of Technology \\n2012 - 2016\\n, \\n \\nSalem \\nSKILLS \\nPython \\nPostgreSQL \\nMongoDB \\nTableau \\nPowerBI \\nMachine Learning \\nDeep Learning \\nNLP \\nLLM \\nOpenAI \\nSelenium \\nAirflow \\nHadoop \\nPySpark \\nOCR \\nNumpy \\nPandas \\nStreamlit \\nPlotly \\nMatplotlib \\nSeaborn \\nCERTIFICATE \\nMicrosoft AI-900 Azure AI Fundamentals\\n (2023)\\n \\nPROJECTS \\nAI Resume Analyzer and LinkedIn Scraper with Selenium \\nBuilt an \\nInnovative \\nAI-driven Streamlit app with LLM, OpenAI for\\nprecise resume analysis and suggestions. Integrated Selenium for\\ndynamic LinkedIn data extraction, enhancing career insights. \\nAI excels in resume analysis - summarizing, strengths, weaknesses,',\n",
|
66 |
+
" 'precise resume analysis and suggestions. Integrated Selenium for\\ndynamic LinkedIn data extraction, enhancing career insights. \\nAI excels in resume analysis - summarizing, strengths, weaknesses,\\nand suggesting job titles. Leveraging Selenium for LinkedIn data, it\\nstreamlines job searches for comprehensive career insights. \\nTools: Python, LLM, OpenAI, Selenium, Streamlit, Numpy, Pandas.\\n \\ngithub.com/gopiashokan/AI-Resume-Analyzer-LinkedIn-Scraper.git\\n \\nRetail Sales Forecast \\nImplemented ML for precise retail sales predictions, emphasizing\\npreprocessing and algorithm selection. Streamlined Streamlit app\\nintegrates EDA, \\noptimizing decision-making in dynamic retail.',\n",
|
67 |
+
" 'Implemented ML for precise retail sales predictions, emphasizing\\npreprocessing and algorithm selection. Streamlined Streamlit app\\nintegrates EDA, \\noptimizing decision-making in dynamic retail. \\nRevolutionized retail decisions with advanced ML, using a streamlit\\napplication integrating EDA for precise sales forecasts, \\nfeature\\ncomparison & actionable insights by identifying trends & patterns. \\nTools: Python, \\nsklearn, PostgreSQL, Streamlit, Numpy, Pandas, Plotly,\\nMatplotlib, Seaborn.\\n \\nhttps://github.com/gopiashokan/Retail-Sales-Forecast.git\\n \\nIndustrial Copper Modeling \\nLeveraged advanced ML regression models for precise pricing and\\nclassification, enhancing targeted customer engagement by',\n",
|
68 |
+
" 'Industrial Copper Modeling \\nLeveraged advanced ML regression models for precise pricing and\\nclassification, enhancing targeted customer engagement by\\npredicting potential customers in the copper industry landscape. \\nExpert in data preprocessing, feature engineering, cross-validation,\\nhyperparameter tuning, and Streamlit app development, \\nskillfully\\napplying the skills to solve real-world manufacturing challenges. \\nTools: Python, sklearn, Streamlit, Matplotlib,Seaborn, Numpy,Pandas.\\n \\ngithub.com/gopiashokan/Industrial-Copper-Modeling.git\\n \\nAirbnb Analysis \\nLeverage Streamlit for dynamic exploratory data analysis (EDA)\\nwith interactive charts. Extend insights through a comprehensive',\n",
|
69 |
+
" 'Airbnb Analysis \\nLeverage Streamlit for dynamic exploratory data analysis (EDA)\\nwith interactive charts. Extend insights through a comprehensive\\nTableau dashboard, uncovering trends and patterns in the dataset. \\nAnalyzed pricing dynamics & availability patterns in the Hospitality\\nsector, enabling informed decision-making and empowering\\nstakeholders to make choices based on insights and visualizations. \\nTools: Python, MongoDB, PostgreSQL, Tableau, Streamlit, Plotly,\\nPandas.\\n \\ngithub.com/gopiashokan/Airbnb-Analysis.git']"
|
70 |
+
]
|
71 |
+
},
|
72 |
+
"execution_count": 64,
|
73 |
+
"metadata": {},
|
74 |
+
"output_type": "execute_result"
|
75 |
+
}
|
76 |
+
],
|
77 |
+
"source": [
|
78 |
+
"# Split the long text into small chunks\n",
|
79 |
+
"text_splitter = RecursiveCharacterTextSplitter(chunk_size=700,\n",
|
80 |
+
" chunk_overlap=200,\n",
|
81 |
+
" length_function=len)\n",
|
82 |
+
"\n",
|
83 |
+
"chunks = text_splitter.split_text(text=text)\n",
|
84 |
+
"chunks"
|
85 |
+
]
|
86 |
+
},
|
87 |
+
{
|
88 |
+
"cell_type": "code",
|
89 |
+
"execution_count": 65,
|
90 |
+
"metadata": {},
|
91 |
+
"outputs": [
|
92 |
+
{
|
93 |
+
"data": {
|
94 |
+
"text/plain": [
|
95 |
+
"'GOPINATH ASOKAN \\nData Science Enthusiast \\nPassionate data science enthusiast with a strong foundation in diverse industries. Equipped with 5+ years\\nof industry experience, highly skilled in problem-solving, and project management. Eager to seamlessly\\nmerge analytical skills with artistic expertise for impactful insights and innovation. Excited to apply data-\\ndriven strategies to challenges, contribute proactively and effectively to the field, and drive innovation. \\[email protected] \\nlinkedin.com/in/gopiashokan \\ngithub.com/gopiashokan \\nWORK EXPERIENCE \\nSenior Process Executive - Operations \\nMahendra Next Wealth IT India Pvt Ltd \\n05/2019 - 12/2022\\n, \\n \\nNamakkal'"
|
96 |
+
]
|
97 |
+
},
|
98 |
+
"execution_count": 65,
|
99 |
+
"metadata": {},
|
100 |
+
"output_type": "execute_result"
|
101 |
+
}
|
102 |
+
],
|
103 |
+
"source": [
|
104 |
+
"chunks[0]"
|
105 |
+
]
|
106 |
+
},
|
107 |
+
{
|
108 |
+
"cell_type": "code",
|
109 |
+
"execution_count": 66,
|
110 |
+
"metadata": {},
|
111 |
+
"outputs": [
|
112 |
+
{
|
113 |
+
"data": {
|
114 |
+
"text/plain": [
|
115 |
+
"\"linkedin.com/in/gopiashokan \\ngithub.com/gopiashokan \\nWORK EXPERIENCE \\nSenior Process Executive - Operations \\nMahendra Next Wealth IT India Pvt Ltd \\n05/2019 - 12/2022\\n, \\n \\nNamakkal \\nProficiently executed image editing tasks for bigbasket's\\nproduct images, encompassing renaming, retouching, \\ncolor\\ncorrection, content cropping, and photo manipulation. \\nExpertly designed captivating banners and creatives for\\nadvertisements, skillfully integrating combo packs, multi-\\npacks, and hero images into Bigbasket's product pages. \\nContributed to taxonomy by mapping tax codes, manually\\nidentified competitor products, and verified AI-generated\\noutputs for accuracy, assisting in AI improvement efforts.\""
|
116 |
+
]
|
117 |
+
},
|
118 |
+
"execution_count": 66,
|
119 |
+
"metadata": {},
|
120 |
+
"output_type": "execute_result"
|
121 |
+
}
|
122 |
+
],
|
123 |
+
"source": [
|
124 |
+
"chunks[1]"
|
125 |
+
]
|
126 |
+
},
|
127 |
+
{
|
128 |
+
"cell_type": "markdown",
|
129 |
+
"metadata": {},
|
130 |
+
"source": [
|
131 |
+
"\"linkedin.com/in/gopiashokan \\ngithub.com/gopiashokan \\nWORK EXPERIENCE \\nSenior Process Executive - Operations \\nMahendra Next Wealth IT India Pvt Ltd \\n05/2019 - 12/2022\\n, \\n \\nNamakkal\"\n",
|
132 |
+
"\n",
|
133 |
+
"The above text is common(overlap) for both chunks[0] and chunks[1].\n",
|
134 |
+
"(chunk_overlap=200 - maximum length, it means length is not exceed 200)"
|
135 |
+
]
|
136 |
+
},
|
137 |
+
{
|
138 |
+
"cell_type": "code",
|
139 |
+
"execution_count": 7,
|
140 |
+
"metadata": {},
|
141 |
+
"outputs": [],
|
142 |
+
"source": [
|
143 |
+
"openai_api_key = input('Enter you OpenAI API Key: ')"
|
144 |
+
]
|
145 |
+
},
|
146 |
+
{
|
147 |
+
"cell_type": "code",
|
148 |
+
"execution_count": 52,
|
149 |
+
"metadata": {},
|
150 |
+
"outputs": [],
|
151 |
+
"source": [
|
152 |
+
"def openai(openai_api_key, chunks, analyze):\n",
|
153 |
+
"\n",
|
154 |
+
" # Using OpenAI service for embedding\n",
|
155 |
+
" embeddings = OpenAIEmbeddings(openai_api_key=openai_api_key)\n",
|
156 |
+
"\n",
|
157 |
+
" # Facebook AI Similarity Serach library help us to convert text data to numerical vector\n",
|
158 |
+
" vectorstores = FAISS.from_texts(chunks, embedding=embeddings)\n",
|
159 |
+
"\n",
|
160 |
+
" # compares the query and chunks, enabling the selection of the top 'K' most similar chunks based on their similarity scores.\n",
|
161 |
+
" docs = vectorstores.similarity_search(query=analyze, k=3)\n",
|
162 |
+
"\n",
|
163 |
+
" # creates an OpenAI object, using the ChatGPT 3.5 Turbo model\n",
|
164 |
+
" llm = ChatOpenAI(model='gpt-3.5-turbo', api_key=openai_api_key)\n",
|
165 |
+
"\n",
|
166 |
+
" # question-answering (QA) pipeline, making use of the load_qa_chain function\n",
|
167 |
+
" chain = load_qa_chain(llm=llm, chain_type='stuff')\n",
|
168 |
+
"\n",
|
169 |
+
" response = chain.run(input_documents=docs, question=analyze)\n",
|
170 |
+
" return response"
|
171 |
+
]
|
172 |
+
},
|
173 |
+
{
|
174 |
+
"cell_type": "code",
|
175 |
+
"execution_count": 55,
|
176 |
+
"metadata": {},
|
177 |
+
"outputs": [
|
178 |
+
{
|
179 |
+
"name": "stdout",
|
180 |
+
"output_type": "stream",
|
181 |
+
"text": [
|
182 |
+
"The resume belongs to Gopinath Asokan, who is a data science enthusiast with a strong foundation in diverse industries. He has 5+ years of industry experience and is highly skilled in problem-solving and project management. Gopinath is eager to merge his analytical skills with artistic expertise for impactful insights and innovation. He is excited to apply data-driven strategies to challenges and contribute proactively to the field. \n",
|
183 |
+
"\n",
|
184 |
+
"In terms of work experience, Gopinath has worked as a Senior Process Executive - Operations at Mahendra Next Wealth IT India Pvt Ltd from 05/2019 to 12/2022. He was responsible for precise resume analysis and suggestions, as well as integrating Selenium for dynamic LinkedIn data extraction. He also implemented machine learning for precise retail sales predictions, emphasizing preprocessing and algorithm selection.\n",
|
185 |
+
"\n",
|
186 |
+
"Gopinath has also worked as an Associate Engineer - Quality at Rudra Blades and Edges Pvt Ltd from 07/2018 to 12/2018. He performed continuous and comprehensive material analysis to ensure structural integrity and maintained high-quality standards at critical stations.\n",
|
187 |
+
"\n",
|
188 |
+
"Furthermore, Gopinath worked as a Graduate Engineer Trainee - Quality at Lear Automotive India Pvt Ltd from 07/2016 to 07/2017. He efficiently managed productive customer meetings and maintained stock alignment.\n",
|
189 |
+
"\n",
|
190 |
+
"In terms of education, Gopinath has a Master's degree in Data Science from GUVI Geeks Network Pvt Ltd (expected completion in 2023) and a Bachelor's degree in Mechanical Engineering from Knowledge Institute of Technology (2012-2016).\n",
|
191 |
+
"\n",
|
192 |
+
"Gopinath possesses a wide range of technical skills, including Python, PostgreSQL, MongoDB, Tableau, PowerBI, Machine Learning, Deep Learning, NLP, LLM, OpenAI, Selenium, Airflow, Hadoop, PySpark, OCR, Numpy, Pandas, Streamlit, Plotly, Matplotlib, and Seaborn.\n",
|
193 |
+
"\n",
|
194 |
+
"He has also completed the Microsoft AI-900 Azure AI Fundamentals certificate.\n",
|
195 |
+
"\n",
|
196 |
+
"Gopinath has worked on several projects, including an AI Resume Analyzer and LinkedIn Scraper with Selenium, a Retail Sales Forecast using ML, Industrial Copper Modeling using ML, and Airbnb Analysis using Streamlit and Tableau.\n",
|
197 |
+
"\n",
|
198 |
+
"In conclusion, Gopinath Asokan is a highly skilled data science enthusiast with a diverse industry background. He has experience in resume analysis, retail sales predictions, and quality assurance. He is proficient in various technical skills and has completed relevant certifications. Gopinath has also worked on several data science projects, showcasing his expertise in machine learning and analysis.\n"
|
199 |
+
]
|
200 |
+
}
|
201 |
+
],
|
202 |
+
"source": [
|
203 |
+
"def resume_summary(query_with_chunks):\n",
|
204 |
+
" query = f''' need to detailed summarization of below resume and finally conclude them\n",
|
205 |
+
"\n",
|
206 |
+
" \"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\n",
|
207 |
+
" {query_with_chunks}\n",
|
208 |
+
" \"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\n",
|
209 |
+
" '''\n",
|
210 |
+
" return query\n",
|
211 |
+
"\n",
|
212 |
+
"summary = resume_summary(query_with_chunks=chunks)\n",
|
213 |
+
"summary_result = openai(openai_api_key=openai_api_key, chunks=chunks, analyze=summary)\n",
|
214 |
+
"print(summary_result)"
|
215 |
+
]
|
216 |
+
},
|
217 |
+
{
|
218 |
+
"cell_type": "code",
|
219 |
+
"execution_count": 56,
|
220 |
+
"metadata": {},
|
221 |
+
"outputs": [
|
222 |
+
{
|
223 |
+
"name": "stdout",
|
224 |
+
"output_type": "stream",
|
225 |
+
"text": [
|
226 |
+
"Strengths of Gopinath Asokan's resume:\n",
|
227 |
+
"\n",
|
228 |
+
"1. Strong foundation in diverse industries: Gopinath's resume highlights his experience and expertise in various industries, showcasing his adaptability and ability to work in different environments.\n",
|
229 |
+
"\n",
|
230 |
+
"2. 5+ years of industry experience: Gopinath's extensive experience in the industry demonstrates his ability to handle real-world challenges and shows his level of expertise in the field.\n",
|
231 |
+
"\n",
|
232 |
+
"3. Strong problem-solving and project management skills: Gopinath's resume emphasizes his skills in problem-solving and project management, which are crucial in the field of data science. This indicates his ability to effectively handle complex problems and successfully manage projects.\n",
|
233 |
+
"\n",
|
234 |
+
"4. Analytical skills combined with artistic expertise: Gopinath's resume mentions his eagerness to merge his analytical skills with artistic expertise, indicating his ability to think creatively and produce impactful insights and innovations.\n",
|
235 |
+
"\n",
|
236 |
+
"5. Data-driven strategies: Gopinath's excitement to apply data-driven strategies to challenges shows his understanding of the importance of data analysis in decision-making and problem-solving.\n",
|
237 |
+
"\n",
|
238 |
+
"6. Proficient in technical skills: Gopinath possesses a wide range of technical skills, including Python, machine learning, NLP, Selenium, and more. This showcases his ability to utilize various tools and technologies to solve complex problems and deliver high-quality work.\n",
|
239 |
+
"\n",
|
240 |
+
"7. Relevant certifications: Gopinath's completion of the Microsoft AI-900 Azure AI Fundamentals certificate demonstrates his commitment to continuous learning and staying up-to-date with the latest technologies and advancements in the field of data science.\n",
|
241 |
+
"\n",
|
242 |
+
"8. Experienced in various projects: Gopinath's experience in projects such as AI Resume Analyzer and LinkedIn Scraper, Retail Sales Forecast using ML, and Industrial Copper Modeling using ML showcases his practical application of data science techniques and his ability to deliver successful projects.\n",
|
243 |
+
"\n",
|
244 |
+
"In conclusion, Gopinath Asokan's resume exhibits strengths in his industry experience, problem-solving skills, technical expertise, and project management abilities. His diverse background, eagerness to merge analytical and artistic skills, and relevant certifications make him a strong candidate in the field of data science.\n"
|
245 |
+
]
|
246 |
+
}
|
247 |
+
],
|
248 |
+
"source": [
|
249 |
+
"def resume_strength(query_with_chunks):\n",
|
250 |
+
" query = f'''need to detailed analysis and explain of the strength of below resume and finally conclude them\n",
|
251 |
+
" \"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\n",
|
252 |
+
" {query_with_chunks}\n",
|
253 |
+
" \"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\n",
|
254 |
+
" '''\n",
|
255 |
+
" return query\n",
|
256 |
+
"\n",
|
257 |
+
"strength = resume_strength(query_with_chunks=summary_result)\n",
|
258 |
+
"strength_result = openai(openai_api_key=openai_api_key, chunks=chunks, analyze=strength)\n",
|
259 |
+
"print(strength_result)"
|
260 |
+
]
|
261 |
+
},
|
262 |
+
{
|
263 |
+
"cell_type": "code",
|
264 |
+
"execution_count": 60,
|
265 |
+
"metadata": {},
|
266 |
+
"outputs": [
|
267 |
+
{
|
268 |
+
"name": "stdout",
|
269 |
+
"output_type": "stream",
|
270 |
+
"text": [
|
271 |
+
"Based on the provided resume details, Gopinath Asokan has a strong foundation in diverse industries and is highly skilled in problem-solving and project management. However, there are a few weaknesses in the resume that can be improved:\n",
|
272 |
+
"\n",
|
273 |
+
"1. Lack of a clear career objective: The resume does not mention a specific career objective or goal. It would be beneficial to include a clear and concise objective statement that highlights Gopinath's career aspirations and how his skills and experience align with those goals.\n",
|
274 |
+
"\n",
|
275 |
+
"2. Incomplete work experience details: While the resume mentions Gopinath's job titles and responsibilities, it does not provide specific accomplishments or achievements in each role. Adding quantifiable achievements or results-oriented statements would strengthen the resume and demonstrate Gopinath's impact in previous positions.\n",
|
276 |
+
"\n",
|
277 |
+
"3. Limited information on education: The resume briefly mentions Gopinath's educational background, but it lacks details on specific coursework or projects related to data science. Including relevant coursework, research projects, or any notable academic achievements would enhance the resume's credibility and showcase Gopinath's academic abilities.\n",
|
278 |
+
"\n",
|
279 |
+
"4. Lack of focus on key technical skills: Although the resume mentions a wide range of technical skills, it does not highlight which skills are most relevant to the data science field. It would be helpful to prioritize and emphasize the key technical skills that directly align with the desired job roles in data science.\n",
|
280 |
+
"\n",
|
281 |
+
"To improve the resume, consider the following suggestions:\n",
|
282 |
+
"\n",
|
283 |
+
"1. Start with a strong career objective statement that clearly communicates Gopinath's goals and how his skills and experience align with those goals.\n",
|
284 |
+
"\n",
|
285 |
+
"2. Include specific accomplishments and achievements in each work experience entry, highlighting the impact Gopinath made in previous roles. Use quantitative metrics whenever possible to showcase results.\n",
|
286 |
+
"\n",
|
287 |
+
"3. Provide more details on relevant coursework, research projects, or academic achievements related to data science during Gopinath's Master's degree program.\n",
|
288 |
+
"\n",
|
289 |
+
"4. Prioritize and highlight the key technical skills that directly align with data science roles. Consider creating a separate section dedicated to technical skills, showcasing proficiency and experience in those areas.\n",
|
290 |
+
"\n",
|
291 |
+
"5. Consider including any relevant certifications, online courses, or workshops related to data science or machine learning.\n",
|
292 |
+
"\n",
|
293 |
+
"By addressing these weaknesses and implementing these improvements, Gopinath Asokan can create a stronger and more impactful resume that highlights his skills, experience, and potential in the field of data science.\n"
|
294 |
+
]
|
295 |
+
}
|
296 |
+
],
|
297 |
+
"source": [
|
298 |
+
"def resume_weakness(query_with_chunks):\n",
|
299 |
+
" query = f'''need to detailed analysis and explain of the weakness of below resume and how to improve make a better resume.\n",
|
300 |
+
"\n",
|
301 |
+
" \"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\n",
|
302 |
+
" {query_with_chunks}\n",
|
303 |
+
" \"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\n",
|
304 |
+
" '''\n",
|
305 |
+
" return query\n",
|
306 |
+
"\n",
|
307 |
+
"weakness = resume_weakness(query_with_chunks=summary_result)\n",
|
308 |
+
"result_weakness = openai(openai_api_key=openai_api_key, chunks=chunks, analyze=weakness)\n",
|
309 |
+
"print(result_weakness)"
|
310 |
+
]
|
311 |
+
},
|
312 |
+
{
|
313 |
+
"cell_type": "code",
|
314 |
+
"execution_count": 61,
|
315 |
+
"metadata": {},
|
316 |
+
"outputs": [
|
317 |
+
{
|
318 |
+
"name": "stdout",
|
319 |
+
"output_type": "stream",
|
320 |
+
"text": [
|
321 |
+
"Based on the information provided, some potential job roles that Gopinath Asokan could apply to on LinkedIn include:\n",
|
322 |
+
"\n",
|
323 |
+
"1. Data Scientist\n",
|
324 |
+
"2. Data Analyst\n",
|
325 |
+
"3. Machine Learning Engineer\n",
|
326 |
+
"4. Business Analyst\n",
|
327 |
+
"5. Project Manager\n",
|
328 |
+
"6. Operations Analyst\n",
|
329 |
+
"7. Quality Assurance Engineer\n",
|
330 |
+
"8. Sales Analyst\n",
|
331 |
+
"9. AI Engineer\n",
|
332 |
+
"10. Retail Analyst\n",
|
333 |
+
"\n",
|
334 |
+
"These job roles align with Gopinath's skills and experience in data science, problem-solving, project management, resume analysis, retail sales forecasting, and quality assurance.\n"
|
335 |
+
]
|
336 |
+
}
|
337 |
+
],
|
338 |
+
"source": [
|
339 |
+
"def job_title_suggestion(query_with_chunks):\n",
|
340 |
+
"\n",
|
341 |
+
" query = f''' what are the job roles i apply to likedin based on below?\n",
|
342 |
+
" \n",
|
343 |
+
" \"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\n",
|
344 |
+
" {query_with_chunks}\n",
|
345 |
+
" \"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\n",
|
346 |
+
" '''\n",
|
347 |
+
" return query\n",
|
348 |
+
"\n",
|
349 |
+
"suggestion = job_title_suggestion(query_with_chunks=summary_result)\n",
|
350 |
+
"result_suggestion = openai(openai_api_key=openai_api_key, chunks=chunks, analyze=suggestion)\n",
|
351 |
+
"print(result_suggestion)"
|
352 |
+
]
|
353 |
+
},
|
354 |
+
{
|
355 |
+
"cell_type": "code",
|
356 |
+
"execution_count": null,
|
357 |
+
"metadata": {},
|
358 |
+
"outputs": [],
|
359 |
+
"source": []
|
360 |
+
}
|
361 |
+
],
|
362 |
+
"metadata": {
|
363 |
+
"kernelspec": {
|
364 |
+
"display_name": "Python 3",
|
365 |
+
"language": "python",
|
366 |
+
"name": "python3"
|
367 |
+
},
|
368 |
+
"language_info": {
|
369 |
+
"codemirror_mode": {
|
370 |
+
"name": "ipython",
|
371 |
+
"version": 3
|
372 |
+
},
|
373 |
+
"file_extension": ".py",
|
374 |
+
"mimetype": "text/x-python",
|
375 |
+
"name": "python",
|
376 |
+
"nbconvert_exporter": "python",
|
377 |
+
"pygments_lexer": "ipython3",
|
378 |
+
"version": "3.11.6"
|
379 |
+
},
|
380 |
+
"orig_nbformat": 4
|
381 |
+
},
|
382 |
+
"nbformat": 4,
|
383 |
+
"nbformat_minor": 2
|
384 |
+
}
|
LICENSE.md
ADDED
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
MIT License
|
2 |
+
|
3 |
+
Copyright (c) 2023 gopiashokan
|
4 |
+
|
5 |
+
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the “Software”), to deal in the Software without restriction, including without limitation the rights to use, copy,
|
6 |
+
modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
|
7 |
+
|
8 |
+
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
9 |
+
|
10 |
+
THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
11 |
+
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
LinkedIn_scraper_with_Selenium.ipynb
ADDED
@@ -0,0 +1,1452 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"cells": [
|
3 |
+
{
|
4 |
+
"cell_type": "code",
|
5 |
+
"execution_count": 1,
|
6 |
+
"metadata": {},
|
7 |
+
"outputs": [],
|
8 |
+
"source": [
|
9 |
+
"import time\n",
|
10 |
+
"import numpy as np\n",
|
11 |
+
"import pandas as pd\n",
|
12 |
+
"from selenium import webdriver\n",
|
13 |
+
"from selenium.webdriver.common.by import By\n",
|
14 |
+
"from selenium.webdriver.common.keys import Keys\n",
|
15 |
+
"import warnings\n",
|
16 |
+
"warnings.filterwarnings('ignore')"
|
17 |
+
]
|
18 |
+
},
|
19 |
+
{
|
20 |
+
"cell_type": "code",
|
21 |
+
"execution_count": 118,
|
22 |
+
"metadata": {},
|
23 |
+
"outputs": [
|
24 |
+
{
|
25 |
+
"name": "stdout",
|
26 |
+
"output_type": "stream",
|
27 |
+
"text": [
|
28 |
+
"['data', 'scientist', 'artificial', 'intelligence', 'ai']\n"
|
29 |
+
]
|
30 |
+
}
|
31 |
+
],
|
32 |
+
"source": [
|
33 |
+
"user_input_job_title = input('Enter Job Titles (with comma separated):').split()\n",
|
34 |
+
"print(user_input_job_title)"
|
35 |
+
]
|
36 |
+
},
|
37 |
+
{
|
38 |
+
"cell_type": "code",
|
39 |
+
"execution_count": 119,
|
40 |
+
"metadata": {},
|
41 |
+
"outputs": [
|
42 |
+
{
|
43 |
+
"name": "stdout",
|
44 |
+
"output_type": "stream",
|
45 |
+
"text": [
|
46 |
+
"data%2C%20scientist%2C%20artificial%2C%20intelligence%2C%20ai\n"
|
47 |
+
]
|
48 |
+
}
|
49 |
+
],
|
50 |
+
"source": [
|
51 |
+
"b = []\n",
|
52 |
+
"for i in user_input_job_title:\n",
|
53 |
+
" x = i.split()\n",
|
54 |
+
" y = '%20'.join(x)\n",
|
55 |
+
" b.append(y)\n",
|
56 |
+
"\n",
|
57 |
+
"job_title = '%2C%20'.join(b)\n",
|
58 |
+
"print(job_title)"
|
59 |
+
]
|
60 |
+
},
|
61 |
+
{
|
62 |
+
"cell_type": "code",
|
63 |
+
"execution_count": 120,
|
64 |
+
"metadata": {},
|
65 |
+
"outputs": [
|
66 |
+
{
|
67 |
+
"name": "stdout",
|
68 |
+
"output_type": "stream",
|
69 |
+
"text": [
|
70 |
+
"https://in.linkedin.com/jobs/search?keywords=data%2C%20scientist%2C%20artificial%2C%20intelligence%2C%20ai&location=India&locationId=&geoId=102713980&f_TPR=r604800&position=1&pageNum=0\n"
|
71 |
+
]
|
72 |
+
}
|
73 |
+
],
|
74 |
+
"source": [
|
75 |
+
"link = f\"https://in.linkedin.com/jobs/search?keywords={job_title}&location=India&locationId=&geoId=102713980&f_TPR=r604800&position=1&pageNum=0\"\n",
|
76 |
+
"print(link)"
|
77 |
+
]
|
78 |
+
},
|
79 |
+
{
|
80 |
+
"cell_type": "code",
|
81 |
+
"execution_count": 121,
|
82 |
+
"metadata": {},
|
83 |
+
"outputs": [],
|
84 |
+
"source": [
|
85 |
+
"driver = webdriver.Chrome()\n",
|
86 |
+
"driver.maximize_window()\n",
|
87 |
+
"\n",
|
88 |
+
"driver.get(link)\n",
|
89 |
+
"driver.implicitly_wait(10)"
|
90 |
+
]
|
91 |
+
},
|
92 |
+
{
|
93 |
+
"cell_type": "code",
|
94 |
+
"execution_count": 122,
|
95 |
+
"metadata": {},
|
96 |
+
"outputs": [],
|
97 |
+
"source": [
|
98 |
+
"for i in range(0,2):\n",
|
99 |
+
" driver.execute_script(\"window.scrollTo(0, document.body.scrollHeight);\")\n",
|
100 |
+
" time.sleep(5)\n",
|
101 |
+
"\n",
|
102 |
+
" try:\n",
|
103 |
+
" x = driver.find_element(by=By.CSS_SELECTOR, value=\"button[aria-label='See more jobs']\").click()\n",
|
104 |
+
" time.sleep(3)\n",
|
105 |
+
" except:\n",
|
106 |
+
" pass"
|
107 |
+
]
|
108 |
+
},
|
109 |
+
{
|
110 |
+
"cell_type": "code",
|
111 |
+
"execution_count": 123,
|
112 |
+
"metadata": {},
|
113 |
+
"outputs": [
|
114 |
+
{
|
115 |
+
"name": "stdout",
|
116 |
+
"output_type": "stream",
|
117 |
+
"text": [
|
118 |
+
"['nasscom', 'Deloitte', 'L&T Technology Services', 'Predis.ai', 'Api Logistics', 'E2E Networks Limited', 'Sony Pictures Networks India', 'Factspan', 'MakeMyTrip', 'Google', 'Jupiter AI Labs ✔', 'Persistent Systems', 'CodeRoofs IT Solutions', 'Xenspire', 'Unicorn Metric AI', 'LENS Corporation', 'Qualcomm', 'Intellect Design Arena Ltd', 'Connect Tech+Talent', 'CloudMoyo', 'Infosys', 'LatentView Analytics', 'HARMAN International', 'MakeMyTrip', 'Fulcrum Digital Inc', 'Shree Cement Ltd.', 'Scaletorch.ai', 'Deloitte', 'Zacks Investment Research', 'bluCognition', 'Fulcrum Digital Inc', 'Enterpret', 'LTIMindtree', 'ACN Infotech (India) Pvt. Ltd.', 'HealthifyMe', 'IBM', 'Partify AI', 'Viga Entertainment Technology', 'Neoma', 'Microsoft', 'CyberSealAI', 'IBM', 'Hyperhire', 'Level AI', 'klimbB', 'IBM', 'Microsoft', 'LTIMindtree', 'LTIMindtree', 'LTIMindtree', 'LTIMindtree', 'LTIMindtree', 'Unilever', 'LTIMindtree', 'Roku Inc.', 'MapOut', 'Tata Consultancy Services', 'Techfastic', 'Quantium', 'Simplismart', 'AjnaLens', 'Quantiphi', 'Quantiphi', 'LTIMindtree', 'IBM', 'Neoma', 'IBM', 'Dew Software', 'IBM', 'Fusion Plus Solutions Inc', 'VIZON', 'Singular Intelligence', 'LTIMindtree', 'A.P. Moller - Maersk', 'Epiq']\n"
|
119 |
+
]
|
120 |
+
}
|
121 |
+
],
|
122 |
+
"source": [
|
123 |
+
"company_name = []\n",
|
124 |
+
"\n",
|
125 |
+
"try:\n",
|
126 |
+
" company = driver.find_elements(by=By.CSS_SELECTOR, value='h4[class=\"base-search-card__subtitle\"]')\n",
|
127 |
+
" for i in company:\n",
|
128 |
+
" company_name.append(i.text)\n",
|
129 |
+
"\n",
|
130 |
+
"except:\n",
|
131 |
+
" pass\n",
|
132 |
+
"\n",
|
133 |
+
"print(company_name)"
|
134 |
+
]
|
135 |
+
},
|
136 |
+
{
|
137 |
+
"cell_type": "code",
|
138 |
+
"execution_count": 124,
|
139 |
+
"metadata": {},
|
140 |
+
"outputs": [
|
141 |
+
{
|
142 |
+
"name": "stdout",
|
143 |
+
"output_type": "stream",
|
144 |
+
"text": [
|
145 |
+
"['Artificial Intelligence (AI)', 'Data Scientist', 'Data Scientist', 'Data Science', 'Artificial Intelligence (AI)', 'Artificial Intelligence (AI)', 'Data Scientist, Digital Business', 'Data Scientist', 'Senior/Lead Data Scientist', 'Business Data Scientist, Machine Learning', 'Machine Learning Engineer', 'Senior Data Scientist', 'Generative Artificial Intelligence (AI)', 'AI/ML- Python Engineer', 'Machine Learning Engineer', 'Artificial Intelligence Researcher', 'Engineer - AI ML', 'Machine Learning Engineer', 'Senior Data Scientist (Battery Industry)', 'Data Scientist', 'Data Analyst', 'AI Application Developer', 'Data Scientist', 'Principal Data Scientist', 'ML Engineer', 'Data Scientist', 'AI Researcher (Computer Vision)', 'Data Science Consultant', 'Machine Learning Engineer', 'Machine Learning Engineer', 'ML Engineer', 'Machine Learning Engineer', 'AI/ML with Azure', 'Data Scientist', 'Machine Learning Engineer', 'Data Scientist: Artificial Intelligence', 'AI/ML Developer Intern', 'AI Engineer', 'Machine Learning Engineer', 'Senior Data Scientist', 'Machine Learning & AI Specialist (IIT)', 'Data Scientist: Artificial Intelligence', 'Artificial Intelligence Researcher', 'Machine Learning Engineer (Speech) - AI (Remote,India)', 'Principal Data Scientist - AI / ML BI (up to 40 LPA)', 'Data Scientist: Artificial Intelligence', 'Senior Data Scientist', 'Machine Learning Ops Engineer', 'NLP & Chat GPT', 'Data Science+ GCP', 'Data Science+ GCP', 'Data Science+ GCP', 'Assistant Manager - Data Science', 'Data Science+ GCP', 'Sr. 
Software Engineer - Machine Learning', 'Machine Learning Engineer', 'Machine Learning Engineer', 'Data Scientist', 'Senior Machine Learning Engineer', 'Machine Learning Engineer', 'Data Scientist', 'Machine Learning Engineer - NLP', 'Machine Learning Engineer - NLP', 'AI/ML with Azure', 'Data Scientist: Advanced Analytics', 'AI/ML Programmer', 'Data Scientist: Advanced Analytics', 'Machine Learning Engineer/SRE', 'Data Scientist: Advanced Analytics', 'Machine learning & artificial Intelligence', 'AI Developer', 'Senior Data Scientist', 'AI/ML with Azure', 'Data Product Lead - Machine Learning', 'Sr AI/ML Consultant']\n"
|
146 |
+
]
|
147 |
+
}
|
148 |
+
],
|
149 |
+
"source": [
|
150 |
+
"\n",
|
151 |
+
"job_title = []\n",
|
152 |
+
"\n",
|
153 |
+
"try:\n",
|
154 |
+
" title = driver.find_elements(by=By.CSS_SELECTOR, value='h3[class=\"base-search-card__title\"]')\n",
|
155 |
+
" for i in title:\n",
|
156 |
+
" job_title.append(i.text)\n",
|
157 |
+
"\n",
|
158 |
+
"except:\n",
|
159 |
+
" pass\n",
|
160 |
+
"\n",
|
161 |
+
"print(job_title)"
|
162 |
+
]
|
163 |
+
},
|
164 |
+
{
|
165 |
+
"cell_type": "code",
|
166 |
+
"execution_count": 125,
|
167 |
+
"metadata": {},
|
168 |
+
"outputs": [
|
169 |
+
{
|
170 |
+
"name": "stdout",
|
171 |
+
"output_type": "stream",
|
172 |
+
"text": [
|
173 |
+
"['Noida, Uttar Pradesh, India', 'Gurugram, Haryana, India', 'Hyderabad, Telangana, India', 'Pune, Maharashtra, India', 'Gurgaon, Haryana, India', 'Delhi, India', 'Bengaluru, Karnataka, India', 'Bengaluru, Karnataka, India', 'Bengaluru, Karnataka, India', 'Bengaluru, Karnataka, India', 'Bengaluru, Karnataka, India', 'Pune, Maharashtra, India', 'Sahibzada Ajit Singh Nagar, Punjab, India', 'Hyderabad, Telangana, India', 'India', 'Gurugram, Haryana, India', 'Hyderabad, Telangana, India', 'Pune, Maharashtra, India', 'Hyderabad, Telangana, India', 'Pune, Maharashtra, India', 'Bengaluru East, Karnataka, India', 'Chennai, Tamil Nadu, India', 'Bengaluru, Karnataka, India', 'Bengaluru, Karnataka, India', 'Mumbai, Maharashtra, India', 'Kolkata, West Bengal, India', 'India', 'Gurugram, Haryana, India', 'Kolkata, West Bengal, India', 'India', 'Mumbai, Maharashtra, India', 'Bengaluru, Karnataka, India', 'Bengaluru, Karnataka, India', 'Hyderabad, Telangana, India', 'Bengaluru, Karnataka, India', 'Hyderabad, Telangana, India', 'India', 'Bengaluru, Karnataka, India', 'India', 'Bengaluru, Karnataka, India', 'India', 'Hyderabad, Telangana, India', 'India', 'India', 'Bengaluru, Karnataka, India', 'Hyderabad, Telangana, India', 'Mumbai, Maharashtra, India', 'Bengaluru, Karnataka, India', 'Pune, Maharashtra, India', 'Pune, Maharashtra, India', 'Mumbai, Maharashtra, India', 'Hyderabad, Telangana, India', 'Bengaluru, Karnataka, India', 'Bengaluru, Karnataka, India', 'Bengaluru, Karnataka, India', 'India', 'Bengaluru, Karnataka, India', 'India', 'Hyderabad, Telangana, India', 'Bengaluru, Karnataka, India', 'Thane, Maharashtra, India', 'Mumbai, Maharashtra, India', 'Mumbai, Maharashtra, India', 'Chennai, Tamil Nadu, India', 'Gurgaon, Haryana, India', 'India', 'Gurgaon, Haryana, India', 'Mumbai, Maharashtra, India', 'Gurgaon, Haryana, India', 'Hyderabad, Telangana, India', 'Dehradun, Uttarakhand, India', 'Bengaluru, Karnataka, India', 'Coimbatore, Tamil Nadu, India', 'Bengaluru, Karnataka, 
India', 'Hyderabad, Telangana, India']\n"
|
174 |
+
]
|
175 |
+
}
|
176 |
+
],
|
177 |
+
"source": [
|
178 |
+
"company_location = []\n",
|
179 |
+
"\n",
|
180 |
+
"try:\n",
|
181 |
+
" location = driver.find_elements(by=By.CSS_SELECTOR, value='span[class=\"job-search-card__location\"]')\n",
|
182 |
+
" for i in location:\n",
|
183 |
+
" company_location.append(i.text)\n",
|
184 |
+
"\n",
|
185 |
+
"except:\n",
|
186 |
+
" pass\n",
|
187 |
+
"\n",
|
188 |
+
"print(company_location)"
|
189 |
+
]
|
190 |
+
},
|
191 |
+
{
|
192 |
+
"cell_type": "code",
|
193 |
+
"execution_count": 126,
|
194 |
+
"metadata": {},
|
195 |
+
"outputs": [
|
196 |
+
{
|
197 |
+
"name": "stdout",
|
198 |
+
"output_type": "stream",
|
199 |
+
"text": [
|
200 |
+
"['https://in.linkedin.com/jobs/view/artificial-intelligence-ai-at-nasscom-3775137519?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=tIIHxWA8tlGFmgU1MGBbpQ%3D%3D&position=1&pageNum=0&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/data-scientist-at-deloitte-3769279295?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=V1WsNRc58GvBk7auB%2FKsyw%3D%3D&position=2&pageNum=0&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/data-scientist-at-l-t-technology-services-3767553599?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=ZIbaag6Y46gRRLftn3vpFQ%3D%3D&position=3&pageNum=0&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/data-science-at-predis-ai-3775621176?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=0HQpUWgxam%2F0xV1ukpEcpg%3D%3D&position=4&pageNum=0&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/artificial-intelligence-ai-at-api-logistics-3775135785?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=gHlkHtkA61GpIwctphtUoA%3D%3D&position=5&pageNum=0&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/artificial-intelligence-ai-at-e2e-networks-limited-3776977208?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=fqeNZz9ofq7yszds72vw%2FA%3D%3D&position=6&pageNum=0&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/data-scientist-digital-business-at-sony-pictures-networks-india-3776752226?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=ro2SZIXOxz8jXY%2BpYvfkjA%3D%3D&position=7&pageNum=0&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/data-scientist-at-factspan-3773099421?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=zml1CQ7VBx2LhdcwmqY2%2Fg%3D%3D&position=8&pageNum=0&trk=public_jobs_jserp-result_search-card', 
'https://in.linkedin.com/jobs/view/senior-lead-data-scientist-at-makemytrip-3775206966?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=BpwORRecD4ElN4ikRtlXLA%3D%3D&position=9&pageNum=0&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/business-data-scientist-machine-learning-at-google-3757566274?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=GjF5NcJtZVQ%2BbrAH5WmP5g%3D%3D&position=10&pageNum=0&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/machine-learning-engineer-at-jupiter-ai-labs-%E2%9C%94-3774484696?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=fqz1d2aH3Hia%2Fjd8GHW%2FSw%3D%3D&position=11&pageNum=0&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/senior-data-scientist-at-persistent-systems-3767577121?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=i7Fj%2BdTM9QYeNAmPbd4QNQ%3D%3D&position=12&pageNum=0&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/generative-artificial-intelligence-ai-at-coderoofs-it-solutions-3775140084?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=n05H%2BvlRpxM5pKsjT7X4lg%3D%3D&position=13&pageNum=0&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/ai-ml-python-engineer-at-xenspire-3776742509?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=oqPHDohDLtJwrH0PJ2JdAg%3D%3D&position=14&pageNum=0&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/machine-learning-engineer-at-unicorn-metric-ai-3778195254?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=oQwTNQ8eAals8HEXWjvWJA%3D%3D&position=15&pageNum=0&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/artificial-intelligence-researcher-at-lens-corporation-3772656078?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=7XGPeivl3BGGdwu%2FltrCnw%3D%3D&position=16&pageNum=0&trk=public_jobs_jserp-result_search-card', 
'https://in.linkedin.com/jobs/view/engineer-ai-ml-at-qualcomm-3775138234?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=9%2Bfell6tv5M0F32ISdxrmw%3D%3D&position=17&pageNum=0&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/machine-learning-engineer-at-intellect-design-arena-ltd-3768779927?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=nKLWdEtof9DSOyzQDLzqsQ%3D%3D&position=18&pageNum=0&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/senior-data-scientist-battery-industry-at-connect-tech%2Btalent-3731597862?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=dx3f7j%2FhYaxwRkRYYV%2BIdw%3D%3D&position=19&pageNum=0&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/data-scientist-at-cloudmoyo-3687680963?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=%2B3tNPUeDHOpXJA8Lr8i1fw%3D%3D&position=20&pageNum=0&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/data-analyst-at-infosys-3730479042?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=rsloG%2B06seG4KHVKBUX7Cg%3D%3D&position=21&pageNum=0&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/ai-application-developer-at-latentview-analytics-3767582937?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=n4AnMS1Emo0FDDIHMU0TgQ%3D%3D&position=22&pageNum=0&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/data-scientist-at-harman-international-3723397941?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=PcVDZDSG12a2kMcthdn0bw%3D%3D&position=23&pageNum=0&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/principal-data-scientist-at-makemytrip-3775206535?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=NAE5MMW5Og4ystZ6cI2FIA%3D%3D&position=24&pageNum=0&trk=public_jobs_jserp-result_search-card', 
'https://in.linkedin.com/jobs/view/ml-engineer-at-fulcrum-digital-inc-3776970598?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=4XWtskgxnCxUE0pbNw45Sw%3D%3D&position=25&pageNum=0&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/data-scientist-at-shree-cement-ltd-3759027790?refId=5mLckH618SAHjLoK0FQV3Q%3D%3D&trackingId=zcTi514r5y%2B5JkMwjCv4NQ%3D%3D&position=1&pageNum=1&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/ai-researcher-computer-vision-at-scaletorch-ai-3767554374?refId=5mLckH618SAHjLoK0FQV3Q%3D%3D&trackingId=vY2sBTOjGlrtAnEIkgfyYA%3D%3D&position=2&pageNum=1&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/data-science-consultant-at-deloitte-3769276495?refId=5mLckH618SAHjLoK0FQV3Q%3D%3D&trackingId=WmyB9lhjDVfHN4%2BQPcTxYg%3D%3D&position=3&pageNum=1&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/machine-learning-engineer-at-zacks-investment-research-3755740131?refId=5mLckH618SAHjLoK0FQV3Q%3D%3D&trackingId=tMNPLaApxDDQkRSa0G0h5w%3D%3D&position=4&pageNum=1&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/machine-learning-engineer-at-blucognition-3755450300?refId=5mLckH618SAHjLoK0FQV3Q%3D%3D&trackingId=i2OTulqMm1aKugPR6PVXJA%3D%3D&position=5&pageNum=1&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/ml-engineer-at-fulcrum-digital-inc-3776967920?refId=5mLckH618SAHjLoK0FQV3Q%3D%3D&trackingId=WDEgQDaEUTMNG1WoBLAjng%3D%3D&position=6&pageNum=1&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/machine-learning-engineer-at-enterpret-3774380362?refId=5mLckH618SAHjLoK0FQV3Q%3D%3D&trackingId=dehHpST1OKhbG%2FSG2OA1rQ%3D%3D&position=7&pageNum=1&trk=public_jobs_jserp-result_search-card', 
'https://in.linkedin.com/jobs/view/ai-ml-with-azure-at-ltimindtree-3769036429?refId=5mLckH618SAHjLoK0FQV3Q%3D%3D&trackingId=jLHQLP5aAtYChMgk7cD9Zg%3D%3D&position=8&pageNum=1&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/data-scientist-at-acn-infotech-india-pvt-ltd-3766934523?refId=5mLckH618SAHjLoK0FQV3Q%3D%3D&trackingId=%2BC62ajHy3yj1JlVjvnlCJA%3D%3D&position=9&pageNum=1&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/machine-learning-engineer-at-healthifyme-3776045890?refId=5mLckH618SAHjLoK0FQV3Q%3D%3D&trackingId=w4ysruw1XcGyTeRhuWEH5w%3D%3D&position=10&pageNum=1&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/data-scientist-artificial-intelligence-at-ibm-3739945752?refId=5mLckH618SAHjLoK0FQV3Q%3D%3D&trackingId=yCy3ZRoxV1W3XuzRwkJrGQ%3D%3D&position=11&pageNum=1&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/ai-ml-developer-intern-at-partify-ai-3773024910?refId=5mLckH618SAHjLoK0FQV3Q%3D%3D&trackingId=oJmwgizunhxhVjxinkiwHA%3D%3D&position=12&pageNum=1&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/ai-engineer-at-viga-entertainment-technology-3776744195?refId=5mLckH618SAHjLoK0FQV3Q%3D%3D&trackingId=ib4WDL5pnA19dnLvP8VYkw%3D%3D&position=13&pageNum=1&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/machine-learning-engineer-at-neoma-3773042802?refId=5mLckH618SAHjLoK0FQV3Q%3D%3D&trackingId=EjpBxec25ItCOhRSSJWXSA%3D%3D&position=14&pageNum=1&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/senior-data-scientist-at-microsoft-3673331456?refId=5mLckH618SAHjLoK0FQV3Q%3D%3D&trackingId=hdQPVUrsXV3RKl5oWraVyA%3D%3D&position=15&pageNum=1&trk=public_jobs_jserp-result_search-card', 
'https://in.linkedin.com/jobs/view/machine-learning-ai-specialist-iit-at-cybersealai-3766232238?refId=5mLckH618SAHjLoK0FQV3Q%3D%3D&trackingId=47FkyKD2pwI8bFQhwaGJlg%3D%3D&position=16&pageNum=1&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/data-scientist-artificial-intelligence-at-ibm-3739946231?refId=5mLckH618SAHjLoK0FQV3Q%3D%3D&trackingId=%2BXJkcAeRoFX4OUrvA%2F0Afw%3D%3D&position=17&pageNum=1&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/artificial-intelligence-researcher-at-hyperhire-3773093676?refId=5mLckH618SAHjLoK0FQV3Q%3D%3D&trackingId=r%2BTmUJSbw2%2FWQd94rv9rBw%3D%3D&position=18&pageNum=1&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/machine-learning-engineer-speech-ai-remote-india-at-level-ai-3774966323?refId=5mLckH618SAHjLoK0FQV3Q%3D%3D&trackingId=p4GRU5H5J0UhPD5NySYQmw%3D%3D&position=19&pageNum=1&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/principal-data-scientist-ai-ml-bi-up-to-40-lpa-at-klimbb-3773502037?refId=5mLckH618SAHjLoK0FQV3Q%3D%3D&trackingId=KKcwWKMr9cfzh3vKoggchQ%3D%3D&position=20&pageNum=1&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/data-scientist-artificial-intelligence-at-ibm-3739945751?refId=5mLckH618SAHjLoK0FQV3Q%3D%3D&trackingId=NSj2I0ddVbx0NVQW7bFZXA%3D%3D&position=21&pageNum=1&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/senior-data-scientist-at-microsoft-3673115406?refId=5mLckH618SAHjLoK0FQV3Q%3D%3D&trackingId=eNXNmAN9IEEKUrDKRAAV0Q%3D%3D&position=22&pageNum=1&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/machine-learning-ops-engineer-at-ltimindtree-3777564009?refId=5mLckH618SAHjLoK0FQV3Q%3D%3D&trackingId=p%2B5EV5YvkFzYq7SqXohC8w%3D%3D&position=23&pageNum=1&trk=public_jobs_jserp-result_search-card', 
'https://in.linkedin.com/jobs/view/nlp-chat-gpt-at-ltimindtree-3772965413?refId=5mLckH618SAHjLoK0FQV3Q%3D%3D&trackingId=6dRGFWM5mwYNEqNJ%2FVvQVQ%3D%3D&position=24&pageNum=1&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/data-science%2B-gcp-at-ltimindtree-3766955338?refId=5mLckH618SAHjLoK0FQV3Q%3D%3D&trackingId=0bRNgoQw5t3OVa9wPU1oSg%3D%3D&position=25&pageNum=1&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/data-science%2B-gcp-at-ltimindtree-3766954376?refId=q968GdRJznBjPlP%2B8DbrzA%3D%3D&trackingId=IJW10tBUZza%2FX52w7VQS8g%3D%3D&position=1&pageNum=2&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/data-science%2B-gcp-at-ltimindtree-3766957094?refId=q968GdRJznBjPlP%2B8DbrzA%3D%3D&trackingId=O2tHq%2Fn5LP%2FZRFKVWm8Z6A%3D%3D&position=2&pageNum=2&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/assistant-manager-data-science-at-unilever-3774783891?refId=q968GdRJznBjPlP%2B8DbrzA%3D%3D&trackingId=oZQK64V%2BNsLWd6TxxCZHng%3D%3D&position=3&pageNum=2&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/data-science%2B-gcp-at-ltimindtree-3766955395?refId=q968GdRJznBjPlP%2B8DbrzA%3D%3D&trackingId=%2BH26NrGaYLPZrrmf1uNXLQ%3D%3D&position=4&pageNum=2&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/sr-software-engineer-machine-learning-at-roku-inc-3776116287?refId=q968GdRJznBjPlP%2B8DbrzA%3D%3D&trackingId=kGIthz68rrMIAelJ9aJqkg%3D%3D&position=5&pageNum=2&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/machine-learning-engineer-at-mapout-3774365743?refId=q968GdRJznBjPlP%2B8DbrzA%3D%3D&trackingId=8O%2F62VZEL7qwwqMvzfrSgQ%3D%3D&position=6&pageNum=2&trk=public_jobs_jserp-result_search-card', 
'https://in.linkedin.com/jobs/view/machine-learning-engineer-at-tata-consultancy-services-3767591125?refId=q968GdRJznBjPlP%2B8DbrzA%3D%3D&trackingId=ZZFZIcRV1thtuRhyDUIxvQ%3D%3D&position=7&pageNum=2&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/data-scientist-at-techfastic-3769076409?refId=q968GdRJznBjPlP%2B8DbrzA%3D%3D&trackingId=gO7waXoDBeBFF6x%2FfKSxMQ%3D%3D&position=8&pageNum=2&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/senior-machine-learning-engineer-at-quantium-3675003650?refId=q968GdRJznBjPlP%2B8DbrzA%3D%3D&trackingId=8bP09eqBMiCppjCy5Bcb%2FQ%3D%3D&position=9&pageNum=2&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/machine-learning-engineer-at-simplismart-3773307335?refId=q968GdRJznBjPlP%2B8DbrzA%3D%3D&trackingId=CmhDiQ1cfRAstAEpODg%2F%2Fg%3D%3D&position=10&pageNum=2&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/data-scientist-at-ajnalens-3742204510?refId=q968GdRJznBjPlP%2B8DbrzA%3D%3D&trackingId=Fw6t5JT2di9NFIc087ViTw%3D%3D&position=11&pageNum=2&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/machine-learning-engineer-nlp-at-quantiphi-3777501193?refId=q968GdRJznBjPlP%2B8DbrzA%3D%3D&trackingId=2Ar1jtFzmzGrJSj1jWg6fg%3D%3D&position=12&pageNum=2&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/machine-learning-engineer-nlp-at-quantiphi-3777500405?refId=q968GdRJznBjPlP%2B8DbrzA%3D%3D&trackingId=v87TNXw%2BpkX1MqyfrtRHGg%3D%3D&position=13&pageNum=2&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/ai-ml-with-azure-at-ltimindtree-3766962969?refId=q968GdRJznBjPlP%2B8DbrzA%3D%3D&trackingId=U%2FF2cAfwN65pDV%2Bi49hnkA%3D%3D&position=14&pageNum=2&trk=public_jobs_jserp-result_search-card', 
'https://in.linkedin.com/jobs/view/data-scientist-advanced-analytics-at-ibm-3739945748?refId=q968GdRJznBjPlP%2B8DbrzA%3D%3D&trackingId=MwTqZIiJkiYESyfOI1KtCA%3D%3D&position=15&pageNum=2&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/ai-ml-programmer-at-neoma-3773022866?refId=q968GdRJznBjPlP%2B8DbrzA%3D%3D&trackingId=hULaxedPgh1%2BPosg8tkgDA%3D%3D&position=16&pageNum=2&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/data-scientist-advanced-analytics-at-ibm-3739460541?refId=q968GdRJznBjPlP%2B8DbrzA%3D%3D&trackingId=cj8gBO2%2FN6F9t4iiU0BbgQ%3D%3D&position=17&pageNum=2&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/machine-learning-engineer-sre-at-dew-software-3776954462?refId=q968GdRJznBjPlP%2B8DbrzA%3D%3D&trackingId=wMyNXT5mzntOYO3RawcRQQ%3D%3D&position=18&pageNum=2&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/data-scientist-advanced-analytics-at-ibm-3739945749?refId=q968GdRJznBjPlP%2B8DbrzA%3D%3D&trackingId=c%2BnY4n3aZ7elMNA1EdAmMA%3D%3D&position=19&pageNum=2&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/machine-learning-artificial-intelligence-at-fusion-plus-solutions-inc-3768011810?refId=q968GdRJznBjPlP%2B8DbrzA%3D%3D&trackingId=Q85Pn70cybeMsWJ935v9ew%3D%3D&position=20&pageNum=2&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/ai-developer-at-vizon-3775140224?refId=q968GdRJznBjPlP%2B8DbrzA%3D%3D&trackingId=CkLo8%2BQbJ%2BUyJDYTQZr17g%3D%3D&position=21&pageNum=2&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/senior-data-scientist-at-singular-intelligence-3770111690?refId=q968GdRJznBjPlP%2B8DbrzA%3D%3D&trackingId=GpXPtM3Io5Q2GHjNKepuYQ%3D%3D&position=22&pageNum=2&trk=public_jobs_jserp-result_search-card', 
'https://in.linkedin.com/jobs/view/ai-ml-with-azure-at-ltimindtree-3769039244?refId=q968GdRJznBjPlP%2B8DbrzA%3D%3D&trackingId=zIVAWnuVHpElubaYw3GnDg%3D%3D&position=23&pageNum=2&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/data-product-lead-machine-learning-at-a-p-moller-maersk-3774356719?refId=q968GdRJznBjPlP%2B8DbrzA%3D%3D&trackingId=l8OO7D68VUZtXd%2Fkww%2FkhA%3D%3D&position=24&pageNum=2&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/sr-ai-ml-consultant-at-epiq-3772999318?refId=q968GdRJznBjPlP%2B8DbrzA%3D%3D&trackingId=994whamnLqENYogIT5Lj5Q%3D%3D&position=25&pageNum=2&trk=public_jobs_jserp-result_search-card', 'https://in.linkedin.com/jobs/view/artificial-intelligence-ai-at-nasscom-3775137519?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=tIIHxWA8tlGFmgU1MGBbpQ%3D%3D&trk=public_jobs_topcard-title']\n"
|
201 |
+
]
|
202 |
+
}
|
203 |
+
],
|
204 |
+
"source": [
|
205 |
+
"job_url = []\n",
|
206 |
+
"\n",
|
207 |
+
"try:\n",
|
208 |
+
" url = driver.find_elements(by=By.XPATH, value='//a[contains(@href, \"/jobs/\")]')\n",
|
209 |
+
" for i in url:\n",
|
210 |
+
" job_url.append(i.get_attribute('href'))\n",
|
211 |
+
"\n",
|
212 |
+
"except:\n",
|
213 |
+
" pass\n",
|
214 |
+
"\n",
|
215 |
+
"print(job_url)"
|
216 |
+
]
|
217 |
+
},
|
218 |
+
{
|
219 |
+
"cell_type": "code",
|
220 |
+
"execution_count": 127,
|
221 |
+
"metadata": {},
|
222 |
+
"outputs": [
|
223 |
+
{
|
224 |
+
"data": {
|
225 |
+
"text/html": [
|
226 |
+
"<div>\n",
|
227 |
+
"<style scoped>\n",
|
228 |
+
" .dataframe tbody tr th:only-of-type {\n",
|
229 |
+
" vertical-align: middle;\n",
|
230 |
+
" }\n",
|
231 |
+
"\n",
|
232 |
+
" .dataframe tbody tr th {\n",
|
233 |
+
" vertical-align: top;\n",
|
234 |
+
" }\n",
|
235 |
+
"\n",
|
236 |
+
" .dataframe thead th {\n",
|
237 |
+
" text-align: right;\n",
|
238 |
+
" }\n",
|
239 |
+
"</style>\n",
|
240 |
+
"<table border=\"1\" class=\"dataframe\">\n",
|
241 |
+
" <thead>\n",
|
242 |
+
" <tr style=\"text-align: right;\">\n",
|
243 |
+
" <th></th>\n",
|
244 |
+
" <th>Company Name</th>\n",
|
245 |
+
" <th>Job Title</th>\n",
|
246 |
+
" <th>Location</th>\n",
|
247 |
+
" <th>Website URL</th>\n",
|
248 |
+
" </tr>\n",
|
249 |
+
" </thead>\n",
|
250 |
+
" <tbody>\n",
|
251 |
+
" <tr>\n",
|
252 |
+
" <th>0</th>\n",
|
253 |
+
" <td>nasscom</td>\n",
|
254 |
+
" <td>Artificial Intelligence (AI)</td>\n",
|
255 |
+
" <td>Noida, Uttar Pradesh, India</td>\n",
|
256 |
+
" <td>https://in.linkedin.com/jobs/view/artificial-i...</td>\n",
|
257 |
+
" </tr>\n",
|
258 |
+
" <tr>\n",
|
259 |
+
" <th>1</th>\n",
|
260 |
+
" <td>Deloitte</td>\n",
|
261 |
+
" <td>Data Scientist</td>\n",
|
262 |
+
" <td>Gurugram, Haryana, India</td>\n",
|
263 |
+
" <td>https://in.linkedin.com/jobs/view/data-scienti...</td>\n",
|
264 |
+
" </tr>\n",
|
265 |
+
" <tr>\n",
|
266 |
+
" <th>2</th>\n",
|
267 |
+
" <td>L&T Technology Services</td>\n",
|
268 |
+
" <td>Data Scientist</td>\n",
|
269 |
+
" <td>Hyderabad, Telangana, India</td>\n",
|
270 |
+
" <td>https://in.linkedin.com/jobs/view/data-scienti...</td>\n",
|
271 |
+
" </tr>\n",
|
272 |
+
" <tr>\n",
|
273 |
+
" <th>3</th>\n",
|
274 |
+
" <td>Predis.ai</td>\n",
|
275 |
+
" <td>Data Science</td>\n",
|
276 |
+
" <td>Pune, Maharashtra, India</td>\n",
|
277 |
+
" <td>https://in.linkedin.com/jobs/view/data-science...</td>\n",
|
278 |
+
" </tr>\n",
|
279 |
+
" <tr>\n",
|
280 |
+
" <th>4</th>\n",
|
281 |
+
" <td>Api Logistics</td>\n",
|
282 |
+
" <td>Artificial Intelligence (AI)</td>\n",
|
283 |
+
" <td>Gurgaon, Haryana, India</td>\n",
|
284 |
+
" <td>https://in.linkedin.com/jobs/view/artificial-i...</td>\n",
|
285 |
+
" </tr>\n",
|
286 |
+
" <tr>\n",
|
287 |
+
" <th>...</th>\n",
|
288 |
+
" <td>...</td>\n",
|
289 |
+
" <td>...</td>\n",
|
290 |
+
" <td>...</td>\n",
|
291 |
+
" <td>...</td>\n",
|
292 |
+
" </tr>\n",
|
293 |
+
" <tr>\n",
|
294 |
+
" <th>70</th>\n",
|
295 |
+
" <td>VIZON</td>\n",
|
296 |
+
" <td>AI Developer</td>\n",
|
297 |
+
" <td>Dehradun, Uttarakhand, India</td>\n",
|
298 |
+
" <td>https://in.linkedin.com/jobs/view/ai-developer...</td>\n",
|
299 |
+
" </tr>\n",
|
300 |
+
" <tr>\n",
|
301 |
+
" <th>71</th>\n",
|
302 |
+
" <td>Singular Intelligence</td>\n",
|
303 |
+
" <td>Senior Data Scientist</td>\n",
|
304 |
+
" <td>Bengaluru, Karnataka, India</td>\n",
|
305 |
+
" <td>https://in.linkedin.com/jobs/view/senior-data-...</td>\n",
|
306 |
+
" </tr>\n",
|
307 |
+
" <tr>\n",
|
308 |
+
" <th>72</th>\n",
|
309 |
+
" <td>LTIMindtree</td>\n",
|
310 |
+
" <td>AI/ML with Azure</td>\n",
|
311 |
+
" <td>Coimbatore, Tamil Nadu, India</td>\n",
|
312 |
+
" <td>https://in.linkedin.com/jobs/view/ai-ml-with-a...</td>\n",
|
313 |
+
" </tr>\n",
|
314 |
+
" <tr>\n",
|
315 |
+
" <th>73</th>\n",
|
316 |
+
" <td>A.P. Moller - Maersk</td>\n",
|
317 |
+
" <td>Data Product Lead - Machine Learning</td>\n",
|
318 |
+
" <td>Bengaluru, Karnataka, India</td>\n",
|
319 |
+
" <td>https://in.linkedin.com/jobs/view/data-product...</td>\n",
|
320 |
+
" </tr>\n",
|
321 |
+
" <tr>\n",
|
322 |
+
" <th>74</th>\n",
|
323 |
+
" <td>Epiq</td>\n",
|
324 |
+
" <td>Sr AI/ML Consultant</td>\n",
|
325 |
+
" <td>Hyderabad, Telangana, India</td>\n",
|
326 |
+
" <td>https://in.linkedin.com/jobs/view/sr-ai-ml-con...</td>\n",
|
327 |
+
" </tr>\n",
|
328 |
+
" </tbody>\n",
|
329 |
+
"</table>\n",
|
330 |
+
"<p>75 rows × 4 columns</p>\n",
|
331 |
+
"</div>"
|
332 |
+
],
|
333 |
+
"text/plain": [
|
334 |
+
" Company Name Job Title \\\n",
|
335 |
+
"0 nasscom Artificial Intelligence (AI) \n",
|
336 |
+
"1 Deloitte Data Scientist \n",
|
337 |
+
"2 L&T Technology Services Data Scientist \n",
|
338 |
+
"3 Predis.ai Data Science \n",
|
339 |
+
"4 Api Logistics Artificial Intelligence (AI) \n",
|
340 |
+
".. ... ... \n",
|
341 |
+
"70 VIZON AI Developer \n",
|
342 |
+
"71 Singular Intelligence Senior Data Scientist \n",
|
343 |
+
"72 LTIMindtree AI/ML with Azure \n",
|
344 |
+
"73 A.P. Moller - Maersk Data Product Lead - Machine Learning \n",
|
345 |
+
"74 Epiq Sr AI/ML Consultant \n",
|
346 |
+
"\n",
|
347 |
+
" Location \\\n",
|
348 |
+
"0 Noida, Uttar Pradesh, India \n",
|
349 |
+
"1 Gurugram, Haryana, India \n",
|
350 |
+
"2 Hyderabad, Telangana, India \n",
|
351 |
+
"3 Pune, Maharashtra, India \n",
|
352 |
+
"4 Gurgaon, Haryana, India \n",
|
353 |
+
".. ... \n",
|
354 |
+
"70 Dehradun, Uttarakhand, India \n",
|
355 |
+
"71 Bengaluru, Karnataka, India \n",
|
356 |
+
"72 Coimbatore, Tamil Nadu, India \n",
|
357 |
+
"73 Bengaluru, Karnataka, India \n",
|
358 |
+
"74 Hyderabad, Telangana, India \n",
|
359 |
+
"\n",
|
360 |
+
" Website URL \n",
|
361 |
+
"0 https://in.linkedin.com/jobs/view/artificial-i... \n",
|
362 |
+
"1 https://in.linkedin.com/jobs/view/data-scienti... \n",
|
363 |
+
"2 https://in.linkedin.com/jobs/view/data-scienti... \n",
|
364 |
+
"3 https://in.linkedin.com/jobs/view/data-science... \n",
|
365 |
+
"4 https://in.linkedin.com/jobs/view/artificial-i... \n",
|
366 |
+
".. ... \n",
|
367 |
+
"70 https://in.linkedin.com/jobs/view/ai-developer... \n",
|
368 |
+
"71 https://in.linkedin.com/jobs/view/senior-data-... \n",
|
369 |
+
"72 https://in.linkedin.com/jobs/view/ai-ml-with-a... \n",
|
370 |
+
"73 https://in.linkedin.com/jobs/view/data-product... \n",
|
371 |
+
"74 https://in.linkedin.com/jobs/view/sr-ai-ml-con... \n",
|
372 |
+
"\n",
|
373 |
+
"[75 rows x 4 columns]"
|
374 |
+
]
|
375 |
+
},
|
376 |
+
"execution_count": 127,
|
377 |
+
"metadata": {},
|
378 |
+
"output_type": "execute_result"
|
379 |
+
}
|
380 |
+
],
|
381 |
+
"source": [
|
382 |
+
"df = pd.DataFrame(company_name, columns=['Company Name'])\n",
|
383 |
+
"df['Job Title'] = pd.DataFrame(job_title)\n",
|
384 |
+
"df['Location'] = pd.DataFrame(company_location)\n",
|
385 |
+
"df['Website URL'] = pd.DataFrame(job_url)\n",
|
386 |
+
"df"
|
387 |
+
]
|
388 |
+
},
|
389 |
+
{
|
390 |
+
"cell_type": "code",
|
391 |
+
"execution_count": 129,
|
392 |
+
"metadata": {},
|
393 |
+
"outputs": [],
|
394 |
+
"source": [
|
395 |
+
"def job_title_filter(x, user_input_job_title):\n",
|
396 |
+
" \n",
|
397 |
+
" s = [i.lower() for i in user_input_job_title]\n",
|
398 |
+
" suggestion = []\n",
|
399 |
+
" for i in s:\n",
|
400 |
+
" suggestion.extend(i.split())\n",
|
401 |
+
"\n",
|
402 |
+
" s = x.split()\n",
|
403 |
+
" a = [i.lower() for i in s]\n",
|
404 |
+
" \n",
|
405 |
+
" intersection = list(set(suggestion).intersection(set(a)))\n",
|
406 |
+
" return x if len(intersection)>1 else np.nan"
|
407 |
+
]
|
408 |
+
},
|
409 |
+
{
|
410 |
+
"cell_type": "code",
|
411 |
+
"execution_count": 130,
|
412 |
+
"metadata": {},
|
413 |
+
"outputs": [
|
414 |
+
{
|
415 |
+
"data": {
|
416 |
+
"text/html": [
|
417 |
+
"<div>\n",
|
418 |
+
"<style scoped>\n",
|
419 |
+
" .dataframe tbody tr th:only-of-type {\n",
|
420 |
+
" vertical-align: middle;\n",
|
421 |
+
" }\n",
|
422 |
+
"\n",
|
423 |
+
" .dataframe tbody tr th {\n",
|
424 |
+
" vertical-align: top;\n",
|
425 |
+
" }\n",
|
426 |
+
"\n",
|
427 |
+
" .dataframe thead th {\n",
|
428 |
+
" text-align: right;\n",
|
429 |
+
" }\n",
|
430 |
+
"</style>\n",
|
431 |
+
"<table border=\"1\" class=\"dataframe\">\n",
|
432 |
+
" <thead>\n",
|
433 |
+
" <tr style=\"text-align: right;\">\n",
|
434 |
+
" <th></th>\n",
|
435 |
+
" <th>Company Name</th>\n",
|
436 |
+
" <th>Job Title</th>\n",
|
437 |
+
" <th>Location</th>\n",
|
438 |
+
" <th>Website URL</th>\n",
|
439 |
+
" </tr>\n",
|
440 |
+
" </thead>\n",
|
441 |
+
" <tbody>\n",
|
442 |
+
" <tr>\n",
|
443 |
+
" <th>0</th>\n",
|
444 |
+
" <td>nasscom</td>\n",
|
445 |
+
" <td>Artificial Intelligence (AI)</td>\n",
|
446 |
+
" <td>Noida, Uttar Pradesh, India</td>\n",
|
447 |
+
" <td>https://in.linkedin.com/jobs/view/artificial-i...</td>\n",
|
448 |
+
" </tr>\n",
|
449 |
+
" <tr>\n",
|
450 |
+
" <th>1</th>\n",
|
451 |
+
" <td>Deloitte</td>\n",
|
452 |
+
" <td>Data Scientist</td>\n",
|
453 |
+
" <td>Gurugram, Haryana, India</td>\n",
|
454 |
+
" <td>https://in.linkedin.com/jobs/view/data-scienti...</td>\n",
|
455 |
+
" </tr>\n",
|
456 |
+
" <tr>\n",
|
457 |
+
" <th>2</th>\n",
|
458 |
+
" <td>L&T Technology Services</td>\n",
|
459 |
+
" <td>Data Scientist</td>\n",
|
460 |
+
" <td>Hyderabad, Telangana, India</td>\n",
|
461 |
+
" <td>https://in.linkedin.com/jobs/view/data-scienti...</td>\n",
|
462 |
+
" </tr>\n",
|
463 |
+
" <tr>\n",
|
464 |
+
" <th>3</th>\n",
|
465 |
+
" <td>Api Logistics</td>\n",
|
466 |
+
" <td>Artificial Intelligence (AI)</td>\n",
|
467 |
+
" <td>Gurgaon, Haryana, India</td>\n",
|
468 |
+
" <td>https://in.linkedin.com/jobs/view/artificial-i...</td>\n",
|
469 |
+
" </tr>\n",
|
470 |
+
" <tr>\n",
|
471 |
+
" <th>4</th>\n",
|
472 |
+
" <td>E2E Networks Limited</td>\n",
|
473 |
+
" <td>Artificial Intelligence (AI)</td>\n",
|
474 |
+
" <td>Delhi, India</td>\n",
|
475 |
+
" <td>https://in.linkedin.com/jobs/view/artificial-i...</td>\n",
|
476 |
+
" </tr>\n",
|
477 |
+
" <tr>\n",
|
478 |
+
" <th>5</th>\n",
|
479 |
+
" <td>Factspan</td>\n",
|
480 |
+
" <td>Data Scientist</td>\n",
|
481 |
+
" <td>Bengaluru, Karnataka, India</td>\n",
|
482 |
+
" <td>https://in.linkedin.com/jobs/view/data-scienti...</td>\n",
|
483 |
+
" </tr>\n",
|
484 |
+
" <tr>\n",
|
485 |
+
" <th>6</th>\n",
|
486 |
+
" <td>MakeMyTrip</td>\n",
|
487 |
+
" <td>Senior/Lead Data Scientist</td>\n",
|
488 |
+
" <td>Bengaluru, Karnataka, India</td>\n",
|
489 |
+
" <td>https://in.linkedin.com/jobs/view/senior-lead-...</td>\n",
|
490 |
+
" </tr>\n",
|
491 |
+
" <tr>\n",
|
492 |
+
" <th>7</th>\n",
|
493 |
+
" <td>Persistent Systems</td>\n",
|
494 |
+
" <td>Senior Data Scientist</td>\n",
|
495 |
+
" <td>Pune, Maharashtra, India</td>\n",
|
496 |
+
" <td>https://in.linkedin.com/jobs/view/senior-data-...</td>\n",
|
497 |
+
" </tr>\n",
|
498 |
+
" <tr>\n",
|
499 |
+
" <th>8</th>\n",
|
500 |
+
" <td>CodeRoofs IT Solutions</td>\n",
|
501 |
+
" <td>Generative Artificial Intelligence (AI)</td>\n",
|
502 |
+
" <td>Sahibzada Ajit Singh Nagar, Punjab, India</td>\n",
|
503 |
+
" <td>https://in.linkedin.com/jobs/view/generative-a...</td>\n",
|
504 |
+
" </tr>\n",
|
505 |
+
" <tr>\n",
|
506 |
+
" <th>9</th>\n",
|
507 |
+
" <td>LENS Corporation</td>\n",
|
508 |
+
" <td>Artificial Intelligence Researcher</td>\n",
|
509 |
+
" <td>Gurugram, Haryana, India</td>\n",
|
510 |
+
" <td>https://in.linkedin.com/jobs/view/artificial-i...</td>\n",
|
511 |
+
" </tr>\n",
|
512 |
+
" <tr>\n",
|
513 |
+
" <th>10</th>\n",
|
514 |
+
" <td>Connect Tech+Talent</td>\n",
|
515 |
+
" <td>Senior Data Scientist (Battery Industry)</td>\n",
|
516 |
+
" <td>Hyderabad, Telangana, India</td>\n",
|
517 |
+
" <td>https://in.linkedin.com/jobs/view/senior-data-...</td>\n",
|
518 |
+
" </tr>\n",
|
519 |
+
" <tr>\n",
|
520 |
+
" <th>11</th>\n",
|
521 |
+
" <td>CloudMoyo</td>\n",
|
522 |
+
" <td>Data Scientist</td>\n",
|
523 |
+
" <td>Pune, Maharashtra, India</td>\n",
|
524 |
+
" <td>https://in.linkedin.com/jobs/view/data-scienti...</td>\n",
|
525 |
+
" </tr>\n",
|
526 |
+
" <tr>\n",
|
527 |
+
" <th>12</th>\n",
|
528 |
+
" <td>HARMAN International</td>\n",
|
529 |
+
" <td>Data Scientist</td>\n",
|
530 |
+
" <td>Bengaluru, Karnataka, India</td>\n",
|
531 |
+
" <td>https://in.linkedin.com/jobs/view/data-scienti...</td>\n",
|
532 |
+
" </tr>\n",
|
533 |
+
" <tr>\n",
|
534 |
+
" <th>13</th>\n",
|
535 |
+
" <td>MakeMyTrip</td>\n",
|
536 |
+
" <td>Principal Data Scientist</td>\n",
|
537 |
+
" <td>Bengaluru, Karnataka, India</td>\n",
|
538 |
+
" <td>https://in.linkedin.com/jobs/view/principal-da...</td>\n",
|
539 |
+
" </tr>\n",
|
540 |
+
" <tr>\n",
|
541 |
+
" <th>14</th>\n",
|
542 |
+
" <td>Shree Cement Ltd.</td>\n",
|
543 |
+
" <td>Data Scientist</td>\n",
|
544 |
+
" <td>Kolkata, West Bengal, India</td>\n",
|
545 |
+
" <td>https://in.linkedin.com/jobs/view/data-scienti...</td>\n",
|
546 |
+
" </tr>\n",
|
547 |
+
" <tr>\n",
|
548 |
+
" <th>15</th>\n",
|
549 |
+
" <td>ACN Infotech (India) Pvt. Ltd.</td>\n",
|
550 |
+
" <td>Data Scientist</td>\n",
|
551 |
+
" <td>Hyderabad, Telangana, India</td>\n",
|
552 |
+
" <td>https://in.linkedin.com/jobs/view/data-scienti...</td>\n",
|
553 |
+
" </tr>\n",
|
554 |
+
" <tr>\n",
|
555 |
+
" <th>16</th>\n",
|
556 |
+
" <td>IBM</td>\n",
|
557 |
+
" <td>Data Scientist: Artificial Intelligence</td>\n",
|
558 |
+
" <td>Hyderabad, Telangana, India</td>\n",
|
559 |
+
" <td>https://in.linkedin.com/jobs/view/data-scienti...</td>\n",
|
560 |
+
" </tr>\n",
|
561 |
+
" <tr>\n",
|
562 |
+
" <th>17</th>\n",
|
563 |
+
" <td>Microsoft</td>\n",
|
564 |
+
" <td>Senior Data Scientist</td>\n",
|
565 |
+
" <td>Bengaluru, Karnataka, India</td>\n",
|
566 |
+
" <td>https://in.linkedin.com/jobs/view/senior-data-...</td>\n",
|
567 |
+
" </tr>\n",
|
568 |
+
" <tr>\n",
|
569 |
+
" <th>18</th>\n",
|
570 |
+
" <td>IBM</td>\n",
|
571 |
+
" <td>Data Scientist: Artificial Intelligence</td>\n",
|
572 |
+
" <td>Hyderabad, Telangana, India</td>\n",
|
573 |
+
" <td>https://in.linkedin.com/jobs/view/data-scienti...</td>\n",
|
574 |
+
" </tr>\n",
|
575 |
+
" <tr>\n",
|
576 |
+
" <th>19</th>\n",
|
577 |
+
" <td>Hyperhire</td>\n",
|
578 |
+
" <td>Artificial Intelligence Researcher</td>\n",
|
579 |
+
" <td>India</td>\n",
|
580 |
+
" <td>https://in.linkedin.com/jobs/view/artificial-i...</td>\n",
|
581 |
+
" </tr>\n",
|
582 |
+
" <tr>\n",
|
583 |
+
" <th>20</th>\n",
|
584 |
+
" <td>klimbB</td>\n",
|
585 |
+
" <td>Principal Data Scientist - AI / ML BI (up to 4...</td>\n",
|
586 |
+
" <td>Bengaluru, Karnataka, India</td>\n",
|
587 |
+
" <td>https://in.linkedin.com/jobs/view/principal-da...</td>\n",
|
588 |
+
" </tr>\n",
|
589 |
+
" <tr>\n",
|
590 |
+
" <th>21</th>\n",
|
591 |
+
" <td>IBM</td>\n",
|
592 |
+
" <td>Data Scientist: Artificial Intelligence</td>\n",
|
593 |
+
" <td>Hyderabad, Telangana, India</td>\n",
|
594 |
+
" <td>https://in.linkedin.com/jobs/view/data-scienti...</td>\n",
|
595 |
+
" </tr>\n",
|
596 |
+
" <tr>\n",
|
597 |
+
" <th>22</th>\n",
|
598 |
+
" <td>Microsoft</td>\n",
|
599 |
+
" <td>Senior Data Scientist</td>\n",
|
600 |
+
" <td>Mumbai, Maharashtra, India</td>\n",
|
601 |
+
" <td>https://in.linkedin.com/jobs/view/senior-data-...</td>\n",
|
602 |
+
" </tr>\n",
|
603 |
+
" <tr>\n",
|
604 |
+
" <th>23</th>\n",
|
605 |
+
" <td>Techfastic</td>\n",
|
606 |
+
" <td>Data Scientist</td>\n",
|
607 |
+
" <td>India</td>\n",
|
608 |
+
" <td>https://in.linkedin.com/jobs/view/data-scienti...</td>\n",
|
609 |
+
" </tr>\n",
|
610 |
+
" <tr>\n",
|
611 |
+
" <th>24</th>\n",
|
612 |
+
" <td>AjnaLens</td>\n",
|
613 |
+
" <td>Data Scientist</td>\n",
|
614 |
+
" <td>Thane, Maharashtra, India</td>\n",
|
615 |
+
" <td>https://in.linkedin.com/jobs/view/data-scienti...</td>\n",
|
616 |
+
" </tr>\n",
|
617 |
+
" <tr>\n",
|
618 |
+
" <th>25</th>\n",
|
619 |
+
" <td>Fusion Plus Solutions Inc</td>\n",
|
620 |
+
" <td>Machine learning & artificial Intelligence</td>\n",
|
621 |
+
" <td>Hyderabad, Telangana, India</td>\n",
|
622 |
+
" <td>https://in.linkedin.com/jobs/view/machine-lear...</td>\n",
|
623 |
+
" </tr>\n",
|
624 |
+
" <tr>\n",
|
625 |
+
" <th>26</th>\n",
|
626 |
+
" <td>Singular Intelligence</td>\n",
|
627 |
+
" <td>Senior Data Scientist</td>\n",
|
628 |
+
" <td>Bengaluru, Karnataka, India</td>\n",
|
629 |
+
" <td>https://in.linkedin.com/jobs/view/senior-data-...</td>\n",
|
630 |
+
" </tr>\n",
|
631 |
+
" </tbody>\n",
|
632 |
+
"</table>\n",
|
633 |
+
"</div>"
|
634 |
+
],
|
635 |
+
"text/plain": [
|
636 |
+
" Company Name \\\n",
|
637 |
+
"0 nasscom \n",
|
638 |
+
"1 Deloitte \n",
|
639 |
+
"2 L&T Technology Services \n",
|
640 |
+
"3 Api Logistics \n",
|
641 |
+
"4 E2E Networks Limited \n",
|
642 |
+
"5 Factspan \n",
|
643 |
+
"6 MakeMyTrip \n",
|
644 |
+
"7 Persistent Systems \n",
|
645 |
+
"8 CodeRoofs IT Solutions \n",
|
646 |
+
"9 LENS Corporation \n",
|
647 |
+
"10 Connect Tech+Talent \n",
|
648 |
+
"11 CloudMoyo \n",
|
649 |
+
"12 HARMAN International \n",
|
650 |
+
"13 MakeMyTrip \n",
|
651 |
+
"14 Shree Cement Ltd. \n",
|
652 |
+
"15 ACN Infotech (India) Pvt. Ltd. \n",
|
653 |
+
"16 IBM \n",
|
654 |
+
"17 Microsoft \n",
|
655 |
+
"18 IBM \n",
|
656 |
+
"19 Hyperhire \n",
|
657 |
+
"20 klimbB \n",
|
658 |
+
"21 IBM \n",
|
659 |
+
"22 Microsoft \n",
|
660 |
+
"23 Techfastic \n",
|
661 |
+
"24 AjnaLens \n",
|
662 |
+
"25 Fusion Plus Solutions Inc \n",
|
663 |
+
"26 Singular Intelligence \n",
|
664 |
+
"\n",
|
665 |
+
" Job Title \\\n",
|
666 |
+
"0 Artificial Intelligence (AI) \n",
|
667 |
+
"1 Data Scientist \n",
|
668 |
+
"2 Data Scientist \n",
|
669 |
+
"3 Artificial Intelligence (AI) \n",
|
670 |
+
"4 Artificial Intelligence (AI) \n",
|
671 |
+
"5 Data Scientist \n",
|
672 |
+
"6 Senior/Lead Data Scientist \n",
|
673 |
+
"7 Senior Data Scientist \n",
|
674 |
+
"8 Generative Artificial Intelligence (AI) \n",
|
675 |
+
"9 Artificial Intelligence Researcher \n",
|
676 |
+
"10 Senior Data Scientist (Battery Industry) \n",
|
677 |
+
"11 Data Scientist \n",
|
678 |
+
"12 Data Scientist \n",
|
679 |
+
"13 Principal Data Scientist \n",
|
680 |
+
"14 Data Scientist \n",
|
681 |
+
"15 Data Scientist \n",
|
682 |
+
"16 Data Scientist: Artificial Intelligence \n",
|
683 |
+
"17 Senior Data Scientist \n",
|
684 |
+
"18 Data Scientist: Artificial Intelligence \n",
|
685 |
+
"19 Artificial Intelligence Researcher \n",
|
686 |
+
"20 Principal Data Scientist - AI / ML BI (up to 4... \n",
|
687 |
+
"21 Data Scientist: Artificial Intelligence \n",
|
688 |
+
"22 Senior Data Scientist \n",
|
689 |
+
"23 Data Scientist \n",
|
690 |
+
"24 Data Scientist \n",
|
691 |
+
"25 Machine learning & artificial Intelligence \n",
|
692 |
+
"26 Senior Data Scientist \n",
|
693 |
+
"\n",
|
694 |
+
" Location \\\n",
|
695 |
+
"0 Noida, Uttar Pradesh, India \n",
|
696 |
+
"1 Gurugram, Haryana, India \n",
|
697 |
+
"2 Hyderabad, Telangana, India \n",
|
698 |
+
"3 Gurgaon, Haryana, India \n",
|
699 |
+
"4 Delhi, India \n",
|
700 |
+
"5 Bengaluru, Karnataka, India \n",
|
701 |
+
"6 Bengaluru, Karnataka, India \n",
|
702 |
+
"7 Pune, Maharashtra, India \n",
|
703 |
+
"8 Sahibzada Ajit Singh Nagar, Punjab, India \n",
|
704 |
+
"9 Gurugram, Haryana, India \n",
|
705 |
+
"10 Hyderabad, Telangana, India \n",
|
706 |
+
"11 Pune, Maharashtra, India \n",
|
707 |
+
"12 Bengaluru, Karnataka, India \n",
|
708 |
+
"13 Bengaluru, Karnataka, India \n",
|
709 |
+
"14 Kolkata, West Bengal, India \n",
|
710 |
+
"15 Hyderabad, Telangana, India \n",
|
711 |
+
"16 Hyderabad, Telangana, India \n",
|
712 |
+
"17 Bengaluru, Karnataka, India \n",
|
713 |
+
"18 Hyderabad, Telangana, India \n",
|
714 |
+
"19 India \n",
|
715 |
+
"20 Bengaluru, Karnataka, India \n",
|
716 |
+
"21 Hyderabad, Telangana, India \n",
|
717 |
+
"22 Mumbai, Maharashtra, India \n",
|
718 |
+
"23 India \n",
|
719 |
+
"24 Thane, Maharashtra, India \n",
|
720 |
+
"25 Hyderabad, Telangana, India \n",
|
721 |
+
"26 Bengaluru, Karnataka, India \n",
|
722 |
+
"\n",
|
723 |
+
" Website URL \n",
|
724 |
+
"0 https://in.linkedin.com/jobs/view/artificial-i... \n",
|
725 |
+
"1 https://in.linkedin.com/jobs/view/data-scienti... \n",
|
726 |
+
"2 https://in.linkedin.com/jobs/view/data-scienti... \n",
|
727 |
+
"3 https://in.linkedin.com/jobs/view/artificial-i... \n",
|
728 |
+
"4 https://in.linkedin.com/jobs/view/artificial-i... \n",
|
729 |
+
"5 https://in.linkedin.com/jobs/view/data-scienti... \n",
|
730 |
+
"6 https://in.linkedin.com/jobs/view/senior-lead-... \n",
|
731 |
+
"7 https://in.linkedin.com/jobs/view/senior-data-... \n",
|
732 |
+
"8 https://in.linkedin.com/jobs/view/generative-a... \n",
|
733 |
+
"9 https://in.linkedin.com/jobs/view/artificial-i... \n",
|
734 |
+
"10 https://in.linkedin.com/jobs/view/senior-data-... \n",
|
735 |
+
"11 https://in.linkedin.com/jobs/view/data-scienti... \n",
|
736 |
+
"12 https://in.linkedin.com/jobs/view/data-scienti... \n",
|
737 |
+
"13 https://in.linkedin.com/jobs/view/principal-da... \n",
|
738 |
+
"14 https://in.linkedin.com/jobs/view/data-scienti... \n",
|
739 |
+
"15 https://in.linkedin.com/jobs/view/data-scienti... \n",
|
740 |
+
"16 https://in.linkedin.com/jobs/view/data-scienti... \n",
|
741 |
+
"17 https://in.linkedin.com/jobs/view/senior-data-... \n",
|
742 |
+
"18 https://in.linkedin.com/jobs/view/data-scienti... \n",
|
743 |
+
"19 https://in.linkedin.com/jobs/view/artificial-i... \n",
|
744 |
+
"20 https://in.linkedin.com/jobs/view/principal-da... \n",
|
745 |
+
"21 https://in.linkedin.com/jobs/view/data-scienti... \n",
|
746 |
+
"22 https://in.linkedin.com/jobs/view/senior-data-... \n",
|
747 |
+
"23 https://in.linkedin.com/jobs/view/data-scienti... \n",
|
748 |
+
"24 https://in.linkedin.com/jobs/view/data-scienti... \n",
|
749 |
+
"25 https://in.linkedin.com/jobs/view/machine-lear... \n",
|
750 |
+
"26 https://in.linkedin.com/jobs/view/senior-data-... "
|
751 |
+
]
|
752 |
+
},
|
753 |
+
"execution_count": 130,
|
754 |
+
"metadata": {},
|
755 |
+
"output_type": "execute_result"
|
756 |
+
}
|
757 |
+
],
|
758 |
+
"source": [
|
759 |
+
"df['Job Title'] = df['Job Title'].apply(lambda x: job_title_filter(x, user_input_job_title))\n",
|
760 |
+
"df=df.dropna()\n",
|
761 |
+
"df.reset_index(drop=True, inplace=True)\n",
|
762 |
+
"df"
|
763 |
+
]
|
764 |
+
},
|
765 |
+
{
|
766 |
+
"cell_type": "code",
|
767 |
+
"execution_count": 131,
|
768 |
+
"metadata": {},
|
769 |
+
"outputs": [
|
770 |
+
{
|
771 |
+
"data": {
|
772 |
+
"text/html": [
|
773 |
+
"<div>\n",
|
774 |
+
"<style scoped>\n",
|
775 |
+
" .dataframe tbody tr th:only-of-type {\n",
|
776 |
+
" vertical-align: middle;\n",
|
777 |
+
" }\n",
|
778 |
+
"\n",
|
779 |
+
" .dataframe tbody tr th {\n",
|
780 |
+
" vertical-align: top;\n",
|
781 |
+
" }\n",
|
782 |
+
"\n",
|
783 |
+
" .dataframe thead th {\n",
|
784 |
+
" text-align: right;\n",
|
785 |
+
" }\n",
|
786 |
+
"</style>\n",
|
787 |
+
"<table border=\"1\" class=\"dataframe\">\n",
|
788 |
+
" <thead>\n",
|
789 |
+
" <tr style=\"text-align: right;\">\n",
|
790 |
+
" <th></th>\n",
|
791 |
+
" <th>Company Name</th>\n",
|
792 |
+
" <th>Job Title</th>\n",
|
793 |
+
" <th>Location</th>\n",
|
794 |
+
" <th>Website URL</th>\n",
|
795 |
+
" </tr>\n",
|
796 |
+
" </thead>\n",
|
797 |
+
" <tbody>\n",
|
798 |
+
" <tr>\n",
|
799 |
+
" <th>0</th>\n",
|
800 |
+
" <td>nasscom</td>\n",
|
801 |
+
" <td>Artificial Intelligence (AI)</td>\n",
|
802 |
+
" <td>Noida, Uttar Pradesh, India</td>\n",
|
803 |
+
" <td>https://in.linkedin.com/jobs/view/artificial-i...</td>\n",
|
804 |
+
" </tr>\n",
|
805 |
+
" <tr>\n",
|
806 |
+
" <th>1</th>\n",
|
807 |
+
" <td>Deloitte</td>\n",
|
808 |
+
" <td>Data Scientist</td>\n",
|
809 |
+
" <td>Gurugram, Haryana, India</td>\n",
|
810 |
+
" <td>https://in.linkedin.com/jobs/view/data-scienti...</td>\n",
|
811 |
+
" </tr>\n",
|
812 |
+
" <tr>\n",
|
813 |
+
" <th>2</th>\n",
|
814 |
+
" <td>L&T Technology Services</td>\n",
|
815 |
+
" <td>Data Scientist</td>\n",
|
816 |
+
" <td>Hyderabad, Telangana, India</td>\n",
|
817 |
+
" <td>https://in.linkedin.com/jobs/view/data-scienti...</td>\n",
|
818 |
+
" </tr>\n",
|
819 |
+
" <tr>\n",
|
820 |
+
" <th>3</th>\n",
|
821 |
+
" <td>Api Logistics</td>\n",
|
822 |
+
" <td>Artificial Intelligence (AI)</td>\n",
|
823 |
+
" <td>Gurgaon, Haryana, India</td>\n",
|
824 |
+
" <td>https://in.linkedin.com/jobs/view/artificial-i...</td>\n",
|
825 |
+
" </tr>\n",
|
826 |
+
" <tr>\n",
|
827 |
+
" <th>4</th>\n",
|
828 |
+
" <td>E2E Networks Limited</td>\n",
|
829 |
+
" <td>Artificial Intelligence (AI)</td>\n",
|
830 |
+
" <td>Delhi, India</td>\n",
|
831 |
+
" <td>https://in.linkedin.com/jobs/view/artificial-i...</td>\n",
|
832 |
+
" </tr>\n",
|
833 |
+
" <tr>\n",
|
834 |
+
" <th>5</th>\n",
|
835 |
+
" <td>Factspan</td>\n",
|
836 |
+
" <td>Data Scientist</td>\n",
|
837 |
+
" <td>Bengaluru, Karnataka, India</td>\n",
|
838 |
+
" <td>https://in.linkedin.com/jobs/view/data-scienti...</td>\n",
|
839 |
+
" </tr>\n",
|
840 |
+
" <tr>\n",
|
841 |
+
" <th>6</th>\n",
|
842 |
+
" <td>MakeMyTrip</td>\n",
|
843 |
+
" <td>Senior/Lead Data Scientist</td>\n",
|
844 |
+
" <td>Bengaluru, Karnataka, India</td>\n",
|
845 |
+
" <td>https://in.linkedin.com/jobs/view/senior-lead-...</td>\n",
|
846 |
+
" </tr>\n",
|
847 |
+
" <tr>\n",
|
848 |
+
" <th>7</th>\n",
|
849 |
+
" <td>Persistent Systems</td>\n",
|
850 |
+
" <td>Senior Data Scientist</td>\n",
|
851 |
+
" <td>Pune, Maharashtra, India</td>\n",
|
852 |
+
" <td>https://in.linkedin.com/jobs/view/senior-data-...</td>\n",
|
853 |
+
" </tr>\n",
|
854 |
+
" <tr>\n",
|
855 |
+
" <th>8</th>\n",
|
856 |
+
" <td>CodeRoofs IT Solutions</td>\n",
|
857 |
+
" <td>Generative Artificial Intelligence (AI)</td>\n",
|
858 |
+
" <td>Sahibzada Ajit Singh Nagar, Punjab, India</td>\n",
|
859 |
+
" <td>https://in.linkedin.com/jobs/view/generative-a...</td>\n",
|
860 |
+
" </tr>\n",
|
861 |
+
" <tr>\n",
|
862 |
+
" <th>9</th>\n",
|
863 |
+
" <td>LENS Corporation</td>\n",
|
864 |
+
" <td>Artificial Intelligence Researcher</td>\n",
|
865 |
+
" <td>Gurugram, Haryana, India</td>\n",
|
866 |
+
" <td>https://in.linkedin.com/jobs/view/artificial-i...</td>\n",
|
867 |
+
" </tr>\n",
|
868 |
+
" </tbody>\n",
|
869 |
+
"</table>\n",
|
870 |
+
"</div>"
|
871 |
+
],
|
872 |
+
"text/plain": [
|
873 |
+
" Company Name Job Title \\\n",
|
874 |
+
"0 nasscom Artificial Intelligence (AI) \n",
|
875 |
+
"1 Deloitte Data Scientist \n",
|
876 |
+
"2 L&T Technology Services Data Scientist \n",
|
877 |
+
"3 Api Logistics Artificial Intelligence (AI) \n",
|
878 |
+
"4 E2E Networks Limited Artificial Intelligence (AI) \n",
|
879 |
+
"5 Factspan Data Scientist \n",
|
880 |
+
"6 MakeMyTrip Senior/Lead Data Scientist \n",
|
881 |
+
"7 Persistent Systems Senior Data Scientist \n",
|
882 |
+
"8 CodeRoofs IT Solutions Generative Artificial Intelligence (AI) \n",
|
883 |
+
"9 LENS Corporation Artificial Intelligence Researcher \n",
|
884 |
+
"\n",
|
885 |
+
" Location \\\n",
|
886 |
+
"0 Noida, Uttar Pradesh, India \n",
|
887 |
+
"1 Gurugram, Haryana, India \n",
|
888 |
+
"2 Hyderabad, Telangana, India \n",
|
889 |
+
"3 Gurgaon, Haryana, India \n",
|
890 |
+
"4 Delhi, India \n",
|
891 |
+
"5 Bengaluru, Karnataka, India \n",
|
892 |
+
"6 Bengaluru, Karnataka, India \n",
|
893 |
+
"7 Pune, Maharashtra, India \n",
|
894 |
+
"8 Sahibzada Ajit Singh Nagar, Punjab, India \n",
|
895 |
+
"9 Gurugram, Haryana, India \n",
|
896 |
+
"\n",
|
897 |
+
" Website URL \n",
|
898 |
+
"0 https://in.linkedin.com/jobs/view/artificial-i... \n",
|
899 |
+
"1 https://in.linkedin.com/jobs/view/data-scienti... \n",
|
900 |
+
"2 https://in.linkedin.com/jobs/view/data-scienti... \n",
|
901 |
+
"3 https://in.linkedin.com/jobs/view/artificial-i... \n",
|
902 |
+
"4 https://in.linkedin.com/jobs/view/artificial-i... \n",
|
903 |
+
"5 https://in.linkedin.com/jobs/view/data-scienti... \n",
|
904 |
+
"6 https://in.linkedin.com/jobs/view/senior-lead-... \n",
|
905 |
+
"7 https://in.linkedin.com/jobs/view/senior-data-... \n",
|
906 |
+
"8 https://in.linkedin.com/jobs/view/generative-a... \n",
|
907 |
+
"9 https://in.linkedin.com/jobs/view/artificial-i... "
|
908 |
+
]
|
909 |
+
},
|
910 |
+
"execution_count": 131,
|
911 |
+
"metadata": {},
|
912 |
+
"output_type": "execute_result"
|
913 |
+
}
|
914 |
+
],
|
915 |
+
"source": [
|
916 |
+
"df=df.iloc[:10,:]\n",
|
917 |
+
"df"
|
918 |
+
]
|
919 |
+
},
|
920 |
+
{
|
921 |
+
"cell_type": "code",
|
922 |
+
"execution_count": 132,
|
923 |
+
"metadata": {},
|
924 |
+
"outputs": [],
|
925 |
+
"source": [
|
926 |
+
"def description(link):\n",
|
927 |
+
"\n",
|
928 |
+
" driver.get(link)\n",
|
929 |
+
" time.sleep(3)\n",
|
930 |
+
"\n",
|
931 |
+
" driver.find_element(by=By.CSS_SELECTOR, value='button[data-tracking-control-name=\"public_jobs_show-more-html-btn\"]').click()\n",
|
932 |
+
" time.sleep(2)\n",
|
933 |
+
"\n",
|
934 |
+
" description = driver.find_elements(by=By.CSS_SELECTOR, value='div[class=\"show-more-less-html__markup relative overflow-hidden\"]')\n",
|
935 |
+
" driver.implicitly_wait(4)\n",
|
936 |
+
" \n",
|
937 |
+
" for j in description:\n",
|
938 |
+
" return j.text"
|
939 |
+
]
|
940 |
+
},
|
941 |
+
{
|
942 |
+
"cell_type": "code",
|
943 |
+
"execution_count": 133,
|
944 |
+
"metadata": {},
|
945 |
+
"outputs": [],
|
946 |
+
"source": [
|
947 |
+
"website_url = df['Website URL'].tolist()\n",
|
948 |
+
"\n",
|
949 |
+
"job_description = []\n",
|
950 |
+
"\n",
|
951 |
+
"for i in website_url:\n",
|
952 |
+
" data = description(i)\n",
|
953 |
+
" if data is not None and len(data.strip()) > 0:\n",
|
954 |
+
" job_description.append(data)\n",
|
955 |
+
" else:\n",
|
956 |
+
" job_description.append('Description Not Available')"
|
957 |
+
]
|
958 |
+
},
|
959 |
+
{
|
960 |
+
"cell_type": "code",
|
961 |
+
"execution_count": 134,
|
962 |
+
"metadata": {},
|
963 |
+
"outputs": [
|
964 |
+
{
|
965 |
+
"data": {
|
966 |
+
"text/html": [
|
967 |
+
"<div>\n",
|
968 |
+
"<style scoped>\n",
|
969 |
+
" .dataframe tbody tr th:only-of-type {\n",
|
970 |
+
" vertical-align: middle;\n",
|
971 |
+
" }\n",
|
972 |
+
"\n",
|
973 |
+
" .dataframe tbody tr th {\n",
|
974 |
+
" vertical-align: top;\n",
|
975 |
+
" }\n",
|
976 |
+
"\n",
|
977 |
+
" .dataframe thead th {\n",
|
978 |
+
" text-align: right;\n",
|
979 |
+
" }\n",
|
980 |
+
"</style>\n",
|
981 |
+
"<table border=\"1\" class=\"dataframe\">\n",
|
982 |
+
" <thead>\n",
|
983 |
+
" <tr style=\"text-align: right;\">\n",
|
984 |
+
" <th></th>\n",
|
985 |
+
" <th>Company Name</th>\n",
|
986 |
+
" <th>Job Title</th>\n",
|
987 |
+
" <th>Location</th>\n",
|
988 |
+
" <th>Website URL</th>\n",
|
989 |
+
" <th>Job Description</th>\n",
|
990 |
+
" </tr>\n",
|
991 |
+
" </thead>\n",
|
992 |
+
" <tbody>\n",
|
993 |
+
" <tr>\n",
|
994 |
+
" <th>0</th>\n",
|
995 |
+
" <td>nasscom</td>\n",
|
996 |
+
" <td>Artificial Intelligence (AI)</td>\n",
|
997 |
+
" <td>Noida, Uttar Pradesh, India</td>\n",
|
998 |
+
" <td>https://in.linkedin.com/jobs/view/artificial-i...</td>\n",
|
999 |
+
" <td>Selected Intern's Day-to-day Responsibilities ...</td>\n",
|
1000 |
+
" </tr>\n",
|
1001 |
+
" <tr>\n",
|
1002 |
+
" <th>1</th>\n",
|
1003 |
+
" <td>Deloitte</td>\n",
|
1004 |
+
" <td>Data Scientist</td>\n",
|
1005 |
+
" <td>Gurugram, Haryana, India</td>\n",
|
1006 |
+
" <td>https://in.linkedin.com/jobs/view/data-scienti...</td>\n",
|
1007 |
+
" <td>What impact will you make?\\nEvery day, your wo...</td>\n",
|
1008 |
+
" </tr>\n",
|
1009 |
+
" <tr>\n",
|
1010 |
+
" <th>2</th>\n",
|
1011 |
+
" <td>L&T Technology Services</td>\n",
|
1012 |
+
" <td>Data Scientist</td>\n",
|
1013 |
+
" <td>Hyderabad, Telangana, India</td>\n",
|
1014 |
+
" <td>https://in.linkedin.com/jobs/view/data-scienti...</td>\n",
|
1015 |
+
" <td>About the Role\\nWe are looking for Data Scient...</td>\n",
|
1016 |
+
" </tr>\n",
|
1017 |
+
" <tr>\n",
|
1018 |
+
" <th>3</th>\n",
|
1019 |
+
" <td>Api Logistics</td>\n",
|
1020 |
+
" <td>Artificial Intelligence (AI)</td>\n",
|
1021 |
+
" <td>Gurgaon, Haryana, India</td>\n",
|
1022 |
+
" <td>https://in.linkedin.com/jobs/view/artificial-i...</td>\n",
|
1023 |
+
" <td>We will be building an AI bot, which will be a...</td>\n",
|
1024 |
+
" </tr>\n",
|
1025 |
+
" <tr>\n",
|
1026 |
+
" <th>4</th>\n",
|
1027 |
+
" <td>E2E Networks Limited</td>\n",
|
1028 |
+
" <td>Artificial Intelligence (AI)</td>\n",
|
1029 |
+
" <td>Delhi, India</td>\n",
|
1030 |
+
" <td>https://in.linkedin.com/jobs/view/artificial-i...</td>\n",
|
1031 |
+
" <td>As an AI intern at E2E Networks Limited, you w...</td>\n",
|
1032 |
+
" </tr>\n",
|
1033 |
+
" <tr>\n",
|
1034 |
+
" <th>5</th>\n",
|
1035 |
+
" <td>Factspan</td>\n",
|
1036 |
+
" <td>Data Scientist</td>\n",
|
1037 |
+
" <td>Bengaluru, Karnataka, India</td>\n",
|
1038 |
+
" <td>https://in.linkedin.com/jobs/view/data-scienti...</td>\n",
|
1039 |
+
" <td>Responsibilities\\nSelecting features, building...</td>\n",
|
1040 |
+
" </tr>\n",
|
1041 |
+
" <tr>\n",
|
1042 |
+
" <th>6</th>\n",
|
1043 |
+
" <td>MakeMyTrip</td>\n",
|
1044 |
+
" <td>Senior/Lead Data Scientist</td>\n",
|
1045 |
+
" <td>Bengaluru, Karnataka, India</td>\n",
|
1046 |
+
" <td>https://in.linkedin.com/jobs/view/senior-lead-...</td>\n",
|
1047 |
+
" <td>Responsibilities:\\nTrain and deploy best in cl...</td>\n",
|
1048 |
+
" </tr>\n",
|
1049 |
+
" <tr>\n",
|
1050 |
+
" <th>7</th>\n",
|
1051 |
+
" <td>Persistent Systems</td>\n",
|
1052 |
+
" <td>Senior Data Scientist</td>\n",
|
1053 |
+
" <td>Pune, Maharashtra, India</td>\n",
|
1054 |
+
" <td>https://in.linkedin.com/jobs/view/senior-data-...</td>\n",
|
1055 |
+
" <td>About Position\\n\\nWe are looking for a highly ...</td>\n",
|
1056 |
+
" </tr>\n",
|
1057 |
+
" <tr>\n",
|
1058 |
+
" <th>8</th>\n",
|
1059 |
+
" <td>CodeRoofs IT Solutions</td>\n",
|
1060 |
+
" <td>Generative Artificial Intelligence (AI)</td>\n",
|
1061 |
+
" <td>Sahibzada Ajit Singh Nagar, Punjab, India</td>\n",
|
1062 |
+
" <td>https://in.linkedin.com/jobs/view/generative-a...</td>\n",
|
1063 |
+
" <td>Selected Intern's Day-to-day Responsibilities ...</td>\n",
|
1064 |
+
" </tr>\n",
|
1065 |
+
" <tr>\n",
|
1066 |
+
" <th>9</th>\n",
|
1067 |
+
" <td>LENS Corporation</td>\n",
|
1068 |
+
" <td>Artificial Intelligence Researcher</td>\n",
|
1069 |
+
" <td>Gurugram, Haryana, India</td>\n",
|
1070 |
+
" <td>https://in.linkedin.com/jobs/view/artificial-i...</td>\n",
|
1071 |
+
" <td>Requirements:\\nExcellent knowledge of computer...</td>\n",
|
1072 |
+
" </tr>\n",
|
1073 |
+
" </tbody>\n",
|
1074 |
+
"</table>\n",
|
1075 |
+
"</div>"
|
1076 |
+
],
|
1077 |
+
"text/plain": [
|
1078 |
+
" Company Name Job Title \\\n",
|
1079 |
+
"0 nasscom Artificial Intelligence (AI) \n",
|
1080 |
+
"1 Deloitte Data Scientist \n",
|
1081 |
+
"2 L&T Technology Services Data Scientist \n",
|
1082 |
+
"3 Api Logistics Artificial Intelligence (AI) \n",
|
1083 |
+
"4 E2E Networks Limited Artificial Intelligence (AI) \n",
|
1084 |
+
"5 Factspan Data Scientist \n",
|
1085 |
+
"6 MakeMyTrip Senior/Lead Data Scientist \n",
|
1086 |
+
"7 Persistent Systems Senior Data Scientist \n",
|
1087 |
+
"8 CodeRoofs IT Solutions Generative Artificial Intelligence (AI) \n",
|
1088 |
+
"9 LENS Corporation Artificial Intelligence Researcher \n",
|
1089 |
+
"\n",
|
1090 |
+
" Location \\\n",
|
1091 |
+
"0 Noida, Uttar Pradesh, India \n",
|
1092 |
+
"1 Gurugram, Haryana, India \n",
|
1093 |
+
"2 Hyderabad, Telangana, India \n",
|
1094 |
+
"3 Gurgaon, Haryana, India \n",
|
1095 |
+
"4 Delhi, India \n",
|
1096 |
+
"5 Bengaluru, Karnataka, India \n",
|
1097 |
+
"6 Bengaluru, Karnataka, India \n",
|
1098 |
+
"7 Pune, Maharashtra, India \n",
|
1099 |
+
"8 Sahibzada Ajit Singh Nagar, Punjab, India \n",
|
1100 |
+
"9 Gurugram, Haryana, India \n",
|
1101 |
+
"\n",
|
1102 |
+
" Website URL \\\n",
|
1103 |
+
"0 https://in.linkedin.com/jobs/view/artificial-i... \n",
|
1104 |
+
"1 https://in.linkedin.com/jobs/view/data-scienti... \n",
|
1105 |
+
"2 https://in.linkedin.com/jobs/view/data-scienti... \n",
|
1106 |
+
"3 https://in.linkedin.com/jobs/view/artificial-i... \n",
|
1107 |
+
"4 https://in.linkedin.com/jobs/view/artificial-i... \n",
|
1108 |
+
"5 https://in.linkedin.com/jobs/view/data-scienti... \n",
|
1109 |
+
"6 https://in.linkedin.com/jobs/view/senior-lead-... \n",
|
1110 |
+
"7 https://in.linkedin.com/jobs/view/senior-data-... \n",
|
1111 |
+
"8 https://in.linkedin.com/jobs/view/generative-a... \n",
|
1112 |
+
"9 https://in.linkedin.com/jobs/view/artificial-i... \n",
|
1113 |
+
"\n",
|
1114 |
+
" Job Description \n",
|
1115 |
+
"0 Selected Intern's Day-to-day Responsibilities ... \n",
|
1116 |
+
"1 What impact will you make?\\nEvery day, your wo... \n",
|
1117 |
+
"2 About the Role\\nWe are looking for Data Scient... \n",
|
1118 |
+
"3 We will be building an AI bot, which will be a... \n",
|
1119 |
+
"4 As an AI intern at E2E Networks Limited, you w... \n",
|
1120 |
+
"5 Responsibilities\\nSelecting features, building... \n",
|
1121 |
+
"6 Responsibilities:\\nTrain and deploy best in cl... \n",
|
1122 |
+
"7 About Position\\n\\nWe are looking for a highly ... \n",
|
1123 |
+
"8 Selected Intern's Day-to-day Responsibilities ... \n",
|
1124 |
+
"9 Requirements:\\nExcellent knowledge of computer... "
|
1125 |
+
]
|
1126 |
+
},
|
1127 |
+
"execution_count": 134,
|
1128 |
+
"metadata": {},
|
1129 |
+
"output_type": "execute_result"
|
1130 |
+
}
|
1131 |
+
],
|
1132 |
+
"source": [
|
1133 |
+
"df['Job Description'] = pd.DataFrame(job_description, columns=['Description'])\n",
|
1134 |
+
"df"
|
1135 |
+
]
|
1136 |
+
},
|
1137 |
+
{
|
1138 |
+
"cell_type": "code",
|
1139 |
+
"execution_count": 135,
|
1140 |
+
"metadata": {},
|
1141 |
+
"outputs": [
|
1142 |
+
{
|
1143 |
+
"name": "stdout",
|
1144 |
+
"output_type": "stream",
|
1145 |
+
"text": [
|
1146 |
+
"Company Name : nasscom\n",
|
1147 |
+
"Job Title : Artificial Intelligence (AI)\n",
|
1148 |
+
"Location : Noida, Uttar Pradesh, India\n",
|
1149 |
+
"Website URL : https://in.linkedin.com/jobs/view/artificial-intelligence-ai-at-nasscom-3775137519?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=tIIHxWA8tlGFmgU1MGBbpQ%3D%3D&position=1&pageNum=0&trk=public_jobs_jserp-result_search-card\n",
|
1150 |
+
"Description : Selected Intern's Day-to-day Responsibilities Include\n",
|
1151 |
+
"\n",
|
1152 |
+
"Assist in the development and refinement of work plans for Nasscom's responsible AI programs and activities\n",
|
1153 |
+
"Collaborate with team members to identify key objectives, milestones, and deliverables\n",
|
1154 |
+
"Provide support in implementing responsible AI programs and events\n",
|
1155 |
+
"Assist in organizing workshops, seminars, and training sessions for stakeholders\n",
|
1156 |
+
"Coordinate logistical arrangements, including venue bookings, participant invitations, and materials preparation\n",
|
1157 |
+
"Conduct thorough research and analysis on responsible AI adoption practices, policies, and regulations to generate insights and recommendations for stakeholders\n",
|
1158 |
+
"Prepare comprehensive reports, briefings, and presentations summarizing research findings and key takeaways\n",
|
1159 |
+
"Identify and map potential partners, including industry organizations, academic institutions, and government agencies, with a focus on responsible AI adoption\n",
|
1160 |
+
"Conduct research to understand the expertise, capabilities, and interests of potential partners\n",
|
1161 |
+
"Assist in building relationships and collaborations with key stakeholders to advance Nasscom's responsible AI initiatives\n",
|
1162 |
+
"Company Name : Deloitte\n",
|
1163 |
+
"Job Title : Data Scientist\n",
|
1164 |
+
"Location : Gurugram, Haryana, India\n",
|
1165 |
+
"Website URL : https://in.linkedin.com/jobs/view/data-scientist-at-deloitte-3769279295?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=V1WsNRc58GvBk7auB%2FKsyw%3D%3D&position=2&pageNum=0&trk=public_jobs_jserp-result_search-card\n",
|
1166 |
+
"Description : What impact will you make?\n",
|
1167 |
+
"Every day, your work will make an impact that matters, while you thrive in a dynamic culture of inclusion, collaboration and high performance. As the undisputed leader in professional services, Deloitte is where you’ll find unrivalled opportunities to succeed and realize your full potential\n",
|
1168 |
+
"\n",
|
1169 |
+
"Deloitte is where you’ll find unrivalled opportunities to succeed and realize your full potential.\n",
|
1170 |
+
"Location - Delhi NCR\n",
|
1171 |
+
"Designation: Deputy Manager / Manager\n",
|
1172 |
+
"\n",
|
1173 |
+
"Summary:\n",
|
1174 |
+
"We are looking for a highly skilled and experienced Generative AI professional with expertise in Python, Machine Learning, Data Science, and Statistics. The ideal candidate should have a strong background in Generative AI, NLP (Natural Language Processing), and conversational chatbot development. Additionally, experience with Lang Chain, using LLM's and proficiency in either of cloud platforms is required.\n",
|
1175 |
+
"As a Generative AI professional, you will be responsible for delivering innovative AI solutions to our clients. Your role will involve:\n",
|
1176 |
+
"Design, develop, and deploy generative AI models\n",
|
1177 |
+
"Leveraging your expertise in Generative AI, Python, Machine Learning, Data Science, and Statistics to develop cutting-edge solutions for our clients.\n",
|
1178 |
+
"Utilizing NLP techniques, LangChain, and LLM's to develop conversational chatbots and language models tailored to our clients' needs.\n",
|
1179 |
+
"Collaborating with cross-functional teams to design and implement advanced AI models and algorithms.\n",
|
1180 |
+
"Providing technical expertise and thought leadership in the field of Generative AI and NLP to guide clients in adopting AI-driven solutions.\n",
|
1181 |
+
"Conducting data analysis, preprocessing, and modeling to extract valuable insights and drive data-driven decision-making.\n",
|
1182 |
+
"Staying up to date with the latest advancements in AI technologies, frameworks, and tools, and proactively learning and adopting new technologies to enhance our offerings.\n",
|
1183 |
+
"Demonstrating a strong understanding of cloud platforms, for deploying AI applications.\n",
|
1184 |
+
"Communicating effectively with clients, presenting findings, recommendations, and project progress in a clear and concise manner.\n",
|
1185 |
+
"Documenting project requirements, methodologies, and outcomes for internal and external stakeholders.\n",
|
1186 |
+
"Mentoring and guiding junior team members, sharing your knowledge and best practices to foster their growth.\n",
|
1187 |
+
"Qualifications:\n",
|
1188 |
+
"Bachelor's degree or higher in Computer Science, Data Science, or a related field.\n",
|
1189 |
+
"Minimum of 5+ years of relevant professional experience in Python, R, Machine Learning/Deep Learning, Data Science, and Statistics.\n",
|
1190 |
+
"Strong expertise in NLP, Generative AI concepts, LangChain, Other alternative LLM frameworks and conversational chatbots.\n",
|
1191 |
+
"Should have strong knowledge and experience in working with Deep Learning projects using CNN, GAN, Transformers, Encoder and decoder algorithms or any other image generation and classification use cases.\n",
|
1192 |
+
"Familiarity with LLM's (Large Language Modeling) and their applications.\n",
|
1193 |
+
"Proficiency in cloud platforms like Azure/AWS, including experience with deploying AI applications.\n",
|
1194 |
+
"Solid programming skills in Python and experience with relevant libraries and frameworks (e.g., TensorFlow, PyTorch, scikit-learn).\n",
|
1195 |
+
"Proven track record of delivering successful AI projects and driving business impact.\n",
|
1196 |
+
"Excellent communication, presentation, and documentation skills.\n",
|
1197 |
+
"Strong problem-solving abilities and a proactive attitude towards learning and adopting new technologies.\n",
|
1198 |
+
"Ability to work independently, manage multiple projects simultaneously, and collaborate effectively with diverse stakeholders.\n",
|
1199 |
+
"Primary mandatory skills:\n",
|
1200 |
+
"Deep learning (CNN, RNN, GAN, Transformers, Encoder and decoder architecture – at least two of them is mandatory), Generative AI, Python (NumPy & Pandas)\n",
|
1201 |
+
"NLP, Generative AI, Chatbot, any cloud knowledge like GCP/AWS\n",
|
1202 |
+
"Database Systems - SQL and NoSQL databases: Depending on the project's requirements, knowledge of databases like MySQL, PostgreSQL, MongoDB, etc.\n",
|
1203 |
+
"Knowledge of AWS SageMaker, Azure ML, Google AI Platform: Cloud-based platforms for building, training, and deploying machine learning models.\n",
|
1204 |
+
"Secondary Skills:\n",
|
1205 |
+
"Probability and Statistics, JavaScript, Optimization Techniques, Research and Problem-Solving\n",
|
1206 |
+
"Domain Expertise in BFSI, Fintech, Life science, Manufacturing, Telecom, Media & Entertainment\n",
|
1207 |
+
"\n",
|
1208 |
+
"\n",
|
1209 |
+
"Your role as a leader\n",
|
1210 |
+
"At Deloitte India, we believe in the importance of leadership at all levels. We expect our people to embrace and live our purpose by challenging themselves to identify issues that are most important for our clients, our people, and for society and make an impact that matters.\n",
|
1211 |
+
"\n",
|
1212 |
+
"How you’ll grow\n",
|
1213 |
+
"At Deloitte, our professional development plan focuses on helping people at every level of their career to identify and use their strengths to do their best work every day. From entry-level employees to senior leaders, we believe there’s always room to learn. We offer opportunities to help build world-class skills in addition to hands-on experience in the global, fast-changing business world. From on-the-job learning experiences to formal development programs at Deloitte University, our professionals have a variety of opportunities to continue to grow throughout their career. Explore Deloitte University, The Leadership Centre.\n",
|
1214 |
+
"Company Name : L&T Technology Services\n",
|
1215 |
+
"Job Title : Data Scientist\n",
|
1216 |
+
"Location : Hyderabad, Telangana, India\n",
|
1217 |
+
"Website URL : https://in.linkedin.com/jobs/view/data-scientist-at-l-t-technology-services-3767553599?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=ZIbaag6Y46gRRLftn3vpFQ%3D%3D&position=3&pageNum=0&trk=public_jobs_jserp-result_search-card\n",
|
1218 |
+
"Description : About the Role\n",
|
1219 |
+
"We are looking for Data Scientist who will work closely with Product management & Engineering teams to build, validate & deploy the best-in-class AI / ML / Deep Learning based solutions from scratch. You will be joining a dynamic team heavy-lifting various technological challenges and responsibilities.\n",
|
1220 |
+
"Location: Hyderabad\n",
|
1221 |
+
"Reports to: Delivery Head\n",
|
1222 |
+
"Responsibilities\n",
|
1223 |
+
"● Understand the Functional specifications & derive AI / ML / Statistical specifications\n",
|
1224 |
+
"● Exploratory Data Analysis with Python, R , Excel or any available statistical platforms to understand the summary statistics, Distribution of the dataset, Outlier detection / detection of influencing observations etc.,\n",
|
1225 |
+
"● Data Imputation & Clensing the data through industry best statistical practices\n",
|
1226 |
+
"● Data Collection both internally and from public domains\n",
|
1227 |
+
"● Feature engineering, identify the relevant attributes / features\n",
|
1228 |
+
"● Building the models to solve the industry problems with reasonable accuracy to fit for use in production\n",
|
1229 |
+
"● Identify and understand the Diagnostics of Models to know the stability of the model(s) in stressed conditions\n",
|
1230 |
+
"● Validate the model and fine-tuning the model on continual basis\n",
|
1231 |
+
"● Model deployment under given framework\n",
|
1232 |
+
"● Coordinate with our technology partners / vendors to build the best-in-class AI / ML based solutions\n",
|
1233 |
+
"● Develop and maintain strong product knowledge.\n",
|
1234 |
+
"● Stay abreast with new innovations and the latest technology trends and explore ways of leveraging these for improving the product in alignment with the business.\n",
|
1235 |
+
"\n",
|
1236 |
+
"\n",
|
1237 |
+
"\n",
|
1238 |
+
"Utility Domain Experience.\n",
|
1239 |
+
"Company Name : Api Logistics\n",
|
1240 |
+
"Job Title : Artificial Intelligence (AI)\n",
|
1241 |
+
"Location : Gurgaon, Haryana, India\n",
|
1242 |
+
"Website URL : https://in.linkedin.com/jobs/view/artificial-intelligence-ai-at-api-logistics-3775135785?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=gHlkHtkA61GpIwctphtUoA%3D%3D&position=5&pageNum=0&trk=public_jobs_jserp-result_search-card\n",
|
1243 |
+
"Description : We will be building an AI bot, which will be able to show data visualizations based on text-based prompts from the user. 2 months' accommodation will be provided to the interns coming from other states.\n",
|
1244 |
+
"\n",
|
1245 |
+
"Selected Intern's Day-to-day Responsibilities Include\n",
|
1246 |
+
"\n",
|
1247 |
+
"Exploring the architecture of the bot\n",
|
1248 |
+
"working with javascript libraries like chart.js, and D3.js to create data visualizations\n",
|
1249 |
+
"Creating a react-based web portal for the data visualizations\n",
|
1250 |
+
"Company Name : E2E Networks Limited\n",
|
1251 |
+
"Job Title : Artificial Intelligence (AI)\n",
|
1252 |
+
"Location : Delhi, India\n",
|
1253 |
+
"Website URL : https://in.linkedin.com/jobs/view/artificial-intelligence-ai-at-e2e-networks-limited-3776977208?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=fqeNZz9ofq7yszds72vw%2FA%3D%3D&position=6&pageNum=0&trk=public_jobs_jserp-result_search-card\n",
|
1254 |
+
"Description : As an AI intern at E2E Networks Limited, you will be part of an exciting and innovative team that is revolutionizing the world of technology. We are seeking a talented individual with a strong background in Artificial Intelligence and Machine Learning to join our ranks. This is a unique opportunity to work on cutting-edge projects, gain hands-on experience, and contribute to groundbreaking advancements in the field.\n",
|
1255 |
+
"\n",
|
1256 |
+
"Key Responsibilities\n",
|
1257 |
+
"\n",
|
1258 |
+
"Collaborate with the AI team to develop and implement AI models and algorithms.\n",
|
1259 |
+
"Assist in building and training machine learning models for various applications.\n",
|
1260 |
+
"Conduct research to identify new AI techniques and algorithms to enhance our products.\n",
|
1261 |
+
"Support the team in data preprocessing, feature engineering, and model evaluation.\n",
|
1262 |
+
"Help in designing and implementing AI experiments and analyzing the results.\n",
|
1263 |
+
"Contribute to the development and improvement of AI frameworks and tools.\n",
|
1264 |
+
"Stay updated with the latest advancements in AI and ML technologies and share knowledge with the team.\n",
|
1265 |
+
"\n",
|
1266 |
+
"If you have a passion for AI, a thirst for knowledge, and a desire to make a real impact in the industry, then this is the perfect opportunity for you. Join us and be part of our mission to shape the future of technology with Artificial Intelligence and Machine Learning.\n",
|
1267 |
+
"Company Name : Factspan\n",
|
1268 |
+
"Job Title : Data Scientist\n",
|
1269 |
+
"Location : Bengaluru, Karnataka, India\n",
|
1270 |
+
"Website URL : https://in.linkedin.com/jobs/view/data-scientist-at-factspan-3773099421?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=zml1CQ7VBx2LhdcwmqY2%2Fg%3D%3D&position=8&pageNum=0&trk=public_jobs_jserp-result_search-card\n",
|
1271 |
+
"Description : Responsibilities\n",
|
1272 |
+
"Selecting features, building, and optimizing classifiers/regression using machine learning and deep learning techniques\n",
|
1273 |
+
"Proficient in using data analytics tools to perform queries and analyses and for defining and correlating data, and skilled at utilizing data visualization platforms to organize and present summarizations, predictive analysis, comparative analysis, dashboards, and reporting.\n",
|
1274 |
+
"Processing, cleansing, and verifying the integrity of data used for analysis.\n",
|
1275 |
+
"Performing data mining and analytics to support ongoing continuous risk monitoring and risk assessments of operational data to recognize patterns and trends, investigate anomalies, and assess internal control environment.\n",
|
1276 |
+
"Utilize data analysis by leveraging various statistical techniques, and predictive modeling to drive and identify indicators of risk.\n",
|
1277 |
+
"Drive efficiency by automation of manual processes\n",
|
1278 |
+
"\n",
|
1279 |
+
"More responsibilities in detail:\n",
|
1280 |
+
"Excellent understanding of machine learning algorithms, such as Random Forest, Gradient Boosting, Naive Bayes, SVM, KNN. Good understanding of deep learning algorithms, such as DNN, CNN, RNN, LSTM, Autoencoders.\n",
|
1281 |
+
"Deep Knowledge of ML/AI software and packages such as python: scikit-learn, TensorFlow and R: CARET, PyTorch.\n",
|
1282 |
+
"Proficiency in statistics concepts: sampling theory, descriptive statistics, probability distributions, statistical tests, dimensionality, reduction, Hypothesis testing, maximum likelihood estimators, inference, etc.\n",
|
1283 |
+
"Expertise in model validation, hyperparameter tuning, and model selection techniques such as cross validation, leave-one-out, bootstrap.\n",
|
1284 |
+
"Proficiency in using query languages such as SQL and spark.\n",
|
1285 |
+
"Services, Reporting Service, Power BI, Python, PySpark- Distributed Computing. Machine Learning, Times Series, Data Mining, Mathematical, Modeling, Probability and Stochastic Processes\n",
|
1286 |
+
"Qualifications & Experience:\n",
|
1287 |
+
"Bachelor’s/master’s degree in engineering.\n",
|
1288 |
+
"6+ years of related experience is required.\n",
|
1289 |
+
"Must have: AI/ML, Data Science, Python.\n",
|
1290 |
+
"Good to have: SQL, Tableau\n",
|
1291 |
+
"Company Name : MakeMyTrip\n",
|
1292 |
+
"Job Title : Senior/Lead Data Scientist\n",
|
1293 |
+
"Location : Bengaluru, Karnataka, India\n",
|
1294 |
+
"Website URL : https://in.linkedin.com/jobs/view/senior-lead-data-scientist-at-makemytrip-3775206966?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=BpwORRecD4ElN4ikRtlXLA%3D%3D&position=9&pageNum=0&trk=public_jobs_jserp-result_search-card\n",
|
1295 |
+
"Description : Responsibilities:\n",
|
1296 |
+
"Train and deploy best in class deep learning models for ranking, pricing, recommendation, representation learning, FinTech products.\n",
|
1297 |
+
"Work with stakeholders at various stages of project, lead project development from inception to completion.\n",
|
1298 |
+
"Be a SQL python ninja, understand data thoroughly, build smart AI/ML systems.\n",
|
1299 |
+
"Opportunity to work with large click stream e-commerce data sets, at MakeMyTrip and GoIbibo, i. e., in rapidly growing travel space.\n",
|
1300 |
+
"Build and own robust data science APIs meeting 99% SLA at very high RPS. Also ensure training/inference MLOps rigour.\n",
|
1301 |
+
"Show business impact while having opportunity to build best in class AI/ML models.\n",
|
1302 |
+
"\n",
|
1303 |
+
"Requirements:\n",
|
1304 |
+
"Bachelor's degree in mathematics, Statistics, related technical field, or equivalent practical experience.\n",
|
1305 |
+
"A minimum of 4-7 years of experience.\n",
|
1306 |
+
"ML Modeling, Ranking, Recommendations, or Personalization systems.\n",
|
1307 |
+
"Experience with statistical data analysis such as linear models, multivariate analysis, stochastic models, and sampling methods.\n",
|
1308 |
+
"Experience with applying machine learning techniques to big data systems (e. g., Spark) with TB to PB scale datasets.\n",
|
1309 |
+
"Experience with data querying languages (e. g., SQL), scripting languages (e. g., Python), and/or statistical/mathematical software.\n",
|
1310 |
+
"Ph. D. in a quantitative field.\n",
|
1311 |
+
"Strong research record demonstrated through publications.\n",
|
1312 |
+
"Knowledge of advanced ML techniques such as Classification, Prediction, Recommender Systems, Anomaly Detection, Optimization, Privacy Preserving Machine Learning.\n",
|
1313 |
+
"Experience with design and analysis of experiments.\n",
|
1314 |
+
"Experience with large-scale A/B testing systems, especially in the domain of online advertising or online commerce.\n",
|
1315 |
+
"Company Name : Persistent Systems\n",
|
1316 |
+
"Job Title : Senior Data Scientist\n",
|
1317 |
+
"Location : Pune, Maharashtra, India\n",
|
1318 |
+
"Website URL : https://in.linkedin.com/jobs/view/senior-data-scientist-at-persistent-systems-3767577121?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=i7Fj%2BdTM9QYeNAmPbd4QNQ%3D%3D&position=12&pageNum=0&trk=public_jobs_jserp-result_search-card\n",
|
1319 |
+
"Description : About Position\n",
|
1320 |
+
"\n",
|
1321 |
+
"We are looking for a highly skilled and experienced Senior Data Scientist to join our team of generative AI researchers and engineers. The ideal candidate will have a strong background in machine learning, natural language processing, and large language models. They will be responsible for leading the development of new generative AI models and applications\n",
|
1322 |
+
"\n",
|
1323 |
+
"Job Location: Pune, Goa, Bangalore\n",
|
1324 |
+
"Job Reference ID: PSL008075-1-2\n",
|
1325 |
+
"\n",
|
1326 |
+
"What you’ll do\n",
|
1327 |
+
"Design or modify system architectures to meet certain business needs\n",
|
1328 |
+
"Design solutions for potential problems\n",
|
1329 |
+
"Work with business leaders to develop IT strategies\n",
|
1330 |
+
"Help and work with leads in conceptualizing and developing proof of concepts\n",
|
1331 |
+
"Drive and assist the team in build & deployments (prod migration) and coding\n",
|
1332 |
+
"Remediate any performance issues identified during performance testing\n",
|
1333 |
+
"Provide support and issue resolution for the quality assurance team\n",
|
1334 |
+
"\n",
|
1335 |
+
"Expertise you’ll bring\n",
|
1336 |
+
"\n",
|
1337 |
+
"10+ years of experience leading data science projects.\n",
|
1338 |
+
"Strong programming skills in Python.\n",
|
1339 |
+
"Experience with large language models, such as GPT-4 ,GPT 3.5, Turbo and PaLM.\n",
|
1340 |
+
"Experience with vector databases.\n",
|
1341 |
+
"Experience with LangChain and Named Entity Recognition (NER)\n",
|
1342 |
+
"Excellent problem-solving and analytical skills.\n",
|
1343 |
+
"Excellent communication skills.\n",
|
1344 |
+
"\n",
|
1345 |
+
"Benefits\n",
|
1346 |
+
"\n",
|
1347 |
+
"Competitive salary and benefits package\n",
|
1348 |
+
"Culture focused on talent development with quarterly promotion cycles and company-sponsored higher education and certifications\n",
|
1349 |
+
"Opportunity to work with cutting-edge technologies\n",
|
1350 |
+
"Employee engagement initiatives such as project parties, flexible work hours, and Long Service awards\n",
|
1351 |
+
"Annual health check-ups\n",
|
1352 |
+
"Insurance coverage: group term life, personal accident, and Mediclaim hospitalization for self, spouse, two children, and parents\n",
|
1353 |
+
"\n",
|
1354 |
+
"Our company fosters a values-driven and people-centric work environment that enables our employees to:\n",
|
1355 |
+
"\n",
|
1356 |
+
"Accelerate growth, both professionally and personally\n",
|
1357 |
+
"Impact the world in powerful, positive ways, using the latest technologies\n",
|
1358 |
+
"Enjoy collaborative innovation, with diversity and work-life wellbeing at the core\n",
|
1359 |
+
"Unlock global opportunities to work and learn with the industry’s best\n",
|
1360 |
+
"\n",
|
1361 |
+
"Let's unleash your full potential. See Beyond, Rise Above\n",
|
1362 |
+
"Company Name : CodeRoofs IT Solutions\n",
|
1363 |
+
"Job Title : Generative Artificial Intelligence (AI)\n",
|
1364 |
+
"Location : Sahibzada Ajit Singh Nagar, Punjab, India\n",
|
1365 |
+
"Website URL : https://in.linkedin.com/jobs/view/generative-artificial-intelligence-ai-at-coderoofs-it-solutions-3775140084?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=n05H%2BvlRpxM5pKsjT7X4lg%3D%3D&position=13&pageNum=0&trk=public_jobs_jserp-result_search-card\n",
|
1366 |
+
"Description : Selected Intern's Day-to-day Responsibilities Include\n",
|
1367 |
+
"\n",
|
1368 |
+
"Utilize ChatGPT, mid-journey image generation, and other cutting-edge generative AI tools for innovative content creation\n",
|
1369 |
+
"Work with stable defusion, Kaiber, and other video-creating AI platforms to generate compelling visual stories\n",
|
1370 |
+
"Implement open-source codes like Bark and wav2lip for text-to-speech and speech-to-text conversions, making our content more accessible and engaging\n",
|
1371 |
+
"Be involved in the training of LLM models, ControlNet, and other open-source generative AI models to tailor them for specific projects\n",
|
1372 |
+
"Utilize GPU computing, Google Colab, Hugging Face, CivitAI, and other platforms to enhance and streamline our AI operations\n",
|
1373 |
+
"Leverage your skills in Python, data science, and machine learning to analyze data, optimize algorithms, and improve the efficiency of our AI tools\n",
|
1374 |
+
"Working on Stable Diffusion APIs to create images with accuracy\n",
|
1375 |
+
"Using hugging face deployment to deploy machine learning models\n",
|
1376 |
+
"Working on OpenCV to create bubbles on comic characters\n",
|
1377 |
+
"Creating a comic book using Stable Diffusion\n",
|
1378 |
+
"\n",
|
1379 |
+
"Requirements: Possess a robust understanding of machine learning (ML) and Stable Diffusion APIs.\n",
|
1380 |
+
"\n",
|
1381 |
+
"Note\n",
|
1382 |
+
"\n",
|
1383 |
+
"We kindly request that only those who are highly confident and possess substantial experience in the field of ML apply for this opportunity\n",
|
1384 |
+
"Candidates without experience would not be considered\n",
|
1385 |
+
"Company Name : LENS Corporation\n",
|
1386 |
+
"Job Title : Artificial Intelligence Researcher\n",
|
1387 |
+
"Location : Gurugram, Haryana, India\n",
|
1388 |
+
"Website URL : https://in.linkedin.com/jobs/view/artificial-intelligence-researcher-at-lens-corporation-3772656078?refId=D1k0gOpiu4aLrlzBTdiRLg%3D%3D&trackingId=7XGPeivl3BGGdwu%2FltrCnw%3D%3D&position=16&pageNum=0&trk=public_jobs_jserp-result_search-card\n",
|
1389 |
+
"Description : Requirements:\n",
|
1390 |
+
"Excellent knowledge of computer vision concepts, including but not limited to Image Classification, Object Detection, and Semantic Segmentation, developed using state-of-the-art deep learning algorithms.\n",
|
1391 |
+
"Hands-on experience developing efficient and real-time convolutional neural network (CNN) models for computer vision tasks.\n",
|
1392 |
+
"Strong proficiency in at least one of the deep learning frameworks, such as PyTorch, TensorFlow, or Caffe, with the ability to apply them to computer vision problems.\n",
|
1393 |
+
"Quick prototyping skills in Python and coding and debugging proficiency in C++.\n",
|
1394 |
+
"Good communication and collaboration skills to work effectively in a team and communicate complex technical concepts.\n",
|
1395 |
+
"\n",
|
1396 |
+
"Qualifications:\n",
|
1397 |
+
"A Ph.D. degree (including candidates at various stages of their Ph.D., such as thesis submission, thesis submitted, degree awaited, synopsis seminar completed, defense completed) in Deep Learning with hands-on coding skills and a passion for an industrial career will be preferred.\n",
|
1398 |
+
"Master's or Bachelor's degree with thorough industrial work experience in developing computer vision applications using deep learning.\n",
|
1399 |
+
"Postgraduates or Undergraduates with a strong academic background in Deep Learning, Computer Vision, or related fields, and demonstrated coding skills, are also encouraged to apply.\n",
|
1400 |
+
"\n",
|
1401 |
+
"Preferred:\n",
|
1402 |
+
"Publications in top-tier computer vision conferences like CVPR, ICCV, ECCV, or major AI conferences like NeurIPS.\n",
|
1403 |
+
"Knowledge of computer vision libraries and tools, including OpenCV and DLib, and a solid understanding of image processing and computer vision fundamentals.\n",
|
1404 |
+
"Hands-on experience with model compression and pruning techniques in deep learning.\n",
|
1405 |
+
"Good exposure to various deep learning architectures, such as Artificial Neural Networks (ANN), Deep Neural Networks (DNN), Convolutional Neural Networks (CNN), Recurrent Neural Networks (RNN), and Long Short-Term Memory (LSTM) networks.\n",
|
1406 |
+
"Familiarity with GPU programming (e.g., CUDA, OpenCL) for efficient deep-learning computations.\n",
|
1407 |
+
"\n",
|
1408 |
+
"Pay is competitive as per market standards.\n"
|
1409 |
+
]
|
1410 |
+
}
|
1411 |
+
],
|
1412 |
+
"source": [
|
1413 |
+
"l = len(df['Company Name'])\n",
|
1414 |
+
"for i in range(0,l):\n",
|
1415 |
+
" print(f\"Company Name : {df.iloc[i,0]}\")\n",
|
1416 |
+
" print(f\"Job Title : {df.iloc[i,1]}\")\n",
|
1417 |
+
" print(f\"Location : {df.iloc[i,2]}\")\n",
|
1418 |
+
" print(f\"Website URL : {df.iloc[i,3]}\")\n",
|
1419 |
+
" print(f\"Description : {df.iloc[i,4]}\")\n"
|
1420 |
+
]
|
1421 |
+
},
|
1422 |
+
{
|
1423 |
+
"cell_type": "code",
|
1424 |
+
"execution_count": null,
|
1425 |
+
"metadata": {},
|
1426 |
+
"outputs": [],
|
1427 |
+
"source": []
|
1428 |
+
}
|
1429 |
+
],
|
1430 |
+
"metadata": {
|
1431 |
+
"kernelspec": {
|
1432 |
+
"display_name": "Python 3",
|
1433 |
+
"language": "python",
|
1434 |
+
"name": "python3"
|
1435 |
+
},
|
1436 |
+
"language_info": {
|
1437 |
+
"codemirror_mode": {
|
1438 |
+
"name": "ipython",
|
1439 |
+
"version": 3
|
1440 |
+
},
|
1441 |
+
"file_extension": ".py",
|
1442 |
+
"mimetype": "text/x-python",
|
1443 |
+
"name": "python",
|
1444 |
+
"nbconvert_exporter": "python",
|
1445 |
+
"pygments_lexer": "ipython3",
|
1446 |
+
"version": "3.11.6"
|
1447 |
+
},
|
1448 |
+
"orig_nbformat": 4
|
1449 |
+
},
|
1450 |
+
"nbformat": 4,
|
1451 |
+
"nbformat_minor": 2
|
1452 |
+
}
|
README.md
ADDED
@@ -0,0 +1,129 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# AI-Powered Resume Analyzer and LinkedIn Scraper with Selenium
|
2 |
+
|
3 |
+
**Introduction**
|
4 |
+
|
5 |
+
"Resume Analyzer AI" leverages the power of LLM and OpenAI as an advanced Streamlit application, specializing in thorough resume analysis. It excels at summarizing the resume, evaluating strengths, identifying weaknesses, and offering personalized improvement suggestions, while also recommending the perfect job titles. Additionally, it seamlessly employs Selenium to extract vital LinkedIn data, encompassing company names, job titles, locations, job URLs, and detailed job descriptions. In essence, Resume Analyzer AI simplifies the job-seeking journey by equipping users with comprehensive insights to elevate their career opportunities.
|
6 |
+
|
7 |
+
<br />
|
8 |
+
|
9 |
+
**Table of Contents**
|
10 |
+
|
11 |
+
1. Key Technologies and Skills
|
12 |
+
2. Installation
|
13 |
+
3. Usage
|
14 |
+
4. Features
|
15 |
+
5. Contributing
|
16 |
+
6. License
|
17 |
+
7. Contact
|
18 |
+
|
19 |
+
<br />
|
20 |
+
|
21 |
+
**Key Technologies and Skills**
|
22 |
+
- Python
|
23 |
+
- NumPy
|
24 |
+
- Pandas
|
25 |
+
- LangChain
|
26 |
+
- LLM
|
27 |
+
- OpenAI
|
28 |
+
- Selenium
|
29 |
+
- Streamlit
|
30 |
+
- Hugging Face
|
31 |
+
- AWS
|
32 |
+
|
33 |
+
<br />
|
34 |
+
|
35 |
+
**Installation**
|
36 |
+
|
37 |
+
To run this project, you need to install the following packages:
|
38 |
+
|
39 |
+
```python
|
40 |
+
pip install numpy
|
41 |
+
pip install pandas
|
42 |
+
pip install streamlit
|
43 |
+
pip install streamlit_option_menu
|
44 |
+
pip install streamlit_extras
|
45 |
+
pip install PyPDF2
|
46 |
+
pip install langchain
|
47 |
+
pip install openai
|
48 |
+
pip install tiktoken
|
49 |
+
pip install faiss-cpu
|
50 |
+
pip install selenium
|
51 |
+
```
|
52 |
+
|
53 |
+
<br />
|
54 |
+
|
55 |
+
**Usage**
|
56 |
+
|
57 |
+
To use this project, follow these steps:
|
58 |
+
|
59 |
+
1. Clone the repository: ```git clone <repository_url>```
|
60 |
+
2. Install the required packages: ```pip install -r requirements.txt```
|
61 |
+
3. Run the Streamlit app: ```streamlit run app.py```
|
62 |
+
4. Access the app in your browser at ```http://localhost:8501```
|
63 |
+
|
64 |
+
<br />
|
65 |
+
|
66 |
+
**Features**
|
67 |
+
|
68 |
+
**Easy User Experience:**
|
69 |
+
- Resume Analyzer AI makes it easy for users. You can upload your resume and enter your OpenAI API key without any hassle. The application is designed to be user-friendly so that anyone can use its powerful resume analysis features.
|
70 |
+
- It also uses the PyPDF2 library to quickly extract text from your uploaded resume, which is the first step in doing a thorough analysis.
|
71 |
+
|
72 |
+
**Smart Text Analysis with Langchain:**
|
73 |
+
- What makes it special is how it analyzes text. It uses a smart method called the Langchain library to break long sections of text from resumes into smaller chunks, making them more meaningful.
|
74 |
+
- This clever technique improves the accuracy of the resume analysis, and it gives users practical advice on how to enhance their job prospects.
|
75 |
+
|
76 |
+
**Enhanced OpenAI Integration with FAISS:**
|
77 |
+
- Seamlessly connecting to OpenAI services, the application establishes a secure connection using your OpenAI API key. This integration forms the basis for robust interactions, facilitating advanced analysis and efficient information retrieval.
|
78 |
+
- It uses the FAISS(Facebook AI Similarity Search) library to convert both the text chunks and query text data into numerical vectors, simplifying the analysis process and enabling the retrieval of pertinent information.
|
79 |
+
|
80 |
+
**Intelligent Chunk Selection and LLM:**
|
81 |
+
- Utilizing similarity search, Resume Analyzer AI compares the query and chunks, enabling the selection of the top 'K' most similar chunks based on their similarity scores.
|
82 |
+
- Simultaneously, the application creates an OpenAI object, particularly an LLM (Large Language Model), using the ChatGPT 3.5 Turbo model and your OpenAI API key.
|
83 |
+
|
84 |
+
**Robust Question-Answering Pipeline:**
|
85 |
+
- This integration establishes a robust question-answering (QA) pipeline, making use of the load_qa_chain function, which encompasses multiple components, including the language model.
|
86 |
+
- The QA chain efficiently handles lists of input documents (docs) and a list of questions (chunks), with the response variable capturing the results, such as answers to the questions derived from the content within the input documents.
|
87 |
+
|
88 |
+
**Comprehensive Resume Analysis:**
|
89 |
+
- **Summary:** Resume Analyzer AI provides a quick, comprehensive overview of resumes, emphasizing qualifications, key experience, skills, projects, and achievements. Users can swiftly grasp profiles, enhancing review efficiency and insight.
|
90 |
+
- **Strength:** Effortlessly conducting a comprehensive resume review, it analyzes qualifications, experience, and accomplishments. It subsequently highlights strengths, providing job seekers with a competitive edge.
|
91 |
+
- **Weakness:** AI conducts thorough analysis to pinpoint weaknesses and offers tailored solutions for transforming them into strengths, empowering job seekers.
|
92 |
+
- **Suggestion:** AI provides personalized job title recommendations that align closely with the user's qualifications and resume content, facilitating an optimized job search experience.
|
93 |
+
|
94 |
+
<br />
|
95 |
+
|
96 |
+
|
97 |
+
|
98 |
+
<br />
|
99 |
+
|
100 |
+
**Selenium-Powered LinkedIn Data Scraping:**
|
101 |
+
- Utilizing Selenium and a Webdriver automated test tool, this feature enables users to input job titles, automating the data scraping process from LinkedIn. The scraped data includes crucial details such as company names, job titles, locations, URLs, and comprehensive job descriptions.
|
102 |
+
- This streamlined process enables users to easily review scraped job details and apply for positions, simplifying their job search and application experience.
|
103 |
+
|
104 |
+
<br />
|
105 |
+
|
106 |
+
|
107 |
+
|
108 |
+
<br />
|
109 |
+
|
110 |
+
**Contributing**
|
111 |
+
|
112 |
+
Contributions to this project are welcome! If you encounter any issues or have suggestions for improvements, please feel free to submit a pull request.
|
113 |
+
|
114 |
+
<br />
|
115 |
+
|
116 |
+
**License**
|
117 |
+
|
118 |
+
This project is licensed under the MIT License. Please review the LICENSE file for more details.
|
119 |
+
|
120 |
+
<br />
|
121 |
+
|
122 |
+
**Contact**
|
123 |
+
|
124 |
+
📧 Email: [email protected]
|
125 |
+
|
126 |
+
|
127 |
+
|
128 |
+
For any further questions or inquiries, feel free to reach out. We are happy to assist you with any queries.
|
129 |
+
|
Thumbnail.jpg
ADDED
![]() |
__pycache__/chat.cpython-311.pyc
ADDED
Binary file (2.95 kB). View file
|
|
__pycache__/config.cpython-311.pyc
ADDED
Binary file (220 Bytes). View file
|
|
app.py
ADDED
@@ -0,0 +1,523 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
|
2 |
+
|
3 |
+
import time
|
4 |
+
import numpy as np
|
5 |
+
import pandas as pd
|
6 |
+
import streamlit as st
|
7 |
+
from streamlit_option_menu import option_menu
|
8 |
+
from streamlit_extras.add_vertical_space import add_vertical_space
|
9 |
+
from PyPDF2 import PdfReader
|
10 |
+
from langchain.text_splitter import RecursiveCharacterTextSplitter
|
11 |
+
from langchain.embeddings.openai import OpenAIEmbeddings
|
12 |
+
from langchain.vectorstores import FAISS
|
13 |
+
from langchain.chat_models import ChatOpenAI
|
14 |
+
from langchain.chains.question_answering import load_qa_chain
|
15 |
+
from langchain.memory import ConversationBufferMemory
|
16 |
+
from langchain.chains import ConversationChain
|
17 |
+
from selenium import webdriver
|
18 |
+
from selenium.webdriver.common.by import By
|
19 |
+
from selenium.webdriver.common.keys import Keys
|
20 |
+
from selenium.common.exceptions import NoSuchElementException
|
21 |
+
|
22 |
+
import warnings
|
23 |
+
warnings.filterwarnings('ignore')
|
24 |
+
|
25 |
+
# Get OpenAI API key from secrets
|
26 |
+
def get_openai_api_key():
    """Resolve the OpenAI API key for the app.

    Lookup order: Streamlit secrets (``api_keys.openai_api_key``), then the
    ``openai_api_key`` entry in session state. When neither source has a key,
    an error is shown in the UI and ``None`` is returned.
    """
    try:
        return st.secrets["api_keys"]["openai_api_key"]
    except Exception:
        # Secrets are unavailable (e.g. local run without secrets.toml);
        # fall back to a key the user stored in the session.
        if "openai_api_key" in st.session_state:
            return st.session_state["openai_api_key"]

    st.error("OpenAI API key not found. Please check your secrets configuration.")
    return None
|
36 |
+
|
37 |
+
|
38 |
+
def streamlit_config():
    """Apply page-level Streamlit configuration and render the app title.

    Sets a wide layout, makes the built-in Streamlit header transparent so
    the page background shows through, and prints the centered title.
    """
    st.set_page_config(page_title='Talent Track By AI', layout="wide")
    # Make the default Streamlit header bar fully transparent.
    page_background_color = """
    <style>
    [data-testid="stHeader"]
    {
    background: rgba(0,0,0,0);
    }
    </style>
    """
    st.markdown(page_background_color, unsafe_allow_html=True)
    # Fix: the original used f-strings with no placeholders (lint F541).
    st.markdown('<h1 style="text-align: center;">Talent Track By AI</h1>', unsafe_allow_html=True)
|
50 |
+
|
51 |
+
class resume_analyzer:
    """Resume analysis helpers: PDF text extraction, retrieval-augmented LLM
    queries, and the four Streamlit pages (summary / strength / weakness /
    job-title suggestions)."""

    @staticmethod
    def pdf_to_chunks(pdf):
        """Extract all text from an uploaded PDF and split it into
        overlapping chunks suitable for embedding.

        Returns a list of text chunks (700 chars, 200 overlap).
        """
        pdf_reader = PdfReader(pdf)
        text = ""
        for page in pdf_reader.pages:
            # Fix: extract_text() may return None (e.g. image-only pages);
            # the original crashed with a TypeError on `+=` in that case.
            text += page.extract_text() or ""
        text_splitter = RecursiveCharacterTextSplitter(
            chunk_size=700,
            chunk_overlap=200,
            length_function=len)
        return text_splitter.split_text(text=text)

    @staticmethod
    def openai(chunks, analyze):
        """Answer the `analyze` query against the resume `chunks`.

        Embeds the chunks into a FAISS index, retrieves the 3 most similar
        chunks, and runs a 'stuff' QA chain over them with gpt-3.5-turbo.
        Returns the model response, or None when no API key is configured
        (get_openai_api_key already reports the error in the UI).
        """
        openai_api_key = get_openai_api_key()
        if not openai_api_key:
            return None

        embeddings = OpenAIEmbeddings(openai_api_key=openai_api_key)
        vectorstores = FAISS.from_texts(chunks, embedding=embeddings)
        docs = vectorstores.similarity_search(query=analyze, k=3)
        llm = ChatOpenAI(model='gpt-3.5-turbo', openai_api_key=openai_api_key)
        chain = load_qa_chain(llm=llm, chain_type='stuff')
        return chain.run(input_documents=docs, question=analyze)

    @staticmethod
    def summary_prompt(query_with_chunks):
        """Build the LLM prompt asking for a detailed resume summary."""
        query = f''' need to detailed summarization of below resume and finally conclude them
                """""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
                {query_with_chunks}
                """""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
                '''
        return query

    @staticmethod
    def strength_prompt(query_with_chunks):
        """Build the LLM prompt asking for an analysis of resume strengths."""
        query = f'''need to detailed analysis and explain of the strength of below resume and finally conclude them
                """""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
                {query_with_chunks}
                """""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
                '''
        return query

    @staticmethod
    def weakness_prompt(query_with_chunks):
        """Build the LLM prompt asking for weaknesses and improvement advice."""
        query = f'''need to detailed analysis and explain of the weakness of below resume and how to improve make a better resume.
                """""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
                {query_with_chunks}
                """""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
                '''
        return query

    @staticmethod
    def job_title_prompt(query_with_chunks):
        """Build the LLM prompt asking for suitable job titles."""
        # Fix: original prompt misspelled "likedin".
        query = f''' what are the job roles i apply to LinkedIn based on below?
                """""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
                {query_with_chunks}
                """""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""
                '''
        return query

    @staticmethod
    def _resume_form(form_key):
        """Render the resume-upload form; return (uploaded_pdf, submitted)."""
        with st.form(key=form_key):
            add_vertical_space(1)
            pdf = st.file_uploader(label='Upload Your Resume', type='pdf')
            add_vertical_space(2)
            submit = st.form_submit_button(label='Submit')
            add_vertical_space(1)
        add_vertical_space(3)
        return pdf, submit

    @staticmethod
    def _analyze_resume(form_key, heading, prompt_builder=None):
        """Shared pipeline behind the four analysis pages.

        Uploads a resume, summarizes it, optionally runs a follow-up prompt
        built by `prompt_builder` on the summary, and renders the result.
        The original duplicated this logic in four nearly identical methods.
        """
        pdf, submit = resume_analyzer._resume_form(form_key)
        if not submit:
            return
        if pdf is None:
            st.markdown('<h5 style="text-align: center;color: orange;">Please Upload Your Resume</h5>',
                        unsafe_allow_html=True)
            return
        try:
            with st.spinner('Processing...'):
                pdf_chunks = resume_analyzer.pdf_to_chunks(pdf)
                summary_prompt = resume_analyzer.summary_prompt(query_with_chunks=pdf_chunks)
                result = resume_analyzer.openai(chunks=pdf_chunks, analyze=summary_prompt)
                if result and prompt_builder is not None:
                    # Second pass: analyze the summary with the page-specific prompt.
                    follow_up = prompt_builder(query_with_chunks=result)
                    result = resume_analyzer.openai(chunks=pdf_chunks, analyze=follow_up)
            if result:
                st.markdown(f'<h4 style="color: orange;">{heading}:</h4>', unsafe_allow_html=True)
                st.write(result)
        except Exception as e:
            st.markdown(f'<h5 style="text-align: center;color: orange;">{e}</h5>', unsafe_allow_html=True)

    @staticmethod
    def resume_summary():
        """Streamlit page: summarize the uploaded resume."""
        resume_analyzer._analyze_resume('Summary', 'Summary')

    @staticmethod
    def resume_strength():
        """Streamlit page: highlight the resume's strengths."""
        resume_analyzer._analyze_resume('Strength', 'Strength', resume_analyzer.strength_prompt)

    @staticmethod
    def resume_weakness():
        """Streamlit page: point out weaknesses and how to improve them."""
        resume_analyzer._analyze_resume('Weakness', 'Weakness and Suggestions', resume_analyzer.weakness_prompt)

    @staticmethod
    def job_title_suggestion():
        """Streamlit page: suggest matching job titles."""
        resume_analyzer._analyze_resume('Job Titles', 'Job Titles', resume_analyzer.job_title_prompt)
|
213 |
+
|
214 |
+
class linkedin_scraper:
    """Scrape public LinkedIn job listings with Selenium and render them in
    the Streamlit UI."""

    @staticmethod
    def webdriver_setup():
        """Create a headless Chrome driver configured for containerized hosts."""
        options = webdriver.ChromeOptions()
        options.add_argument('--headless')
        options.add_argument('--no-sandbox')              # required in most containers
        options.add_argument('--disable-dev-shm-usage')   # avoid /dev/shm exhaustion
        driver = webdriver.Chrome(options=options)
        driver.maximize_window()
        return driver

    @staticmethod
    def get_userinput():
        """Render the search form; return (job_titles, location, count, submitted).

        job_titles is a comma-split list of the raw text-input value.
        """
        add_vertical_space(2)
        with st.form(key='linkedin_scarp'):
            add_vertical_space(1)
            col1, col2, col3 = st.columns([0.5, 0.3, 0.2], gap='medium')
            with col1:
                job_title_input = st.text_input(label='Job Title')
                job_title_input = job_title_input.split(',')
            with col2:
                job_location = st.text_input(label='Job Location', value='India')
            with col3:
                job_count = st.number_input(label='Job Count', min_value=1, value=1, step=1)
            add_vertical_space(1)
            submit = st.form_submit_button(label='Submit')
            add_vertical_space(1)
        return job_title_input, job_location, job_count, submit

    @staticmethod
    def build_url(job_title, job_location):
        """Build the LinkedIn job-search URL for the given titles/location.

        Titles are joined with URL-encoded commas ('%2C%20'), words inside a
        title with '%20'.
        NOTE(review): geoId is hard-coded to India's region id even though the
        location text is interpolated — confirm before using other regions.
        """
        encoded_titles = []
        for title in job_title:
            encoded_titles.append('%20'.join(title.split()))
        job_title = '%2C%20'.join(encoded_titles)
        link = f"https://in.linkedin.com/jobs/search?keywords={job_title}&location={job_location}&locationId=&geoId=102713980&f_TPR=r604800&position=1&pageNum=0"
        return link

    @staticmethod
    def open_link(driver, link, max_attempts=10):
        """Load `link` and wait until the page header element renders.

        Fix: the original retried forever (`while True`); retries are now
        bounded so a permanently failing page cannot hang the app.
        """
        for _ in range(max_attempts):
            try:
                driver.get(link)
                driver.implicitly_wait(5)
                time.sleep(3)
                # Presence of the language-switcher span indicates the page loaded.
                driver.find_element(by=By.CSS_SELECTOR, value='span.switcher-tabs__placeholder-text.m-auto')
                return
            except NoSuchElementException:
                continue

    @staticmethod
    def link_open_scrolldown(driver, link, job_count):
        """Open the search results and scroll / click 'See more jobs' to load
        roughly `job_count` pages of listings."""
        linkedin_scraper.open_link(driver, link)
        for _ in range(job_count):
            body = driver.find_element(by=By.TAG_NAME, value='body')
            body.send_keys(Keys.PAGE_UP)
            try:
                # Dismiss the sign-in modal if LinkedIn shows it.
                driver.find_element(
                    by=By.CSS_SELECTOR,
                    value="button[data-tracking-control-name='public_jobs_contextual-sign-in-modal_modal_dismiss']>icon>svg").click()
            except Exception:
                pass
            driver.execute_script("window.scrollTo(0, document.body.scrollHeight);")
            driver.implicitly_wait(2)
            try:
                driver.find_element(by=By.CSS_SELECTOR, value="button[aria-label='See more jobs']").click()
                driver.implicitly_wait(5)
            except Exception:
                pass

    @staticmethod
    def job_title_filter(scrap_job_title, user_job_title_input):
        """Keep a scraped title only if every word of at least one user title
        appears in it (case-insensitive); otherwise return NaN so the row is
        dropped by dropna()."""
        user_input = [i.lower().strip() for i in user_job_title_input]
        scrap_title = [i.lower().strip() for i in [scrap_job_title]]
        confirmation_count = 0
        for i in user_input:
            if all(j in scrap_title[0] for j in i.split()):
                confirmation_count += 1
        if confirmation_count > 0:
            return scrap_job_title
        else:
            return np.nan

    @staticmethod
    def scrap_company_data(driver, job_title_input, job_location):
        """Scrape company / title / location / URL from the listing cards and
        return a DataFrame filtered to matching titles and locations."""
        company = driver.find_elements(by=By.CSS_SELECTOR, value='h4[class="base-search-card__subtitle"]')
        company_name = [i.text for i in company]
        location = driver.find_elements(by=By.CSS_SELECTOR, value='span[class="job-search-card__location"]')
        company_location = [i.text for i in location]
        title = driver.find_elements(by=By.CSS_SELECTOR, value='h3[class="base-search-card__title"]')
        job_title = [i.text for i in title]
        url = driver.find_elements(by=By.XPATH, value='//a[contains(@href, "/jobs/")]')
        website_url = [i.get_attribute('href') for i in url]
        df = pd.DataFrame(company_name, columns=['Company Name'])
        df['Job Title'] = pd.DataFrame(job_title)
        df['Location'] = pd.DataFrame(company_location)
        df['Website URL'] = pd.DataFrame(website_url)
        # Drop rows whose title or location does not match the user's request.
        df['Job Title'] = df['Job Title'].apply(lambda x: linkedin_scraper.job_title_filter(x, job_title_input))
        df['Location'] = df['Location'].apply(lambda x: x if job_location.lower() in x.lower() else np.nan)
        df = df.dropna()
        df.reset_index(drop=True, inplace=True)
        return df

    @staticmethod
    def scrap_job_description(driver, df, job_count):
        """Visit each job URL and scrape its full description; stop after
        `job_count` usable descriptions. Rows without a description are dropped."""
        website_url = df['Website URL'].tolist()
        job_description = []
        description_count = 0
        for i in range(0, len(website_url)):
            try:
                linkedin_scraper.open_link(driver, website_url[i])
                driver.find_element(by=By.CSS_SELECTOR, value='button[data-tracking-control-name="public_jobs_show-more-html-btn"]').click()
                driver.implicitly_wait(5)
                time.sleep(1)
                description = driver.find_elements(by=By.CSS_SELECTOR, value='div[class="show-more-less-html__markup relative overflow-hidden"]')
                data = [i.text for i in description][0]
                # Keep only non-empty, previously unseen descriptions.
                if len(data.strip()) > 0 and data not in job_description:
                    job_description.append(data)
                    description_count += 1
                else:
                    job_description.append('Description Not Available')
            except Exception:
                job_description.append('Description Not Available')
            if description_count == job_count:
                break
        df = df.iloc[:len(job_description), :]
        df['Job Description'] = pd.DataFrame(job_description, columns=['Description'])
        df['Job Description'] = df['Job Description'].apply(lambda x: np.nan if x == 'Description Not Available' else x)
        df = df.dropna()
        df.reset_index(drop=True, inplace=True)
        return df

    @staticmethod
    def display_data_userinterface(df_final):
        """Render each scraped job posting, or a 'no matches' message."""
        add_vertical_space(1)
        if len(df_final) > 0:
            for i in range(0, len(df_final)):
                st.markdown(f'<h3 style="color: orange;">Job Posting Details : {i+1}</h3>', unsafe_allow_html=True)
                st.write(f"Company Name : {df_final.iloc[i,0]}")
                st.write(f"Job Title : {df_final.iloc[i,1]}")
                st.write(f"Location : {df_final.iloc[i,2]}")
                st.write(f"Website URL : {df_final.iloc[i,3]}")
                # Fix: original label misspelled "Desription".
                with st.expander(label='Job Description'):
                    st.write(df_final.iloc[i, 4])
                add_vertical_space(3)
        else:
            st.markdown('<h5 style="text-align: center;color: orange;">No Matching Jobs Found</h5>',
                        unsafe_allow_html=True)

    @staticmethod
    def main():
        """Entry point for the 'Linkedin Jobs' page: gather input, scrape,
        display, and always quit the driver."""
        driver = None
        try:
            job_title_input, job_location, job_count, submit = linkedin_scraper.get_userinput()
            add_vertical_space(2)
            if submit:
                if job_title_input != [] and job_location != '':
                    with st.spinner('Chrome Webdriver Setup Initializing...'):
                        driver = linkedin_scraper.webdriver_setup()
                    with st.spinner('Loading More Job Listings...'):
                        link = linkedin_scraper.build_url(job_title_input, job_location)
                        linkedin_scraper.link_open_scrolldown(driver, link, job_count)
                    with st.spinner('scraping Job Details...'):
                        df = linkedin_scraper.scrap_company_data(driver, job_title_input, job_location)
                        df_final = linkedin_scraper.scrap_job_description(driver, df, job_count)
                    linkedin_scraper.display_data_userinterface(df_final)
                elif job_title_input == []:
                    st.markdown('<h5 style="text-align: center;color: orange;">Job Title is Empty</h5>',
                                unsafe_allow_html=True)
                elif job_location == '':
                    st.markdown('<h5 style="text-align: center;color: orange;">Job Location is Empty</h5>',
                                unsafe_allow_html=True)
        except Exception as e:
            add_vertical_space(2)
            st.markdown(f'<h5 style="text-align: center;color: orange;">{e}</h5>', unsafe_allow_html=True)
        finally:
            if driver:
                driver.quit()
|
383 |
+
|
384 |
+
class career_chatbot:
    """Career-advice chatbot page: session-state setup, chat UI, system-prompt
    construction, and the OpenAI-backed chat loop."""

    @staticmethod
    def initialize_session_state():
        """Create the chatbot's session-state entries on first visit:
        message history, LangChain conversation memory, and optional resume
        context."""
        if "messages" not in st.session_state:
            st.session_state.messages = [
                {"role": "assistant", "content": "I'm your Career & Resume Assistant! Ask me anything about job searching, resume writing, interview preparation, or career development."}
            ]

        if "conversation_memory" not in st.session_state:
            st.session_state.conversation_memory = ConversationBufferMemory(return_messages=True)

        if "resume_data" not in st.session_state:
            st.session_state.resume_data = None

    @staticmethod
    def setup_chatbot_ui():
        """Render the chatbot header, the optional resume-context uploader,
        and the existing chat history."""
        with st.container():
            st.markdown('<h3 style="color: orange; text-align: center;">Career Advisor Chatbot</h3>', unsafe_allow_html=True)

            # Optional resume upload: its summary is stored in session state
            # and later folded into the system prompt for personalized advice.
            with st.expander("Upload Resume for Context (Optional)"):
                pdf = st.file_uploader(label='Upload Resume', type='pdf', key="chatbot_resume")
                if pdf is not None and st.button("Process Resume"):
                    with st.spinner('Processing resume for context...'):
                        try:
                            pdf_chunks = resume_analyzer.pdf_to_chunks(pdf)
                            summary_prompt = resume_analyzer.summary_prompt(query_with_chunks=pdf_chunks)
                            summary = resume_analyzer.openai(chunks=pdf_chunks, analyze=summary_prompt)
                            if summary:
                                st.session_state.resume_data = summary
                                st.success("Resume processed successfully! The chatbot now has context from your resume.")
                        except Exception as e:
                            st.error(f"Error processing resume: {e}")

            # Replay the conversation so far.
            for message in st.session_state.messages:
                with st.chat_message(message["role"]):
                    st.write(message["content"])

    @staticmethod
    def create_system_prompt():
        """Build the system prompt, appending resume context when available."""
        base_prompt = """You are a specialized career and job-search assistant. Your expertise is limited to:
1. Resume writing, analysis, and improvement
2. Job search strategies and techniques
3. Interview preparation and tips
4. Career development advice
5. LinkedIn profile optimization
6. Professional networking guidance
7. Salary negotiation tactics
8. Professional skill development recommendations

Answer questions ONLY related to these topics. For any off-topic questions, politely redirect the conversation back to career-related topics.
Your responses should be helpful, specific, and actionable. Use bullet points for clarity when appropriate.
"""

        if st.session_state.resume_data:
            resume_context = f"\nThe user has provided a resume with the following information:\n{st.session_state.resume_data}\n\nUse this context to provide personalized advice when relevant."
            return base_prompt + resume_context
        else:
            return base_prompt

    @staticmethod
    def process_user_input():
        """Handle one chat turn: read user input, query the LLM with the
        system prompt plus conversation memory, and render/store the reply."""
        openai_api_key = get_openai_api_key()
        if not openai_api_key:
            st.error("OpenAI API key not found. Please check your secrets configuration.")
            return

        user_input = st.chat_input("Ask me about careers, job search, or resume advice...")

        if user_input:
            # Record and display the user's message.
            st.session_state.messages.append({"role": "user", "content": user_input})
            with st.chat_message("user"):
                st.write(user_input)

            try:
                with st.spinner("Thinking..."):
                    llm = ChatOpenAI(model='gpt-3.5-turbo', openai_api_key=openai_api_key)

                    # Keep the LangChain memory in sync with the visible history.
                    st.session_state.conversation_memory.chat_memory.add_user_message(user_input)

                    system_prompt = career_chatbot.create_system_prompt()
                    chat_history = st.session_state.conversation_memory.buffer

                    prompt = f"""
                    {system_prompt}

                    Chat History: {chat_history}

                    Human: {user_input}
                    Assistant:"""

                    response = llm.predict(prompt)

                    st.session_state.conversation_memory.chat_memory.add_ai_message(response)
                    st.session_state.messages.append({"role": "assistant", "content": response})

                    with st.chat_message("assistant"):
                        st.write(response)

            except Exception as e:
                error_msg = f"Error generating response: {str(e)}"
                st.error(error_msg)
                st.session_state.messages.append({"role": "assistant", "content": "I'm sorry, I encountered an error. Please try again."})

    @staticmethod
    def main():
        """Entry point for the 'Career Chat' page."""
        career_chatbot.initialize_session_state()
        career_chatbot.setup_chatbot_ui()
        career_chatbot.process_user_input()
|
502 |
+
|
503 |
+
# Streamlit page configuration and title.
streamlit_config()
add_vertical_space(2)

# Sidebar navigation menu.
with st.sidebar:
    add_vertical_space(4)
    option = option_menu(
        menu_title='',
        options=['Summary', 'Strength', 'Weakness', 'Job Titles', 'Linkedin Jobs', 'Career Chat'],
        icons=['house-fill', 'database-fill', 'pass-fill', 'list-ul', 'linkedin', 'chat-dots-fill'])

# Route the selected menu entry to its page handler.
_PAGES = {
    'Summary': resume_analyzer.resume_summary,
    'Strength': resume_analyzer.resume_strength,
    'Weakness': resume_analyzer.resume_weakness,
    'Job Titles': resume_analyzer.job_title_suggestion,
    'Linkedin Jobs': linkedin_scraper.main,
    'Career Chat': career_chatbot.main,
}
_handler = _PAGES.get(option)
if _handler is not None:
    _handler()
|
config.py
ADDED
File without changes
|
packages.txt
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
chromium-driver
|
requirements.txt
ADDED
@@ -0,0 +1,11 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
numpy
|
2 |
+
pandas
|
3 |
+
streamlit
|
4 |
+
streamlit_option_menu
|
5 |
+
streamlit_extras
|
6 |
+
PyPDF2
|
7 |
+
langchain==0.0.302
|
8 |
+
openai
|
9 |
+
tiktoken
|
10 |
+
faiss-cpu
|
11 |
+
selenium
|