Enoch1359 committed
Commit 5e42426 · verified · 1 Parent(s): adfb157

Upload folder using huggingface_hub

Files changed (4)
  1. Dockerfile +16 -0
  2. app.py +41 -0
  3. model.joblib +3 -0
  4. requirements.txt +11 -0
Dockerfile ADDED
@@ -0,0 +1,16 @@
+ FROM python:3.9-slim
+
+ # Set the working directory inside the container
+ WORKDIR /app
+
+ # Copy all files from the current directory to the container's working directory
+ COPY . .
+
+ # Install dependencies from the requirements file without using cache to reduce image size
+ RUN pip install --no-cache-dir --upgrade -r requirements.txt
+
+ # Define the command to start the application using Gunicorn with 4 worker processes
+ # - `-w 4`: Uses 4 worker processes for handling requests
+ # - `-b 0.0.0.0:7860`: Binds the server to port 7860 on all network interfaces
+ # - `app:super_kart_api`: Runs the Flask instance named `super_kart_api` defined in `app.py`
+ CMD ["gunicorn", "-w", "4", "-b", "0.0.0.0:7860", "app:super_kart_api"]
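Once the image is built and the container is running with port 7860 published on the host, a minimal smoke test might look like the sketch below. The base URL is an assumption (container running locally); it simply hits the `/` route defined in `app.py` and expects the welcome message.

import requests  # pinned in requirements.txt

BASE_URL = "http://localhost:7860"  # assumption: container running locally with port 7860 published

resp = requests.get(f"{BASE_URL}/")
print(resp.status_code, resp.text)  # expect 200 and the welcome message from the '/' route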
app.py ADDED
@@ -0,0 +1,41 @@
+ import joblib
+ import numpy as np
+ import pandas as pd
+ from flask import Flask, request, jsonify
+ super_kart_api = Flask("Superkart_price_prediction")  # Flask instance served by Gunicorn (see Dockerfile)
+ model = joblib.load('model.joblib')  # load the trained model once at startup
+ @super_kart_api.get('/')
+ def home():
+     return "Welcome to SuperKart Sales Prediction"
+ @super_kart_api.post('/v1/spkart_single')  # single prediction from a JSON body
+ def sale_pred_single():
+     sale_data = request.get_json()
+     sample = {
+         'Product_Weight': sale_data['Product_Weight'],
+         'Product_Sugar_Content': sale_data['Product_Sugar_Content'],
+         'Product_Allocated_Area': sale_data['Product_Allocated_Area'],
+         'Product_Type': sale_data['Product_Type'],
+         'Product_MRP': sale_data['Product_MRP'],
+         'Store_Id': sale_data['Store_Id'],
+         'Store_Size': sale_data['Store_Size'],
+         'Store_Location_City_Type': sale_data['Store_Location_City_Type'],
+         'Store_Type': sale_data['Store_Type'],
+         'Store_age': sale_data['Store_age']
+     }
+     input_data = pd.DataFrame([sample])  # single-row DataFrame with the expected feature columns
+     predicted_sale = model.predict(input_data)[0]
+     response = {'Store_Outlet': sample['Store_Id'], 'Sale': round(float(predicted_sale), 2)}
+     return jsonify(response)
+
+ @super_kart_api.post('/v1/spkart_batch')  # batch prediction from an uploaded CSV
+ def sale_pred_batch():
+     file = request.files['file']  # CSV expected under the multipart form key 'file'
+     input_data = pd.read_csv(file)
+     predicted_sale = model.predict(input_data).tolist()
+     predicted_sales = [round(float(i)) for i in predicted_sale]
+     sale_outlets = input_data['Store_Id'].tolist()
+     response = dict(zip(sale_outlets, predicted_sales))
+     return jsonify(response)
+ if __name__ == '__main__':
+     super_kart_api.run()  # dev server only; the container runs this app under Gunicorn
+
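For reference, a sketch of how a client could call the two endpoints above with the `requests` library (already pinned in requirements.txt). The base URL, feature values, and CSV file name are illustrative assumptions; the JSON keys match exactly what `sale_pred_single` reads, and the batch route expects a CSV upload under the form key `file`.

import requests

BASE_URL = "http://localhost:7860"  # assumption: service reachable on the Gunicorn port from the Dockerfile

# Single prediction: JSON body with the keys read by sale_pred_single (values are illustrative)
sample = {
    "Product_Weight": 12.66,
    "Product_Sugar_Content": "Low Sugar",
    "Product_Allocated_Area": 0.027,
    "Product_Type": "Frozen Foods",
    "Product_MRP": 117.08,
    "Store_Id": "OUT004",
    "Store_Size": "Medium",
    "Store_Location_City_Type": "Tier 2",
    "Store_Type": "Supermarket Type2",
    "Store_age": 16,
}
single = requests.post(f"{BASE_URL}/v1/spkart_single", json=sample)
print(single.json())  # e.g. {'Store_Outlet': 'OUT004', 'Sale': <rounded prediction>}

# Batch prediction: multipart upload of a CSV whose columns match the model's features
with open("superkart_batch.csv", "rb") as f:  # hypothetical file name
    batch = requests.post(f"{BASE_URL}/v1/spkart_batch", files={"file": f})
print(batch.json())  # mapping of Store_Id -> rounded predicted sale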
model.joblib ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:83fc60775cc8ebaad7e3054cb851df466705a3e680632eb6e00f7ddedc7adc29
+ size 47879002
requirements.txt ADDED
@@ -0,0 +1,11 @@
+ pandas==2.2.2
+ numpy==2.0.2
+ scikit-learn==1.6.1
+ xgboost==2.1.4
+ joblib==1.5.1
+ Werkzeug==3.1.3
+ flask==3.1.1
+ gunicorn==23.0.0
+ requests==2.32.3
+ uvicorn[standard]
+ streamlit==1.46.1