Hady Rashwan committed on
Commit dcce17e · 1 Parent(s): 1ec14e1

Remove the need for LangChain and the Hugging Face InferenceClient and use requests instead

Files changed (3):
  1. app.py +38 -35
  2. requirements.txt +0 -1
  3. setup_requirements.txt +7 -0
app.py CHANGED
@@ -2,7 +2,6 @@ import streamlit as st
 import requests
 import datetime
 import os
-from huggingface_hub import InferenceClient
 from sentence_transformers import SentenceTransformer
 from supabase import create_client, Client
 from dotenv import load_dotenv
@@ -18,26 +17,50 @@ SUPABASE_URL = os.getenv("SUPABASE_URL")
 SUPABASE_KEY = os.getenv("SUPABASE_KEY")
 
 # Initialize the Hugging Face Inference Client
-client = InferenceClient(token=HF_API_KEY)
+
+llvm_model_url = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.3/v1/chat/completions"
+
 
 # Initialize Supabase
 supabase: Client = create_client(SUPABASE_URL, SUPABASE_KEY)
 
 model = SentenceTransformer('thenlper/gte-small')
 
+def call_llvm_model(prompt):
+    payload = {
+        "model": "mistralai/Mistral-7B-Instruct-v0.3",
+        "messages": [
+            {
+                "role": "user",
+                "content": prompt,
+            }
+        ],
+        "max_tokens": 500,
+        "stream": False
+    }
+    headers = {
+        "Authorization": f"Bearer {HF_API_KEY}",
+        "content-type": "application/json"
+    }
+
+    response = requests.post(llvm_model_url, json=payload, headers=headers)
+
+    response = response.json()
+    return response['choices'][0]['message']['content']
+
 def generate_outfit_image(clothing_suggestion):
     prompt = f"A fashion illustration showing an outfit with {clothing_suggestion}. Stylized, colorful, no text."
 
-    # Generate image using Stable Diffusion via Hugging Face
-    image_bytes = client.text_to_image(
-        prompt,
-        model="stabilityai/stable-diffusion-2-1",
-        negative_prompt="blurry, low quality, text, words, labels",
-    )
-
-    # # Convert bytes to PIL Image
-    # image = Image.open(BytesIO(image_bytes))
-    return image_bytes
+    payload = {
+        "inputs": prompt,
+        "negative_prompt":"blurry, low quality, text, words, labels"
+    }
+
+    API_URL = "https://api-inference.huggingface.co/models/stabilityai/stable-diffusion-2-1"
+    headers = {"Authorization": f"Bearer {HF_API_KEY}"}
+
+    response = requests.post(API_URL, headers=headers, json=payload)
+    return response.content
 
 def get_weather(city):
     base_url = "http://api.openweathermap.org/data/2.5/weather"
@@ -61,17 +84,7 @@ def get_ai_clothing_suggestion(weather_data):
     Make sure to stick to hugging faces free response size limit.
     """
 
-    # Using Mistral 7B Instruct model via Hugging Face
-    response = client.text_generation(
-        prompt,
-        model="mistralai/Mistral-7B-Instruct-v0.1",
-        # max_new_tokens=150,
-        temperature=0.7,
-        # top_k=50,
-        # top_p=0.95,
-    )
-
-    return response
+    return call_llvm_model(prompt)
 
 def get_ai_weather_explanation(weather_data):
     prompt = f"""
@@ -85,17 +98,7 @@ def get_ai_weather_explanation(weather_data):
     Make sure to stick to hugging faces free response size limit.
     """
 
-    # Using Mistral 7B Instruct model via Hugging Face
-    response = client.text_generation(
-        prompt,
-        model="mistralai/Mistral-7B-Instruct-v0.1",
-        max_new_tokens=150,
-        temperature=0.7,
-        top_k=50,
-        top_p=0.95,
-    )
-
-    return response
+    return call_llvm_model(prompt)
 
 def get_relevant_quote(weather_condition):
     # Encode the weather condition
@@ -154,4 +157,4 @@ if st.button("Get Weather and Clothing Suggestion"):
     st.image(outfit_image, caption="AI-generated outfit based on the suggestion")
 
     # Display current date and time
-st.sidebar.write(f"Current Date and Time: {datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
+st.sidebar.write(f"Current Date and Time: {datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S')}")
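For reference, a minimal sketch of the new request-based chat-completions call in isolation. The helper name call_chat_model, the timeout argument, and the raise_for_status() check are illustrative additions and are not part of this commit:

import os
import requests

# Same endpoint the commit stores in llvm_model_url in app.py.
CHAT_URL = "https://api-inference.huggingface.co/models/mistralai/Mistral-7B-Instruct-v0.3/v1/chat/completions"

def call_chat_model(prompt, api_key):
    # Mirrors call_llvm_model in app.py, with basic error handling added for illustration.
    payload = {
        "model": "mistralai/Mistral-7B-Instruct-v0.3",
        "messages": [{"role": "user", "content": prompt}],
        "max_tokens": 500,
        "stream": False,
    }
    headers = {
        "Authorization": f"Bearer {api_key}",
        "Content-Type": "application/json",
    }
    response = requests.post(CHAT_URL, json=payload, headers=headers, timeout=60)
    response.raise_for_status()  # not in the committed code; surfaces HTTP errors instead of a KeyError below
    return response.json()["choices"][0]["message"]["content"]

if __name__ == "__main__":
    print(call_chat_model("Suggest an outfit for 10°C and light rain.", os.getenv("HF_API_KEY", "")))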
 
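generate_outfit_image now returns the raw response body (response.content) from the Stable Diffusion endpoint. If a PIL Image is ever needed again, as the removed commented-out lines hinted, a small conversion sketch (hypothetical helper, assuming the response actually contains image bytes rather than a JSON error payload):

from io import BytesIO

from PIL import Image  # provided by the pillow package, which is not pinned in requirements.txt

def bytes_to_image(image_bytes):
    # Convert raw Stable Diffusion response bytes into a PIL Image,
    # e.g. before passing it to st.image(); hypothetical helper, not in app.py.
    return Image.open(BytesIO(image_bytes))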
requirements.txt CHANGED
@@ -2,6 +2,5 @@ streamlit==1.24.0
 requests==2.31.0
 python-dotenv==1.0.0
 huggingface-hub==0.16.4
-langchain==0.2.9
 sentence-transformers==3.0.1
 supabase==2.5.3
setup_requirements.txt ADDED
@@ -0,0 +1,7 @@
+streamlit==1.24.0
+requests==2.31.0
+python-dotenv==1.0.0
+huggingface-hub==0.16.4
+langchain==0.2.9
+sentence-transformers==3.0.1
+supabase==2.5.3