# Imports: google-genai, OpenAI and huggingface_hub clients, plus Streamlit for the UI
from google import genai
from openai import OpenAI
import streamlit as st
import os
from huggingface_hub import InferenceClient

# Creating the client for Hugging Face
clientHuggingFace = InferenceClient(
    api_key=os.getenv("HUGGINGFACE_API_KEY")
)

# Creating the client for OpenAI
clientOpenAI = OpenAI(
    api_key=os.getenv("OPENAI_API_KEY")
)

# Creating the client for Gemini
clientGemini = genai.Client(api_key=os.getenv("GEMINI_API_KEY"))


# Return the answer to a question using OpenAI's gpt-4o-mini model
def load_answer_openAI(question):
    completion = clientOpenAI.chat.completions.create(
        model="gpt-4o-mini",
        store=True,
        messages=[
            {"role": "user", "content": question}
        ],
    )
    return completion.choices[0].message.content


# Return the answer to a question using Google's gemini-2.0-flash model
def load_answer_Gemini(question):
    response = clientGemini.models.generate_content(
        model="gemini-2.0-flash",
        contents=question,
    )
    return response.text


# Return the answer to a question using a user-specified Hugging Face model
def load_answer_HuggingFace(question, modelName):
    res = clientHuggingFace.chat.completions.create(
        model=modelName,
        messages=[
            {"role": "user", "content": question}
        ],
    )
    return res.choices[0].message.content


# App UI
st.set_page_config(page_title="Langchain Demo", page_icon=":robot:")
st.header("Langchain Demo")


def get_text():
    input_text = st.text_input("Question: ", key="input")
    return input_text


def get_modelName():
    input_model = st.text_input("Model Name for HuggingFace: ", key="input1")
    return input_model


user_input = get_text()
submitGPT = st.button("Generate with ChatGPT")
submitGemini = st.button("Generate with Gemini")
providedModelName = get_modelName()
submitHuggingFace = st.button("Generate with HuggingFace")

if submitGPT:
    response = load_answer_openAI(user_input)
    st.subheader("Answer:")
    st.write(response)
elif submitGemini:
    response = load_answer_Gemini(user_input)
    st.subheader("Answer:")
    st.write(response)
elif submitHuggingFace:
    response = load_answer_HuggingFace(user_input, providedModelName)
    st.subheader("Answer:")
    st.write(response)
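
# Usage note (a sketch; the filename app.py is an assumption, not stated in the
# source): export the three API keys the script reads via os.getenv, then launch
# the app with Streamlit, e.g.:
#
#   export OPENAI_API_KEY="..."
#   export GEMINI_API_KEY="..."
#   export HUGGINGFACE_API_KEY="..."
#   streamlit run app.py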