"""Streamlit app: clone a GitHub repository and summarize it with a local LLM via LlamaIndex."""
import os
import shutil

import streamlit as st
from git import Repo
from llama_index import ServiceContext, SimpleDirectoryReader, VectorStoreIndex
from llama_index.embeddings import HuggingFaceEmbedding
from llama_index.llms.llama_cpp import LlamaCPP
from llama_index.node_parser import SimpleNodeParser

# Local directory the target repository is cloned into; wiped before every run.
REPO_DIR = "repo"

# NOTE(review): "π" / "π§ " below look like mojibake'd emoji from the original
# source — confirm the intended glyphs; kept byte-for-byte to preserve behavior.
st.set_page_config(page_title="π GitHub Repo Explainer", layout="wide")
st.title("π GitHub Repository Explainer")

github_url = st.text_input("Enter GitHub URL:", placeholder="https://github.com/user/repo")

if st.button("Analyze Repo") and github_url:
    try:
        # Start from a clean checkout so stale files from a previous
        # analysis cannot leak into the new index.
        if os.path.exists(REPO_DIR):
            shutil.rmtree(REPO_DIR)
        Repo.clone_from(github_url, REPO_DIR)

        # Local quantized chat model, CPU-only (n_gpu_layers=0).
        llm = LlamaCPP(
            model_path="tinyllama-1.1b-chat-v1.0.Q4_K_M.gguf",
            temperature=0.7,
            max_new_tokens=512,
            context_window=2048,
            model_kwargs={"n_gpu_layers": 0},
            verbose=True,
        )
        embed_model = HuggingFaceEmbedding(
            model_name="sentence-transformers/all-MiniLM-L6-v2"
        )
        service_context = ServiceContext.from_defaults(llm=llm, embed_model=embed_model)

        # Read every file in the clone, chunk into nodes, and build the vector index.
        documents = SimpleDirectoryReader(REPO_DIR).load_data()
        nodes = SimpleNodeParser().get_nodes_from_documents(documents)
        index = VectorStoreIndex(nodes, service_context=service_context)

        # Single canned query: ask the model to explain the repository.
        response = index.as_query_engine().query(
            "Explain the purpose, structure, and setup of this repository."
        )

        st.subheader("π§ Repository Summary")
        st.write(str(response))
    except Exception as e:
        # Top-level UI boundary: surface any failure (bad URL, clone error,
        # missing model file, indexing error) to the user instead of crashing.
        st.error(f"Error: {e}")