bogeumkim committed
Commit b90747c · 1 Parent(s): 174c3d3

Modify Gradio UI

Files changed (3)
  1. app.py +67 -12
  2. modal/server.py +0 -103
  3. server.py +130 -0
app.py CHANGED
@@ -54,32 +54,87 @@
 
 # if __name__ == "__main__":
 #     demo.launch(debug=True)
-
-import gradio as gr
+import re
 import requests
+import gradio as gr
+
+from markdownify import markdownify
 
-MODAL_API_URL = "https://agents-mcp-hackathon--smolagents-modal-agent-readme-agen-eb6ccb.modal.run" # Replace with your deployed Modal endpoint
+MODAL_API_URL = "https://agents-mcp-hackathon--auto-readme-agent-fastapi-app.modal.run" # Replace with your deployed Modal endpoint
+
+# def generate_readme_from_github(repo_url):
+#     try:
+#         response = requests.post(
+#             MODAL_API_URL,
+#             json={"repo_url": repo_url},
+#             timeout=180,
+#         )
+#         if response.status_code == 200:
+#             return response.json().get("readme", "No README generated.")
+#         else:
+#             return f"Error: {response.status_code}\n{response.text}"
+#     except Exception as e:
+#         return f"Exception: {str(e)}"
 
-def generate_readme_from_github(repo_url):
+# with gr.Blocks() as demo:
+#     gr.Markdown("# 📝 GitHub Repo → README.md Generator\nPaste a public GitHub repo link to generate a draft README.md using AI.")
+#     repo_input = gr.Textbox(label="GitHub Repository URL", placeholder="https://github.com/owner/repo")
+#     output = gr.Textbox(label="Generated README.md", lines=20)
+#     btn = gr.Button("Generate README")
+#     btn.click(generate_readme_from_github, inputs=repo_input, outputs=output)
+
+# if __name__ == "__main__":
+#     demo.launch()
+
+def generate_readme(query):
     try:
         response = requests.post(
             MODAL_API_URL,
-            json={"repo_url": repo_url},
-            timeout=600,
+            json={"query": query},
+            timeout=120
         )
         if response.status_code == 200:
-            return response.json().get("readme", "No README generated.")
+            return response.json().get("result", "No result returned.")
         else:
             return f"Error: {response.status_code}\n{response.text}"
     except Exception as e:
         return f"Exception: {str(e)}"
 
+def fetch_github_content(url: str) -> str:
+    try:
+        response = requests.get(url, timeout=20)
+        response.raise_for_status()
+        markdown_content = markdownify(response.text)
+        markdown_content = re.sub(r"\n{3,}", "\n\n", markdown_content)
+        return markdown_content
+    except Exception as e:
+        return f"Error fetching content: {str(e)}"
+
 with gr.Blocks() as demo:
-    gr.Markdown("# 📝 GitHub Repo README.md Generator\nPaste a public GitHub repo link to generate a draft README.md using AI.")
-    repo_input = gr.Textbox(label="GitHub Repository URL", placeholder="https://github.com/owner/repo")
-    output = gr.Textbox(label="Generated README.md", lines=20)
-    btn = gr.Button("Generate README")
-    btn.click(generate_readme_from_github, inputs=repo_input, outputs=output)
+    gr.Markdown("# 🤖 ARA: Auto README.md Agent 📝")
+    repo_input = gr.Textbox(
+        label="GitHub Repository URL",
+        placeholder="Enter the GitHub repository URL (e.g. https://github.com/username/repo)"
+    )
+    readme_output = gr.Textbox(label="Generated README.md")
+    with gr.Row():
+        with gr.Column():
+            gr.Markdown("#### 🖼️ Preview (Markdown Rendered)")
+            readme_preview = gr.Markdown()
+        with gr.Column():
+            gr.Markdown("#### 📝 Markdown File (Copyable)")
+            readme_markdown = gr.Textbox(lines=24, label="Markdown", interactive=True, show_copy_button=True)
+    generate_btn = gr.Button("Generate README.md")
+
+    # When generating, fill both preview and markdown
+    def dual_output(readme):
+        return readme, readme
+
+    generate_btn.click(
+        generate_readme,
+        inputs=repo_input,
+        outputs=[readme_preview, readme_markdown]
+    )
 
 if __name__ == "__main__":
     demo.launch()
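
For reference, a minimal, self-contained sketch of one way the single generation call can fill both the rendered preview and the copyable textbox. The generate_readme stub below stands in for the requests.post call to the Modal endpoint above, and this wiring of dual_output is an assumption, not part of the commit:

import gradio as gr

def generate_readme(repo_url):
    # Stub standing in for the requests.post call to MODAL_API_URL shown above.
    return f"# README draft for {repo_url}"

with gr.Blocks() as demo:
    repo_input = gr.Textbox(label="GitHub Repository URL")
    readme_preview = gr.Markdown()
    readme_markdown = gr.Textbox(lines=24, label="Markdown", show_copy_button=True)
    generate_btn = gr.Button("Generate README.md")

    # Return the README text twice so both output components receive it.
    def dual_output(repo_url):
        readme = generate_readme(repo_url)
        return readme, readme

    generate_btn.click(dual_output, inputs=repo_input, outputs=[readme_preview, readme_markdown])

if __name__ == "__main__":
    demo.launch()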
modal/server.py DELETED
@@ -1,103 +0,0 @@
-import modal
-
-image = (
-    modal.Image.debian_slim(python_version="3.12")
-    .pip_install(
-        "smolagents[toolkit]",
-        "huggingface_hub",
-        "transformers",
-        "duckduckgo-search",
-        "fastapi",
-        "uvicorn",
-        "PyGithub",
-        "gitpython"
-    )
-)
-
-app = modal.App("smolagents-modal-agent-readme")
-
-HF_SECRET_NAME = "hf-secret"
-MODEL_ID = "mistralai/Mistral-7B-Instruct-v0.2"
-
-@app.function(
-    image=image,
-    gpu="A10G",
-    secrets=[modal.Secret.from_name(HF_SECRET_NAME)],
-    timeout=600,
-)
-@modal.fastapi_endpoint()
-def agent_server():
-    import os
-    from fastapi import FastAPI, HTTPException, Request
-    import uvicorn
-    from huggingface_hub import login as hf_login
-    from smolagents import CodeAgent, WebSearchTool, InferenceClientModel
-    import tempfile
-    import shutil
-    import subprocess
-
-    hf_token = os.environ.get("HF_TOKEN")
-    hf_login(token=hf_token)
-
-    model = InferenceClientModel(
-        model_id=MODEL_ID,
-        provider="huggingface",
-        token=hf_token,
-    )
-
-    agent = CodeAgent(
-        tools=[WebSearchTool()],
-        model=model,
-        stream_outputs=True,
-    )
-
-    app = FastAPI()
-
-    @app.post("/generate_readme")
-    async def generate_readme(request: Request):
-        try:
-            data = await request.json()
-            repo_url = data.get("repo_url")
-            if not repo_url:
-                raise HTTPException(status_code=400, detail="Missing 'repo_url' in request body")
-
-            temp_dir = tempfile.mkdtemp()
-            try:
-                subprocess.run(
-                    ["git", "clone", "--depth", "1", "--branch", "main", repo_url, temp_dir],
-                    check=True
-                )
-
-                repo_summary = []
-                for root, dirs, files in os.walk(temp_dir):
-                    depth = root[len(temp_dir):].count(os.sep)
-                    if depth > 2:
-                        continue
-                    rel_root = os.path.relpath(root, temp_dir)
-                    repo_summary.append(f"Directory: {rel_root}")
-                    for file in files:
-                        if file.endswith((".py", ".md", ".txt", ".json", ".yaml", ".yml")):
-                            file_path = os.path.join(root, file)
-                            try:
-                                with open(file_path, "r", encoding="utf-8") as f:
-                                    content = f.read(500)
-                            except Exception:
-                                content = "[Could not read file content]"
-                            repo_summary.append(f"File: {file} Content Preview: {content}")
-
-                prompt = (
-                    "You are an AI assistant that drafts a detailed README.md for a GitHub repository. "
-                    "Based on the following repository structure and code snippets, create a comprehensive README.md content. "
-                    "Repository summary:\n" + "\n".join(repo_summary)
-                )
-
-                readme_text = agent.run(prompt)
-                return {"readme": readme_text}
-            finally:
-                shutil.rmtree(temp_dir)
-
-        except Exception as e:
-            raise HTTPException(status_code=500, detail=str(e))
-
-    # Start Uvicorn in blocking mode
-    uvicorn.run(app, host="0.0.0.0", port=8000)
server.py ADDED
@@ -0,0 +1,130 @@
+import modal
+
+HF_SECRET_NAME = "hf-secret"
+MODEL_ID = "mistralai/Mistral-7B-Instruct-v0.2"
+
+image = (
+    modal.Image.debian_slim(python_version="3.12")
+    .pip_install(
+        "smolagents[toolkit]",
+        "huggingface_hub",
+        "transformers",
+        "duckduckgo-search",
+        "fastapi",
+        "uvicorn"
+    )
+)
+
+app = modal.App("auto-readme-agent")
+
+@app.function(
+    image=image,
+    gpu="A10G",
+    secrets=[modal.Secret.from_name("hf-secret")],
+    timeout=180,
+)
+# @modal.asgi_app()
+# def fastapi_app():
+#     from fastapi import FastAPI, HTTPException
+#     from pydantic import BaseModel
+#     from smolagents import CodeAgent, DuckDuckGoSearchTool, HfApiModel
+
+#     class AgentRequest(BaseModel):
+#         query: str
+
+#     # Use DuckDuckGoSearchTool as an example tool
+#     agent = CodeAgent(
+#         tools=[DuckDuckGoSearchTool()],
+#         model=HfApiModel(
+#             model_id=MODEL_ID
+#         ),  # Uses Hugging Face Inference API with your token
+#         stream_outputs=True
+#     )
+
+#     app = FastAPI()
+
+#     @app.post("/")
+#     async def run_agent(req: AgentRequest):
+#         try:
+#             result = agent.run(req.query)
+#             return {"result": result}
+#         except Exception as e:
+#             raise HTTPException(status_code=500, detail=str(e))
+
+#     return app
+@modal.asgi_app()
+def fastapi_app():
+    from fastapi import FastAPI, HTTPException
+    from pydantic import BaseModel
+    from smolagents import CodeAgent, HfApiModel
+    import os
+    import tempfile
+    import shutil
+    import subprocess
+    from git import Repo
+
+    class RepoRequest(BaseModel):
+        repo_url: str
+
+    agent = CodeAgent(
+        model=HfApiModel(),
+        stream_outputs=True
+    )
+
+    app = FastAPI()
+
+    def analyze_repo(repo_path):
+        repo_summary = []
+        for root, dirs, files in os.walk(repo_path):
+            # Skip hidden directories
+            dirs[:] = [d for d in dirs if not d.startswith('.')]
+
+            rel_path = os.path.relpath(root, repo_path)
+            repo_summary.append(f"Directory: {rel_path}")
+
+            for file in files:
+                if file.endswith(('.py', '.md', '.txt', '.json', '.yaml')):
+                    file_path = os.path.join(root, file)
+                    try:
+                        with open(file_path, 'r', encoding='utf-8') as f:
+                            content = f.read(1000)  # Read first 1000 characters
+                        repo_summary.append(f"  File: {file}\n  Content: {content[:500]}...")
+                    except Exception as e:
+                        repo_summary.append(f"  File: {file} [Error reading file]")
+        return "\n".join(repo_summary)
+
+    @app.post("/")
+    async def generate_readme(req: RepoRequest):
+        temp_dir = tempfile.mkdtemp()
+        try:
+            # Clone repository
+            Repo.clone_from(req.repo_url, temp_dir, branch='main', depth=1)
+
+            # Analyze repository
+            repo_analysis = analyze_repo(temp_dir)
+
+            # Create prompt
+            prompt = f"""Create a comprehensive README.md for this GitHub repository based on its structure and contents:
+
+Repository Structure:
+{repo_analysis}
+
+The README should include:
+- Project description
+- Installation instructions
+- Usage examples
+- Contributing guidelines
+- License information if available
+
+Format using markdown with proper sections."""
+
+            # Generate README
+            result = agent.run(prompt)
+            return {"readme": result}
+
+        except Exception as e:
+            raise HTTPException(status_code=500, detail=str(e))
+        finally:
+            shutil.rmtree(temp_dir, ignore_errors=True)
+
+    return app
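
For reference, a minimal client sketch against the request/response shape this endpoint defines (POST a JSON body with repo_url, read the readme field back). The URL is the placeholder already referenced in app.py and the target repository is only an example; both are assumptions to adjust for a real deployment:

import requests

# Deploy the app first, e.g. with: modal deploy server.py
MODAL_API_URL = "https://agents-mcp-hackathon--auto-readme-agent-fastapi-app.modal.run"

resp = requests.post(
    MODAL_API_URL,
    json={"repo_url": "https://github.com/octocat/Hello-World"},  # example repository
    timeout=180,
)
resp.raise_for_status()
print(resp.json()["readme"])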