Canstralian committed
Commit 986d276 (verified) · 1 Parent(s): 7b641ae

Update app.py

Files changed (1)
  1. app.py +111 -42
app.py CHANGED
@@ -1,52 +1,121 @@
  import gradio as gr
- from huggingface_hub import InferenceClient
-
- client = InferenceClient("HuggingFaceH4/zephyr-7b-beta")
-
-
- def respond(message, history, system_message, max_tokens, temperature, top_p):
-     messages = [{"role": "system", "content": system_message}]
-
-     for val in history:
-         if val[0]:
-             messages.append({"role": "user", "content": val[0]})
-         if val[1]:
-             messages.append({"role": "assistant", "content": val[1]})
-
-     messages.append({"role": "user", "content": message})
-
-     response = ""
-
      try:
-         for message in client.chat_completion(
-             messages,
-             max_tokens=max_tokens,
-             stream=True,
-             temperature=temperature,
-             top_p=top_p,
-         ):
-             token = message.choices[0].delta.content
-             response += token
-             yield response
      except Exception as e:
-         yield f"Error: {str(e)}"
-
-
- demo = gr.ChatInterface(
-     respond,
-     additional_inputs=[
-         gr.Textbox(value="You are a friendly Chatbot.", label="System message"),
-         gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
-         gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
-         gr.Slider(
-             minimum=0.1,
-             maximum=1.0,
-             value=0.95,
-             step=0.05,
-             label="Top-p (nucleus sampling)",
-         ),
-     ],
- )
-
-
  if __name__ == "__main__":
      demo.launch()
 
  import gradio as gr
+ import subprocess
+ import pyshark
+ from smolagents import Tool
+
+ # HFModelDownloadsTool Definition
+ class HFModelDownloadsTool(Tool):
+     name = "model_download_counter"
+     description = """
+     This is a tool that returns the most downloaded model of a given task on the Hugging Face Hub.
+     It returns the name of the checkpoint."""
+     inputs = {
+         "task": {
+             "type": "string",
+             "description": "the task category (such as text-classification, depth-estimation, etc)",
+         }
+     }
+     output_type = "string"
+
+     def forward(self, task: str):
+         from huggingface_hub import list_models
+
+         model = next(iter(list_models(filter=task, sort="downloads", direction=-1)))
+         return model.id
+
+ # Instantiate the tool
+ model_downloads_tool = HFModelDownloadsTool()
+
+ # Function to integrate HFModelDownloadsTool into Gradio
+ def get_most_downloaded_model(task):
+     if not task:
+         return "Error: Task cannot be empty."
+     try:
+         return model_downloads_tool.forward(task)
+     except Exception as e:
+         return f"Error: {str(e)}"
+
+ # Other functions (Nmap, Nikto, Hydra, PCAP) remain the same
+ def run_nmap(target):
+     if not target:
+         return "Error: Target cannot be empty."
+     try:
+         result = subprocess.run(["nmap", target], capture_output=True, text=True)
+         return result.stdout if result.returncode == 0 else "Error running Nmap scan."
+     except Exception as e:
+         return f"Error: {str(e)}"
+
+ def run_nikto(target):
+     if not target:
+         return "Error: Target cannot be empty."
+     try:
+         result = subprocess.run(["nikto", "-h", target], capture_output=True, text=True)
+         return result.stdout if result.returncode == 0 else "Error running Nikto scan."
+     except Exception as e:
+         return f"Error: {str(e)}"
+
+ def run_hydra(target, service, wordlist):
+     if not target or not service or not wordlist:
+         return "Error: Target, service, and wordlist cannot be empty."
      try:
+         result = subprocess.run(
+             ["hydra", "-l", "admin", "-P", wordlist, f"{service}://{target}"],
+             capture_output=True, text=True
+         )
+         return result.stdout if result.returncode == 0 else "Error running Hydra attack."
      except Exception as e:
+         return f"Error: {str(e)}"
+
+ def analyze_pcap(file_path):
+     if not file_path:
+         return "Error: Please upload a valid PCAP file."
+     try:
+         capture = pyshark.FileCapture(file_path['name'])
+         summary = "\n".join([str(pkt) for pkt in capture])
+         return f"PCAP Analysis Completed. Summary:\n{summary}"
+     except Exception as e:
+         return f"Error analyzing PCAP file: {str(e)}"
+
+ # Gradio Interface
+ with gr.Blocks() as demo:
+     gr.Markdown("## Cybersecurity Scanning Tool with Hugging Face Integration")
+
+     # Nmap Scan
+     with gr.Row():
+         nmap_target = gr.Textbox(label="Enter Target IP for Nmap Scan")
+         nmap_button = gr.Button("Run Nmap Scan")
+     nmap_result = gr.Textbox(label="Nmap Scan Results", interactive=False)
+     nmap_button.click(run_nmap, inputs=nmap_target, outputs=nmap_result)
+
+     # Nikto Scan
+     with gr.Row():
+         nikto_target = gr.Textbox(label="Enter Web Server URL for Nikto Scan")
+         nikto_button = gr.Button("Run Nikto Scan")
+     nikto_result = gr.Textbox(label="Nikto Scan Results", interactive=False)
+     nikto_button.click(run_nikto, inputs=nikto_target, outputs=nikto_result)
+
+     # Hydra Attack
+     with gr.Row():
+         hydra_target = gr.Textbox(label="Enter Target IP for Hydra Attack")
+         hydra_service = gr.Textbox(label="Enter Service (e.g., ssh, ftp)")
+         hydra_wordlist = gr.Textbox(label="Enter Path to Wordlist")
+     hydra_button = gr.Button("Run Hydra Attack")
+     hydra_result = gr.Textbox(label="Hydra Attack Results", interactive=False)
+     hydra_button.click(run_hydra, inputs=[hydra_target, hydra_service, hydra_wordlist], outputs=hydra_result)
+
+     # PCAP Analysis (Wireshark)
+     with gr.Row():
+         pcap_file = gr.File(label="Upload PCAP File")
+         pcap_button = gr.Button("Analyze PCAP File")
+     pcap_result = gr.Textbox(label="PCAP Analysis Results", interactive=False)
+     pcap_button.click(analyze_pcap, inputs=pcap_file, outputs=pcap_result)
+
+     # Hugging Face Most Downloaded Model Tool
+     with gr.Row():
+         hf_task_input = gr.Textbox(label="Enter Task Category (e.g., text-classification)")
+         hf_button = gr.Button("Get Most Downloaded Model")
+     hf_result = gr.Textbox(label="Most Downloaded Model", interactive=False)
+     hf_button.click(get_most_downloaded_model, inputs=hf_task_input, outputs=hf_result)
+
  if __name__ == "__main__":
      demo.launch()
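
For reference, a minimal standalone sketch of the Hub query that HFModelDownloadsTool.forward() wraps; "text-classification" is only an example task value, not something fixed by the committed code:

    from huggingface_hub import list_models

    # Same query the tool issues: the most-downloaded model for a given task tag.
    most_downloaded = next(iter(list_models(filter="text-classification", sort="downloads", direction=-1)))
    print(most_downloaded.id)

One caveat worth checking when running this Space: depending on the installed Gradio version, gr.File may pass analyze_pcap a plain file path string (or a file object exposing a .name attribute) rather than a dict, in which case the file_path['name'] lookup would need to be adapted.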