FraRy committed on
Commit
6960cf4
·
verified ·
1 Parent(s): 4205fce

Deleted main.py in favor of newer version

Browse files
Files changed (1) hide show
  1. main.py +0 -60
main.py DELETED
@@ -1,60 +0,0 @@
1
import torch
import transformers
import gradio as gr
from datasets import load_dataset

# Remember to add access token to huggingface-cli login

# Checkpoint served by this app; loaded a single time when the script starts.
model_id = "meta-llama/Meta-Llama-3.1-8B-Instruct"

# Text-generation pipeline: bfloat16 weights, auto-placed on GPU when one
# is available (device_map="auto").
pipeline = transformers.pipeline(
    "text-generation",
    model=model_id,
    model_kwargs={"torch_dtype": torch.bfloat16},
    device_map="auto",
)

# Cisco CLI reference data, consulted for exact matches before the LLM.
dataset = load_dataset("quantumminds/cisco_cli_commands")
21
-
22
- # Function to search the dataset for a matching command
23
def search_dataset(user_input, records=None):
    """Look up a Cisco CLI command mentioned in *user_input*.

    Scans the command entries for one whose ``command`` string appears
    (case-insensitively) as a substring of the user's question.

    Args:
        user_input: Free-form question text from the user.
        records: Optional iterable of entry dicts to search. Defaults to the
            module-level ``dataset['train']`` split; injectable for testing.

    Returns:
        A formatted markdown string for the first matching entry, or
        ``None`` when no command matches.
    """
    if records is None:
        # assumes the dataset ships a 'train' split -- TODO confirm
        records = dataset["train"]
    query = user_input.lower()
    for entry in records:
        if entry["command"].lower() in query:
            # Bug fix: the original indexed entry['examples'][0] whenever the
            # key existed, which raised IndexError on an empty examples list.
            examples = entry.get("examples") or []
            example = (
                examples[0]["example_command"] if examples else "No example available"
            )
            return (
                f"**Command:** {entry['command']}\n\n"
                f"**Description:** {entry['description']}\n\n"
                f"**Example:** {example}"
            )
    return None  # If no match found
29
-
30
- # Function to generate response using the dataset or fallback to the pipeline
31
def generate_response(user_input):
    """Answer a Cisco configuration question.

    Tries an exact-command lookup in the curated dataset first; when no
    command matches, falls back to the Llama chat pipeline.

    Args:
        user_input: The user's free-form question.

    Returns:
        A markdown answer string (dataset hit) or the model's reply text.
    """
    # Prefer the curated dataset over a (slow, costly) model call.
    dataset_response = search_dataset(user_input)
    if dataset_response:
        return dataset_response

    # No dataset hit -- fall back to the LLM.
    messages = [
        {"role": "system", "content": "You are a pirate chatbot who specializes in Cisco switch and router configurations"},
        {"role": "user", "content": user_input},
    ]
    outputs = pipeline(messages, max_new_tokens=256)

    generated = outputs[0]["generated_text"]
    # Bug fix: with chat-style (messages) input, the transformers pipeline
    # returns 'generated_text' as the full conversation -- a list of message
    # dicts including the system and user turns. The original returned that
    # raw list; extract only the assistant's final reply instead.
    if isinstance(generated, list):
        return generated[-1]["content"]
    return generated
49
-
50
- # Create Gradio interface
51
# Gradio front-end: a single question box wired to generate_response.
question_box = gr.Textbox(
    lines=2,
    placeholder="Enter your Cisco switch/router question here...",
)

iface = gr.Interface(
    fn=generate_response,
    inputs=question_box,
    outputs="text",
    title="Cisco Configuration Assistant",
    description="Ask the chatbot questions about Cisco switch/router configurations",
)

# Start the web UI (blocks until the server is stopped).
iface.launch()