Update README.md
README.md (CHANGED)
@@ -108,10 +108,28 @@ Users (both direct and downstream) should be made aware of the risks, biases and
 
 Use the code below to get started with the model.
 
+!pip install -q -U bitsandbytes
+!pip install -q -U peft
+!pip install -q -U trl
+!pip install -q -U tensorboardX
+!pip install -q wandb
+
+from transformers import AutoModelForCausalLM, AutoTokenizer
+
+# ✅ Load the uploaded model
+model = AutoModelForCausalLM.from_pretrained("ritvik77/Medical_Doctor_AI_LoRA-Mistral-7B-Instruct_FullModel")
+tokenizer = AutoTokenizer.from_pretrained("ritvik77/Medical_Doctor_AI_LoRA-Mistral-7B-Instruct_FullModel")
+
+# ✅ Sample inference
+prompt = "Patient reports chest pain and dizziness with nose bleeding. What’s the likely diagnosis? Is it cancer?"
+inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
+
+outputs = model.generate(**inputs, max_new_tokens=300)
+print(tokenizer.decode(outputs[0], skip_special_tokens=True))
 Python code for usage:
 from transformers import AutoModelForCausalLM, AutoTokenizer
 # ✅ Load the uploaded model
-model = AutoModelForCausalLM.from_pretrained("ritvik77/Medical_Doctor_AI_LoRA-Mistral-7B-
+model = AutoModelForCausalLM.from_pretrained("ritvik77/Medical_Doctor_AI_LoRA-Mistral-7B-Instruct_FullModel")
 tokenizer = AutoTokenizer.from_pretrained("ritvik77/Medical_Doctor_AI_LoRA-Mistral-7B-Instruct")
 # ✅ Sample inference
 prompt = "Patient reports chest pain and dizziness. What’s the likely diagnosis?"
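The added snippet installs bitsandbytes but then loads the checkpoint in full precision. As a minimal sketch that is not part of the original README (the 4-bit settings below, nf4 quantization and bf16 compute, are assumptions), the same checkpoint can be loaded quantized through transformers' BitsAndBytesConfig to cut GPU memory use:

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer, BitsAndBytesConfig

# Sketch only: these quantization settings are assumptions, not taken from the README
bnb_config = BitsAndBytesConfig(
    load_in_4bit=True,                      # quantize weights to 4-bit with bitsandbytes
    bnb_4bit_quant_type="nf4",              # NormalFloat4 quantization
    bnb_4bit_compute_dtype=torch.bfloat16,  # run compute in bf16
)

model = AutoModelForCausalLM.from_pretrained(
    "ritvik77/Medical_Doctor_AI_LoRA-Mistral-7B-Instruct_FullModel",
    quantization_config=bnb_config,
    device_map="auto",                      # automatic device placement; requires accelerate
)
tokenizer = AutoTokenizer.from_pretrained("ritvik77/Medical_Doctor_AI_LoRA-Mistral-7B-Instruct_FullModel")

prompt = "Patient reports chest pain and dizziness. What’s the likely diagnosis?"
inputs = tokenizer(prompt, return_tensors="pt").to(model.device)
outputs = model.generate(**inputs, max_new_tokens=300)
print(tokenizer.decode(outputs[0], skip_special_tokens=True))

Quantized loading trades some generation quality for a much smaller memory footprint; the full-precision call shown in the diff remains the straightforward path when enough GPU memory is available.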