TabasumDev committed (verified)
Commit: d5535d5
Parent(s): 6d59cf8

Update app.py

Files changed (1)
  1. app.py +4 -4
app.py CHANGED
@@ -455,13 +455,14 @@ from peft import get_peft_model, LoraConfig, TaskType
 # ✅ Force CPU execution for Streamlit Cloud
 device = torch.device("cpu")
 
-# 🔹 Load IBM Granite Model (CPU-Compatible)
+# 🔹 Load IBM Granite Model (No Shard Checkpoints)
 MODEL_NAME = "ibm-granite/granite-3.1-2b-instruct"
 
 model = AutoModelForCausalLM.from_pretrained(
     MODEL_NAME,
-    device_map="cpu",           # Force CPU execution
-    torch_dtype=torch.float32   # Use float32 since Streamlit runs on CPU
+    device_map="cpu",
+    torch_dtype=torch.float32,
+    ignore_mismatched_sizes=True  # 🚀 Fixes sharded checkpoint issues
 )
 
 tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)
@@ -580,7 +581,6 @@ if __name__ == '__main__':
 
 
 
-
 # import streamlit as st
 # from PyPDF2 import PdfReader
 
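
For reference, a minimal standalone sketch of the model-loading block as it reads after this commit. The torch and transformers imports are assumptions inferred from the calls in the hunk (the diff itself only shows a peft import in the hunk header), and passing device_map relies on the accelerate package being installed; ignore_mismatched_sizes tells from_pretrained to skip weights whose shapes do not match the model config, which is how the commit comment expects the checkpoint error to be worked around.

import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

# Force CPU execution for Streamlit Cloud (no GPU available there)
device = torch.device("cpu")

MODEL_NAME = "ibm-granite/granite-3.1-2b-instruct"

# Load the IBM Granite model entirely on CPU in full precision
model = AutoModelForCausalLM.from_pretrained(
    MODEL_NAME,
    device_map="cpu",              # keep all weights on the CPU
    torch_dtype=torch.float32,     # float32, since Streamlit Cloud runs on CPU
    ignore_mismatched_sizes=True,  # per the commit, intended to work around the sharded-checkpoint error
)

tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME)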