Transformers
English
Inference Endpoints
eliebak HF staff committed on
Commit
b5cf95e
·
verified ·
1 Parent(s): ec0c8d3

Update README.md

Browse files
Files changed (1) hide show
  1. README.md +1 -1
README.md CHANGED
@@ -15,7 +15,7 @@ We are releasing an intermediate checkpoint of SmolLM2 to enable further researc
15
  import torch
16
  from transformers import AutoModelForCausalLM, AutoTokenizer
17
  checkpoint = "HuggingFaceTB/SmolLM2-360M-intermediate-checkpoints"
18
- revision = "step-320000" # replace by the revision you want
19
  device = torch.device("cuda" if torch.cuda.is_available() else "mps" if hasattr(torch, 'mps') and torch.mps.is_available() else "cpu")
20
  tokenizer = AutoTokenizer.from_pretrained(checkpoint, revision=revision)
21
  model = AutoModelForCausalLM.from_pretrained(checkpoint, revision=revision).to(device)
 
15
  import torch
16
  from transformers import AutoModelForCausalLM, AutoTokenizer
17
  checkpoint = "HuggingFaceTB/SmolLM2-360M-intermediate-checkpoints"
18
+ revision = "step-160000" # replace with the revision you want
19
  device = torch.device("cuda" if torch.cuda.is_available() else "mps" if hasattr(torch, 'mps') and torch.mps.is_available() else "cpu")
20
  tokenizer = AutoTokenizer.from_pretrained(checkpoint, revision=revision)
21
  model = AutoModelForCausalLM.from_pretrained(checkpoint, revision=revision).to(device)