################## DISCOVERING PHASE #############################
import streamlit as st
from transformers import AutoTokenizer, pipeline, logging
from auto_gptq import AutoGPTQForCausalLM, BaseQuantizeConfig
from huggingface_hub import snapshot_download
#import shutil
import os

# Keep all Hugging Face downloads in a ./cache folder next to the app.
cwd = os.getcwd()
cachedir = cwd + '/cache'

# Check if the directory exists before creating it
if not os.path.exists(cachedir):
    os.mkdir(cachedir)

# Point the HF cache at ./cache. Note: huggingface_hub reads HF_HOME at import
# time, so setting it here mainly affects later imports and subprocesses; the
# explicit local_dir below is what places this particular download.
os.environ['HF_HOME'] = cachedir

# Download the quantized model repository into cache/model.
local_folder = cachedir + "/model"
quantized_model_dir = "FPHam/Jackson_The_Formalizer_V2_13b_GPTQ"
snapshot_download(repo_id=quantized_model_dir, local_dir=local_folder, local_dir_use_symlinks=True)

# Path/basename of the GPTQ weight file (extension omitted).
model_basename = cachedir + "/model/Jackson2-4bit-128g-GPTQ"

use_strict = False
use_triton = False
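# ---------------------------------------------------------------------------
# Hedged sketch (not part of the original file): the flags above are the ones
# auto_gptq example scripts typically feed to AutoGPTQForCausalLM.from_quantized.
# The file's real continuation is not shown here, so the device, use_safetensors,
# and tokenizer choices below are illustrative assumptions only; older auto_gptq
# releases also accepted strict=use_strict in this call.
# ---------------------------------------------------------------------------
# tokenizer = AutoTokenizer.from_pretrained(local_folder, use_fast=True)
# model = AutoGPTQForCausalLM.from_quantized(
#     local_folder,                  # directory populated by snapshot_download above
#     model_basename=model_basename,
#     use_safetensors=True,          # assumes the GPTQ weights ship as .safetensors
#     device="cuda:0",               # assumes a single CUDA device is available
#     use_triton=use_triton,
#     quantize_config=None,
# )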