Osnly committed on
Commit 7038265 · verified · Parent: 583755a

Update src/streamlit_app.py

Files changed (1):
src/streamlit_app.py  +13 -5
src/streamlit_app.py CHANGED
@@ -1,4 +1,3 @@
-# app.py
 import streamlit as st
 import pandas as pd
 import matplotlib.pyplot as plt
@@ -11,16 +10,25 @@ from execute import execute_plan
 from insight import generate_insights
 from visual_insight import generate_visual_plan
 from report import ReportBuilder
-import os
+
 from transformers import AutoTokenizer
 
-token = os.environ.get("HUGGINGFACE_TOKEN")
+# Use a writable cache dir to avoid permission issues on Hugging Face Spaces
+HF_CACHE_DIR = "./hf_cache"
+os.environ["HF_HOME"] = HF_CACHE_DIR
+os.environ["TRANSFORMERS_CACHE"] = HF_CACHE_DIR
 
+# Ensure cache directory exists
+os.makedirs(HF_CACHE_DIR, exist_ok=True)
+
+# Authenticate and load tokenizer to check access
+hf_token = os.environ.get("HUGGINGFACE_TOKEN")
 tokenizer = AutoTokenizer.from_pretrained(
     "google/gemma-3n-E4B-it",
-    token=token,  # Required to access gated models
-    use_auth_token=True  # Optional in newer transformers
+    token=hf_token,
+    use_auth_token=True
 )
+
 st.set_page_config(page_title="Smart Data Cleaning Agent", layout="wide")
 st.title("🧠 Smart Data Cleaning Agent")
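
For reference, a minimal standalone sketch of the same setup. It assumes `os` is still imported near the top of the file (the hunk above removes `import os` even though the added lines keep calling `os.environ` and `os.makedirs`), and it drops the deprecated `use_auth_token` argument, since recent transformers releases only need `token`:

import os

# Point the Hugging Face cache at a writable directory before importing
# transformers, so the default cache location (which may be read-only on
# Spaces) is never used.
HF_CACHE_DIR = "./hf_cache"
os.environ["HF_HOME"] = HF_CACHE_DIR
os.environ["TRANSFORMERS_CACHE"] = HF_CACHE_DIR
os.makedirs(HF_CACHE_DIR, exist_ok=True)

from transformers import AutoTokenizer

# HUGGINGFACE_TOKEN is expected as a Space secret; the gated
# google/gemma-3n-E4B-it checkpoint cannot be downloaded without it.
hf_token = os.environ.get("HUGGINGFACE_TOKEN")

tokenizer = AutoTokenizer.from_pretrained(
    "google/gemma-3n-E4B-it",
    token=hf_token,  # `use_auth_token` is deprecated; `token` alone is enough
)

Setting the cache variables before the transformers import matters, since the cache location is typically resolved when the library is first loaded.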