"""Hugging Face Space that snapshots the list of Hub datasets once a day.

A cron job (00:01 UTC) pulls the report dataset repo, dumps the sorted list of
all dataset IDs into ``data/<YYYY-MM-DD>.json``, and commits the file back.
A trivial Gradio interface keeps the Space alive.
"""
import datetime
import json
import os

import gradio as gr
import huggingface_hub as hfh
from apscheduler.schedulers.background import BackgroundScheduler

DATASET_ID = "albertvillanova/datasets-report"
DATASET_PATH = "dataset"
DATA_DIR = "data"
# Local path of the data dir inside the cloned repo, e.g. "dataset/data".
# NOTE: was f"{DATASET_PATH}{DATA_DIR}" == "datasetdata" (missing separator),
# which created a stray directory instead of the repo's data/ folder.
DATA_PATH = f"{DATASET_PATH}/{DATA_DIR}"


def pull_dataset_repo(repo_id=DATASET_ID, repo_path=DATASET_PATH):
    """Clone (or reuse) the report dataset repo locally and pull latest.

    Args:
        repo_id: Hub dataset repo to clone.
        repo_path: local directory for the clone.

    Returns:
        The ``hfh.Repository`` handle, already up to date with the remote.
    """
    # Token must be set as a (secret) env var in the Space settings.
    token = os.environ.get("HUB_TOKEN")
    repo = hfh.Repository(
        local_dir=repo_path,
        clone_from=repo_id,
        repo_type="dataset",
        use_auth_token=token,
    )
    repo.git_pull()
    return repo


def greet(name):
    """Return a greeting for *name* (placeholder UI callback)."""
    return "Hello " + name + "!!"


def update_datasets():
    """Fetch all Hub dataset IDs and commit them as today's JSON snapshot."""
    # Retrieve datasets
    datasets = hfh.list_datasets()
    # Save dataset IDs
    repo = pull_dataset_repo()
    os.makedirs(DATA_PATH, exist_ok=True)
    today = datetime.datetime.now(datetime.timezone.utc).date().isoformat()
    # repo.commit() chdirs into the repo, so the path is relative to it.
    with repo.commit(f"Add {today} data file"):
        with open(f"{DATA_DIR}/{today}.json", "w") as f:
            json.dump([ds.id for ds in sorted(datasets, key=lambda item: item.id)], f)


# Run the snapshot job shortly after midnight UTC every day.
scheduler = BackgroundScheduler()
scheduler.add_job(update_datasets, trigger="cron", hour=0, minute=1, timezone=datetime.timezone.utc)
scheduler.start()

# Minimal UI so the Space has something to serve.
demo = gr.Interface(fn=greet, inputs="text", outputs="text")
demo.launch()