Commit e9a8ede · 1 Parent(s): da68b17
Update app.py

app.py CHANGED
@@ -19,7 +19,12 @@ import requests
 
 from huggingface_hub import hf_hub_download
 
-hf_hub_download(repo_id="CogSphere/aCogSphere", filename="./reviews.csv")
+#hf_hub_download(repo_id="CogSphere/aCogSphere", filename="./reviews.csv")
+
+from huggingface_hub import login
+from datasets import load_dataset
+dataset = load_dataset("csv", data_files="./data.csv")
+
 
 DB_FILE = "./reviews.db"
 
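This hunk swaps the hf_hub_download call for loading a local CSV with the datasets library. A minimal sketch of that loading path, assuming a local data.csv with a header row (the filename comes from the commit; the inspection lines are illustrative):

from datasets import load_dataset

# With no split specified, a local CSV lands in a single "train" split.
dataset = load_dataset("csv", data_files="./data.csv")

print(dataset)              # splits, column names, and row counts
print(dataset["train"][0])  # first row as a dict keyed by column name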
@@ -33,8 +38,10 @@ repo = huggingface_hub.Repository(
 )
 repo.git_pull()
 
+login(username="CognitiveScience", token=TOKEN)
+
 # Set db to latest
-shutil.copyfile("./data/reviews01.db", DB_FILE)
+#shutil.copyfile("./data/reviews01.db", DB_FILE)
 
 # Create table if it doesn't already exist
 
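The new login call authenticates the Space before anything is pushed, and the restore of the database from ./data/reviews01.db is commented out. Note that recent huggingface_hub releases expose a token-only login(), so the username keyword may not be accepted there. A minimal sketch of the token-based form, with HF_TOKEN as an assumed environment variable rather than anything defined in this commit:

import os
from huggingface_hub import login

# Token-based authentication; reading the token from the environment
# keeps it out of the source and out of the Space repo.
login(token=os.environ["HF_TOKEN"])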
@@ -120,13 +127,22 @@ with gr.Blocks(css=css) as demo:
     load_data()
     #return "Hello " + name + "!"
 def backup_db():
-    shutil.copyfile(DB_FILE, "./
+    shutil.copyfile(DB_FILE, "./reviews01.db")
     db = sqlite3.connect(DB_FILE)
     reviews = db.execute("SELECT * FROM reviews").fetchall()
     pd.DataFrame(reviews).to_csv("./reviews1.csv", index=False)
     print("updating db")
     repo.push_to_hub(blocking=False, commit_message=f"Updating data at {datetime.datetime.now()}")
-
+
+def backup_db_csv():
+    shutil.copyfile(DB_FILE, "./reviews02.db")
+    db = sqlite3.connect(DB_FILE)
+    reviews = db.execute("SELECT * FROM reviews").fetchall()
+    pd.DataFrame(reviews).to_csv("./reviews2.csv", index=False)
+    print("updating db csv")
+    dataset = load_dataset("csv", data_files="./reviews2.csv")
+    repo.push_to_hub("CognitiveScience/csdhdata", blocking=False, commit_message=f"Updating data-csv at {datetime.datetime.now()}")
+
 #def load_data2():
 # db = sqlite3.connect(DB_FILE)
 # reviews, total_reviews = get_latest_reviews(db)
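backup_db_csv mirrors backup_db: copy the SQLite file aside, dump the reviews table to CSV, and push. Two small caveats: pd.DataFrame(fetchall()) discards the column names, and the sqlite3 connection is never closed. It is also worth checking what the "CognitiveScience/csdhdata" argument does, since Repository.push_to_hub takes a commit message rather than a target repo id as its first positional argument. A standalone sketch of the dump step that keeps headers and closes the handle, assuming reviews.db already contains a reviews table (dump_reviews_csv is an illustrative name, not part of the commit):

import sqlite3
import pandas as pd

DB_FILE = "./reviews.db"

def dump_reviews_csv(path="./reviews2.csv"):
    # read_sql_query keeps the table's column names as CSV headers
    db = sqlite3.connect(DB_FILE)
    try:
        frame = pd.read_sql_query("SELECT * FROM reviews", db)
    finally:
        db.close()
    frame.to_csv(path, index=False)
    return path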
@@ -135,11 +151,15 @@ def backup_db():
 # #return reviews, total_reviews
 
 scheduler2 = BackgroundScheduler()
-scheduler2.add_job(func=run_actr, trigger="interval", seconds=
+scheduler2.add_job(func=run_actr, trigger="interval", seconds=90)
 scheduler2.start()
 
 scheduler2 = BackgroundScheduler()
-scheduler2.add_job(func=backup_db, trigger="interval", seconds=
+scheduler2.add_job(func=backup_db, trigger="interval", seconds=15)
 scheduler2.start()
 
+scheduler3 = BackgroundScheduler()
+scheduler3.add_job(func=backup_db_csv, trigger="interval", seconds=31)
+scheduler3.start()
+
 demo.launch()
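The commit fills in the interval values (90 s for run_actr, 15 s for backup_db) and adds a third scheduler for the CSV backup. Rebinding the name scheduler2 to a second BackgroundScheduler does not stop the first one, since start() runs it in its own thread, so a single scheduler carrying all three interval jobs is an equivalent and arguably clearer arrangement. A self-contained sketch of that variant, with stub functions standing in for run_actr, backup_db, and backup_db_csv from app.py and the 90/15/31-second intervals taken from this commit:

import time
from apscheduler.schedulers.background import BackgroundScheduler

# Stubs standing in for the real functions defined in app.py.
def run_actr():
    print("run_actr tick")

def backup_db():
    print("backup_db tick")

def backup_db_csv():
    print("backup_db_csv tick")

scheduler = BackgroundScheduler()
scheduler.add_job(func=run_actr, trigger="interval", seconds=90)
scheduler.add_job(func=backup_db, trigger="interval", seconds=15)
scheduler.add_job(func=backup_db_csv, trigger="interval", seconds=31)
scheduler.start()

# In app.py the Gradio server keeps the process alive; standalone, block
# for a while so the background thread gets a chance to fire.
time.sleep(120)
scheduler.shutdown()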