chriswu25 commited on
Commit
77acb2c
·
verified ·
1 Parent(s): 7cefb62

create sync_data.sh

Browse files
Files changed (1) hide show
  1. sync_data.sh +127 -0
sync_data.sh ADDED
@@ -0,0 +1,127 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
#!/bin/bash

# sync_data.sh — periodically back up /data to a Hugging Face dataset repo,
# restoring the newest backup on startup, then hand off to /start.sh.
# Requires: HF_TOKEN (write token), DATASET_ID (target dataset repo).

# Without credentials, skip all backup functionality and start the service
# directly.
if [[ -z "$HF_TOKEN" ]] || [[ -z "$DATASET_ID" ]]; then
  echo "Starting without backup functionality - missing HF_TOKEN or DATASET_ID"
  # exec replaces this process and only returns on failure, so any fall-through
  # is an error — exit non-zero instead of masking it with 'exit 0'.
  exec /start.sh
  exit 1
fi

# Activate the virtual environment that provides huggingface_hub for the
# inline Python below.
source /opt/venv/bin/activate

# upload_backup FILE_PATH FILE_NAME
# Uploads one backup archive to the HF dataset repo, then prunes old
# vaultwarden_backup_*.tar.gz files so at most 50 remain.
# Globals read: HF_TOKEN, DATASET_ID. Outputs progress/errors on stdout.
upload_backup() {
  local file_path="$1"
  local file_name="$2"

  # Hand values to Python through the environment with a quoted heredoc,
  # instead of interpolating them into the Python source: a token or file
  # name containing a quote would otherwise break (or inject into) the code.
  UB_FILE_PATH="$file_path" UB_FILE_NAME="$file_name" \
  UB_TOKEN="$HF_TOKEN" UB_REPO_ID="$DATASET_ID" \
  python3 - <<'PYEOF'
import os
from huggingface_hub import HfApi

def manage_backups(api, repo_id, max_files=50):
    """Delete the oldest backups so that at most max_files remain."""
    files = api.list_repo_files(repo_id=repo_id, repo_type='dataset')
    backup_files = [f for f in files
                    if f.startswith('vaultwarden_backup_') and f.endswith('.tar.gz')]
    backup_files.sort()  # timestamped names sort chronologically

    if len(backup_files) >= max_files:
        for file_to_delete in backup_files[:len(backup_files) - max_files + 1]:
            try:
                api.delete_file(path_in_repo=file_to_delete,
                                repo_id=repo_id, repo_type='dataset')
                print(f'Deleted old backup: {file_to_delete}')
            except Exception as e:
                print(f'Error deleting {file_to_delete}: {str(e)}')

file_path = os.environ['UB_FILE_PATH']
file_name = os.environ['UB_FILE_NAME']
repo_id = os.environ['UB_REPO_ID']
api = HfApi(token=os.environ['UB_TOKEN'])
try:
    api.upload_file(
        path_or_fileobj=file_path,
        path_in_repo=file_name,
        repo_id=repo_id,
        repo_type='dataset'
    )
    print(f'Successfully uploaded {file_name}')

    manage_backups(api, repo_id)
except Exception as e:
    print(f'Error uploading file: {str(e)}')
PYEOF
}

# download_latest_backup
# Finds the newest vaultwarden_backup_*.tar.gz in the HF dataset repo and
# extracts it into /data. No-op (with a message) when no backups exist.
# Globals read: HF_TOKEN, DATASET_ID. Outputs progress/errors on stdout.
download_latest_backup() {
  # Env-var handoff with a quoted heredoc avoids interpolating the token or
  # repo id into the Python source (quoting/injection hazard).
  DB_TOKEN="$HF_TOKEN" DB_REPO_ID="$DATASET_ID" python3 - <<'PYEOF'
import os
import sys
import tarfile
import tempfile
from huggingface_hub import HfApi

repo_id = os.environ['DB_REPO_ID']
api = HfApi(token=os.environ['DB_TOKEN'])
try:
    files = api.list_repo_files(repo_id=repo_id, repo_type='dataset')
    backup_files = [f for f in files
                    if f.startswith('vaultwarden_backup_') and f.endswith('.tar.gz')]

    if not backup_files:
        print('No backup files found')
        sys.exit()

    latest_backup = sorted(backup_files)[-1]  # timestamped names sort chronologically

    with tempfile.TemporaryDirectory() as temp_dir:
        filepath = api.hf_hub_download(
            repo_id=repo_id,
            filename=latest_backup,
            repo_type='dataset',
            local_dir=temp_dir
        )

        if filepath and os.path.exists(filepath):
            # NOTE(review): extractall on an archive can escape /data via
            # '../' members; consider tarfile's filter='data' (Python 3.12+)
            # if the dataset repo is not fully trusted.
            with tarfile.open(filepath, 'r:gz') as tar:
                tar.extractall('/data')
            print(f'Successfully restored backup from {latest_backup}')

except Exception as e:
    print(f'Error downloading backup: {str(e)}')
PYEOF
}

# On first boot, restore the newest backup before the service touches /data.
printf '%s\n' "Downloading latest backup from HuggingFace..."
download_latest_backup

# sync_data
# Endless background loop: every SYNC_INTERVAL seconds (default 7200) archive
# /data into a timestamped tarball and upload it via upload_backup.
# Globals read: SYNC_INTERVAL (optional). Outputs progress on stdout.
sync_data() {
  while true; do
    echo "Starting sync process at $(date)"

    if [ -d /data ]; then
      timestamp=$(date +%Y%m%d_%H%M%S)
      backup_file="vaultwarden_backup_${timestamp}.tar.gz"

      # Only upload when tar succeeded — otherwise a truncated/partial
      # archive would be pushed into the remote backup set.
      if tar -czf "/tmp/${backup_file}" -C /data .; then
        echo "Uploading backup to HuggingFace..."
        upload_backup "/tmp/${backup_file}" "${backup_file}"
      else
        echo "Backup archive creation failed, skipping upload" >&2
      fi

      rm -f "/tmp/${backup_file}"
    else
      echo "Data directory does not exist yet, waiting for next sync..."
    fi

    SYNC_INTERVAL=${SYNC_INTERVAL:-7200}
    echo "Next sync in ${SYNC_INTERVAL} seconds..."
    sleep "$SYNC_INTERVAL"
  done
}
122
+
123
+ # 后台启动同步进程
124
+ sync_data &
125
+
126
+ # 启动 Vaultwarden
127
+ exec /start.sh