Commit
·
2cf5f58
1
Parent(s):
e9eefff
Add download script
Browse files — scripts/download.sh (+51 −0)
scripts/download.sh
ADDED
@@ -0,0 +1,51 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
#!/bin/bash
|
2 |
+
source scripts/utils.sh echo -n
|
3 |
+
|
4 |
+
# Saner programming env: these switches turn some bugs into errors
|
5 |
+
set -o errexit -o pipefail
|
6 |
+
|
7 |
+
# This script is meant to be used with the command 'datalad run'
|
8 |
+
|
9 |
+
# Download dataset
|
10 |
+
files_url=(
|
11 |
+
"https://bigearth.net/downloads/BigEarthNet-S2-v1.0.tar.gz S2/BigEarthNet-S2-v1.0.tar.gz"
|
12 |
+
"https://bigearth.net/static/documents/Description_BigEarthNet-S2.pdf S2/Description_BigEarthNet-S2.pdf"
|
13 |
+
"https://bigearth.net/static/documents/patches_with_seasonal_snow.csv S2/patches_with_seasonal_snow.csv"
|
14 |
+
"https://bigearth.net/static/documents/patches_with_cloud_and_shadow.csv S2/patches_with_cloud_and_shadow.csv"
|
15 |
+
"https://bigearth.net/downloads/BigEarthNet-S1-v1.0.tar.gz S1/BigEarthNet-S1-v1.0.tar.gz"
|
16 |
+
"https://bigearth.net/static/documents/Description_BigEarthNet-S1.pdf S1/Description_BigEarthNet-S1.pdf"
|
17 |
+
"https://bigearth.net/static/documents/Description_BigEarthNet-MM.pdf MM/Description_BigEarthNet-MM.pdf")
|
18 |
+
|
19 |
+
git-annex addurl --fast -c annex.largefiles=anything --raw --batch --with-files <<EOF
|
20 |
+
$(for file_url in "${files_url[@]}" ; do echo "${file_url}" ; done)
|
21 |
+
EOF
|
22 |
+
# Downloads should complete correctly but in multiprocesses the last git-annex
|
23 |
+
# step most likely fails on a BGFS with the error "rename: resource busy
|
24 |
+
# (Device or resource busy)"
|
25 |
+
! git-annex get --fast -J8
|
26 |
+
# Remove the last byte from each files to prevent the "download failed:
|
27 |
+
# ResponseBodyTooShort" error
|
28 |
+
ls -l $(list) | grep -oE "\.git/[^']*" | \
|
29 |
+
cut -d'/' -f7 | xargs -n1 -- find .git/annex/tmp/ -name | \
|
30 |
+
while read f
|
31 |
+
do
|
32 |
+
newfsize=$(($(stat -c '%s' "${f}") - 1))
|
33 |
+
truncate -s $newfsize "${f}"
|
34 |
+
done
|
35 |
+
# Retry incomplete downloads
|
36 |
+
git-annex get --fast --incomplete
|
37 |
+
git-annex migrate --fast -c annex.largefiles=anything *
|
38 |
+
|
39 |
+
# Verify dataset
|
40 |
+
if [[ -f md5sums ]]
|
41 |
+
then
|
42 |
+
md5sum -c md5sums
|
43 |
+
fi
|
44 |
+
list -- --fast | while read f
|
45 |
+
do
|
46 |
+
if [[ -z "$(echo "${f}" | grep -E "^bin/")" ]] &&
|
47 |
+
[[ -z "$(grep -E " (\./)?${f//\./\\.}$" md5sums)" ]]
|
48 |
+
then
|
49 |
+
md5sum "${f}" >> md5sums
|
50 |
+
fi
|
51 |
+
done
|