Update pages/1_🖼️_VertBox.py
Browse files- pages/1_🖼️_VertBox.py +382 -155
pages/1_🖼️_VertBox.py
CHANGED
@@ -1,195 +1,422 @@
|
|
1 |
import streamlit as st
|
2 |
-
import geopandas as gpd
|
3 |
import folium
|
4 |
-
from streamlit_folium import
|
5 |
from folium.plugins import Draw
|
6 |
-
import
|
7 |
-
from shapely.geometry import box
|
8 |
-
import json
|
9 |
-
from PIL import Image
|
10 |
-
import io
|
11 |
-
import numpy as np
|
12 |
import tempfile
|
13 |
import os
|
14 |
import urllib.request
|
|
|
|
|
|
|
|
|
|
|
15 |
import zipfile
|
16 |
-
|
17 |
-
|
|
|
|
|
18 |
|
19 |
# Constants
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
20 |
URL_STAC_SWISSTOPO_BASE = 'https://data.geo.admin.ch/api/stac/v0.9/collections/'
|
21 |
DIC_LAYERS = {
|
22 |
'ortho': 'ch.swisstopo.swissimage-dop10',
|
23 |
'mnt': 'ch.swisstopo.swissalti3d',
|
24 |
'mns': 'ch.swisstopo.swisssurface3d-raster',
|
25 |
-
'
|
26 |
'bati3D_v3': 'ch.swisstopo.swissbuildings3d_3_0',
|
27 |
}
|
28 |
|
29 |
-
FOLDER_NAME_SWISSTOPO = "swisstopo"
|
30 |
-
NB_POLYGONES_MAX = 2000000
|
31 |
-
|
32 |
# Helper functions
|
33 |
-
|
34 |
-
|
|
|
|
|
|
|
|
|
|
|
35 |
url = f'http://geodesy.geo.admin.ch/reframe/lv95towgs84?easting={x}&northing={y}&format=json'
|
36 |
-
with urllib.request.urlopen(url) as
|
37 |
-
|
38 |
-
return
|
39 |
|
40 |
-
|
41 |
-
|
42 |
-
|
43 |
-
|
44 |
-
|
45 |
-
|
46 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
47 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
48 |
while url:
|
49 |
-
with urllib.request.urlopen(url) as
|
50 |
-
json_res = json.
|
51 |
|
52 |
url = None
|
53 |
-
|
54 |
-
|
55 |
-
|
|
|
|
|
56 |
|
57 |
for item in json_res['features']:
|
58 |
for k, dic in item['assets'].items():
|
59 |
href = dic['href']
|
60 |
if gdb:
|
61 |
-
if href
|
62 |
-
res.append(href)
|
63 |
-
|
64 |
-
|
|
|
65 |
|
66 |
return res
|
67 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
68 |
def suppr_doublons_list_ortho(lst):
|
69 |
dic = {}
|
70 |
for url in lst:
|
71 |
nom, an, noflle, taille_px, epsg = url.split('/')[-1][:-4].split('_')
|
72 |
dic.setdefault((noflle, float(taille_px)), []).append((an, url))
|
73 |
-
|
|
|
|
|
|
|
|
|
74 |
|
75 |
-
def
|
76 |
-
|
77 |
-
for
|
78 |
-
|
79 |
-
|
80 |
-
|
81 |
-
|
82 |
-
|
83 |
-
|
84 |
-
|
85 |
-
|
86 |
-
|
87 |
-
|
88 |
-
|
89 |
-
|
90 |
-
|
91 |
-
|
92 |
-
|
93 |
-
|
94 |
-
|
95 |
-
|
96 |
-
|
97 |
-
|
98 |
-
|
99 |
-
|
100 |
-
|
101 |
-
|
102 |
-
|
103 |
-
|
104 |
-
|
105 |
-
|
106 |
-
|
107 |
-
|
108 |
-
|
109 |
-
|
110 |
-
|
111 |
-
|
112 |
-
|
113 |
-
|
114 |
-
with col2:
|
115 |
-
st.header("Carte")
|
116 |
-
m = folium.Map(location=[46.8, 8.2], zoom_start=8)
|
117 |
-
Draw(draw_options={'polyline': False, 'polygon': False, 'circle': False, 'marker': False, 'circlemarker': False},
|
118 |
-
edit_options={'edit': False}).add_to(m)
|
119 |
-
output = st_folium(m, width=700, height=500)
|
120 |
-
|
121 |
-
if output['last_active_drawing']:
|
122 |
-
coords = output['last_active_drawing']['geometry']['coordinates'][0]
|
123 |
-
xmin, ymin = min(c[0] for c in coords), min(c[1] for c in coords)
|
124 |
-
xmax, ymax = max(c[0] for c in coords), max(c[1] for c in coords)
|
125 |
-
st.success("Emprise sélectionnée!")
|
126 |
-
|
127 |
-
if st.button("Obtenir les données"):
|
128 |
-
bbox = (xmin, ymin, xmax, ymax)
|
129 |
-
urls = []
|
130 |
-
|
131 |
-
# MNT
|
132 |
-
if mnt2m or mnt50cm:
|
133 |
-
tri = '_2_' if mnt2m else '_0.5_'
|
134 |
-
url = URL_STAC_SWISSTOPO_BASE + DIC_LAYERS['mnt']
|
135 |
-
lst = [v for v in get_list_from_STAC_swisstopo(url, *bbox) if tri in v]
|
136 |
-
urls += lst
|
137 |
-
|
138 |
-
# MNS
|
139 |
-
if mns:
|
140 |
-
url = URL_STAC_SWISSTOPO_BASE + DIC_LAYERS['mns']
|
141 |
-
lst = get_list_from_STAC_swisstopo(url, *bbox)
|
142 |
-
urls += lst
|
143 |
-
|
144 |
-
# Bâtiments 3D
|
145 |
-
if bati3D:
|
146 |
-
url = URL_STAC_SWISSTOPO_BASE + DIC_LAYERS['bati3D']
|
147 |
-
lst = get_list_from_STAC_swisstopo(url, *bbox)
|
148 |
-
lst = suppr_doublons_bati3D(lst)
|
149 |
-
urls += lst
|
150 |
-
|
151 |
-
# Bâtiments 3D v3
|
152 |
-
if bati3D_v3:
|
153 |
-
url = URL_STAC_SWISSTOPO_BASE + DIC_LAYERS['bati3D_v3']
|
154 |
-
lst = get_list_from_STAC_swisstopo(url, *bbox, gdb=True)
|
155 |
-
urls += lst
|
156 |
-
|
157 |
-
# Orthophoto
|
158 |
-
if ortho2m or ortho10cm:
|
159 |
-
tri = '_2_' if ortho2m else '_0.1_'
|
160 |
-
url = URL_STAC_SWISSTOPO_BASE + DIC_LAYERS['ortho']
|
161 |
-
lst = [v for v in get_list_from_STAC_swisstopo(url, *bbox) if tri in v]
|
162 |
-
lst = suppr_doublons_list_ortho(lst)
|
163 |
-
urls += lst
|
164 |
-
|
165 |
-
if urls:
|
166 |
-
st.write(f"Nombre de fichiers à télécharger : {len(urls)}")
|
167 |
-
|
168 |
-
# Create a temporary directory for downloads
|
169 |
-
with tempfile.TemporaryDirectory() as tmpdirname:
|
170 |
-
progress_bar = st.progress(0)
|
171 |
-
for i, url in enumerate(urls):
|
172 |
-
filename = os.path.join(tmpdirname, url.split('/')[-1])
|
173 |
-
download_file(url, filename)
|
174 |
-
if filename.endswith('.zip'):
|
175 |
-
unzip_file(filename, tmpdirname)
|
176 |
-
progress_bar.progress((i + 1) / len(urls))
|
177 |
-
|
178 |
-
st.success("Téléchargement terminé!")
|
179 |
-
|
180 |
-
# Create a zip file of all downloaded content
|
181 |
-
zip_filename = "swisstopo_data.zip"
|
182 |
-
shutil.make_archive(zip_filename[:-4], 'zip', tmpdirname)
|
183 |
-
|
184 |
-
with open(zip_filename, "rb") as fp:
|
185 |
-
btn = st.download_button(
|
186 |
-
label="Télécharger les données",
|
187 |
-
data=fp,
|
188 |
-
file_name=zip_filename,
|
189 |
-
mime="application/zip"
|
190 |
-
)
|
191 |
else:
|
192 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
193 |
|
194 |
-
|
195 |
-
main()
|
|
|
1 |
import streamlit as st
|
|
|
2 |
import folium
|
3 |
+
from streamlit_folium import st_folium
|
4 |
from folium.plugins import Draw
|
5 |
+
import geopandas as gpd
|
|
|
|
|
|
|
|
|
|
|
6 |
import tempfile
|
7 |
import os
|
8 |
import urllib.request
|
9 |
+
import json
|
10 |
+
from pathlib import Path
|
11 |
+
import datetime
|
12 |
+
from osgeo import gdal
|
13 |
+
import io
|
14 |
import zipfile
|
15 |
+
import base64
|
16 |
+
import concurrent.futures
|
17 |
+
import requests
|
18 |
+
from functools import partial
|
19 |
|
20 |
# Constants
# German TLM land-cover category -> French display label. The keys are used
# by geojson_forest() to build the OBJEKTART filter of the ArcGIS query.
CATEGORIES = {
    'Gebueschwald': 'Forêt buissonnante',
    'Wald': 'Forêt',
    'Wald offen': 'Forêt claisemée',
    'Gehoelzflaeche': 'Zone boisée',
}
# NOTE(review): MERGE_CATEGORIES is never read in this file -- presumably
# consumed elsewhere or a leftover; confirm before removing.
MERGE_CATEGORIES = True
# Root of the swisstopo STAC API; a collection id from DIC_LAYERS is appended.
URL_STAC_SWISSTOPO_BASE = 'https://data.geo.admin.ch/api/stac/v0.9/collections/'
# Short internal layer keys -> swisstopo STAC collection identifiers.
DIC_LAYERS = {
    'ortho': 'ch.swisstopo.swissimage-dop10',
    'mnt': 'ch.swisstopo.swissalti3d',
    'mns': 'ch.swisstopo.swisssurface3d-raster',
    'bati3D_v2': 'ch.swisstopo.swissbuildings3d_2',
    'bati3D_v3': 'ch.swisstopo.swissbuildings3d_3_0',
}
37 |
# Helper functions
|
38 |
+
def wgs84_to_lv95(lat, lon):
    """Reproject a WGS84 (lat, lon) point to LV95 via the swisstopo REFRAME API.

    Returns (easting, northing) in the Swiss LV95 frame.
    """
    url = f'http://geodesy.geo.admin.ch/reframe/wgs84tolv95?easting={lon}&northing={lat}&format=json'
    with urllib.request.urlopen(url) as resp:
        payload = json.load(resp)
    return payload['easting'], payload['northing']
43 |
+
|
44 |
+
def lv95_to_wgs84(x, y):
    """Reproject an LV95 (easting, northing) point to WGS84 via REFRAME.

    NOTE: unlike wgs84_to_lv95, this returns (northing, easting), i.e.
    (lat, lon) -- detect_and_convert_bbox relies on that ordering.
    """
    url = f'http://geodesy.geo.admin.ch/reframe/lv95towgs84?easting={x}&northing={y}&format=json'
    with urllib.request.urlopen(url) as resp:
        payload = json.load(resp)
    return payload['northing'], payload['easting']
49 |
|
50 |
+
def detect_and_convert_bbox(bbox):
    """Classify *bbox* as WGS84 or LV95 and return it in both systems.

    bbox is (xmin, ymin, xmax, ymax). Returns (bbox_wgs84, bbox_lv95), or
    None when the box falls outside a generous margin around Switzerland
    in both coordinate systems.
    """
    xmin, ymin, xmax, ymax = bbox

    def _contains(bounds):
        # All four corners of the box must sit inside the given bounds.
        return (bounds['xmin'] <= xmin <= bounds['xmax'] and
                bounds['ymin'] <= ymin <= bounds['ymax'] and
                bounds['xmin'] <= xmax <= bounds['xmax'] and
                bounds['ymin'] <= ymax <= bounds['ymax'])

    wgs84_margin = 0.9
    wgs84_bounds = {
        'xmin': 5.96 - wgs84_margin,
        'ymin': 45.82 - wgs84_margin,
        'xmax': 10.49 + wgs84_margin,
        'ymax': 47.81 + wgs84_margin
    }
    lv95_margin = 100000
    lv95_bounds = {
        'xmin': 2485000 - lv95_margin,
        'ymin': 1075000 - lv95_margin,
        'xmax': 2834000 + lv95_margin,
        'ymax': 1296000 + lv95_margin
    }

    if _contains(wgs84_bounds):
        # Looks like WGS84 degrees: convert both corners to LV95.
        lv95_min = wgs84_to_lv95(ymin, xmin)
        lv95_max = wgs84_to_lv95(ymax, xmax)
        return (bbox, (lv95_min[0], lv95_min[1], lv95_max[0], lv95_max[1]))

    if _contains(lv95_bounds):
        # Looks like LV95 metres: convert both corners to WGS84.
        # lv95_to_wgs84 returns (lat, lon), hence the index swap below.
        wgs84_min = lv95_to_wgs84(xmin, ymin)
        wgs84_max = lv95_to_wgs84(xmax, ymax)
        return ((wgs84_min[1], wgs84_min[0], wgs84_max[1], wgs84_max[0]), bbox)

    return None
86 |
+
|
87 |
+
def get_list_from_STAC_swisstopo(url, est, sud, ouest, nord, gdb=False):
    """List asset download URLs of a swisstopo STAC collection inside a bbox.

    Follows 'next' pagination links until exhausted. When gdb is True, only
    .gdb.zip assets whose filename has exactly 7 '_'-separated parts are
    kept; otherwise .xyz.zip and .gdb.zip assets are filtered out.
    """
    unwanted_suffixes = [] if gdb else ['.xyz.zip', '.gdb.zip']
    page_url = url + f"/items?bbox={est},{sud},{ouest},{nord}"
    assets = []

    while page_url:
        with urllib.request.urlopen(page_url) as resp:
            page = json.load(resp)

        # Chase the pagination chain, if the server provided one.
        page_url = None
        for link in page.get('links', None) or []:
            if link['rel'] == 'next':
                page_url = link['href']

        for feature in page['features']:
            for _, asset in feature['assets'].items():
                href = asset['href']
                if gdb:
                    if href[-8:] == '.gdb.zip' and len(href.split('/')[-1].split('_')) == 7:
                        assets.append(href)
                elif href[-8:] not in unwanted_suffixes:
                    assets.append(href)

    return assets
115 |
|
116 |
+
def suppr_doublons_bati3D_v2(lst_url):
    """Keep only the most recent .dxf.zip per map sheet.

    The sheet id and acquisition date are taken from the second-to-last
    path component: <...>_<date>_<sheet>.
    """
    per_sheet = {}
    for candidate in lst_url:
        if not candidate.endswith('.dxf.zip'):
            continue
        *_, date, sheet = candidate.split('/')[-2].split('_')
        per_sheet.setdefault(sheet, []).append((date, candidate))
    # max() on (date, url) tuples picks the newest, same as sorting desc.
    return [max(entries)[1] for entries in per_sheet.values()]
126 |
+
|
127 |
+
def suppr_doublons_bati3D_v3(lst_url):
    """Keep only the most recent .gdb.zip per map sheet.

    Mirrors suppr_doublons_bati3D_v2 but for the v3 FileGDB archives.
    """
    per_sheet = {}
    for candidate in lst_url:
        if not candidate.endswith('.gdb.zip'):
            continue
        *_, date, sheet = candidate.split('/')[-2].split('_')
        per_sheet.setdefault(sheet, []).append((date, candidate))
    return [max(entries)[1] for entries in per_sheet.values()]
137 |
+
|
138 |
def suppr_doublons_list_ortho(lst):
    """De-duplicate orthophoto URLs: keep the newest year per (sheet, pixel size).

    Filename pattern (extension stripped by [:-4]):
    <name>_<year>_<sheet>_<pixel-size>_<epsg>
    """
    by_key = {}
    for url in lst:
        nom, an, noflle, taille_px, epsg = url.split('/')[-1][:-4].split('_')
        by_key.setdefault((noflle, float(taille_px)), []).append((an, url))
    # Newest (year, url) tuple wins; max() matches sorted(..., reverse=True)[0].
    return [max(candidates)[1] for candidates in by_key.values()]
148 |
|
149 |
+
def suppr_doublons_list_mnt(lst):
    """De-duplicate MNT/MNS URLs: keep the newest year per (sheet, pixel size).

    Same idea as suppr_doublons_list_ortho, but these filenames carry one
    extra trailing field:
    <name>_<year>_<sheet>_<pixel-size>_<epsg>_<extra>
    """
    by_key = {}
    for url in lst:
        nom, an, noflle, taille_px, epsg, inconnu = url.split('/')[-1][:-4].split('_')
        by_key.setdefault((noflle, float(taille_px)), []).append((an, url))
    return [max(candidates)[1] for candidates in by_key.values()]
159 |
+
|
160 |
+
@st.cache_data
def get_urls(bbox_wgs84, data_types, resolutions):
    """Collect download URLs for every enabled layer, fetching in parallel.

    data_types maps layer key -> bool (enabled); resolutions maps layer
    key -> chosen resolution (only meaningful for 'mnt' and 'ortho').
    Errors for one layer are reported via st.error without aborting the rest.
    """
    urls = []
    with concurrent.futures.ThreadPoolExecutor(max_workers=5) as pool:
        pending = {}
        for data_type, enabled in data_types.items():
            if not enabled:
                continue
            fut = pool.submit(get_urls_for_data_type, data_type,
                              bbox_wgs84, resolutions.get(data_type))
            pending[fut] = data_type
        for fut in concurrent.futures.as_completed(pending):
            data_type = pending[fut]
            try:
                urls.extend(fut.result())
            except Exception as exc:
                st.error(f"Error fetching URLs for {data_type}: {exc}")
    return urls
180 |
+
|
181 |
+
def get_urls_for_data_type(data_type, bbox_wgs84, resolution=None):
    """Return the de-duplicated download URLs for one swisstopo layer.

    data_type is a key of DIC_LAYERS; bbox_wgs84 is (xmin, ymin, xmax, ymax)
    in WGS84; resolution (float, metres) applies only to 'mnt' and 'ortho'.
    Returns an empty list for unknown layer keys.
    """
    url = URL_STAC_SWISSTOPO_BASE + DIC_LAYERS[data_type]
    if data_type in ['mnt', 'ortho']:
        # Asset filenames embed the resolution without a trailing '.0'
        # (e.g. '_2_', '_0.5_', '_0.1_'). Plain f'_{resolution}_' would
        # render 2.0 as '_2.0_' and match nothing, so use ':g' to drop
        # the insignificant fraction.
        tri = f'_{resolution:g}_'
        lst = [v for v in get_list_from_STAC_swisstopo(url, *bbox_wgs84) if tri in v]
        if data_type == 'mnt':
            return suppr_doublons_list_mnt(lst)
        else:
            return suppr_doublons_list_ortho(lst)
    elif data_type == 'mns':
        # The surface-model collection mixes products; keep only rasters.
        lst = [v for v in get_list_from_STAC_swisstopo(url, *bbox_wgs84) if 'raster' in v]
        return suppr_doublons_list_mnt(lst)
    elif data_type == 'bati3D_v2':
        lst = get_list_from_STAC_swisstopo(url, *bbox_wgs84)
        return suppr_doublons_bati3D_v2(lst)
    elif data_type == 'bati3D_v3':
        # v3 buildings ship as FileGDB archives; gdb=True selects them.
        lst = get_list_from_STAC_swisstopo(url, *bbox_wgs84, gdb=True)
        return suppr_doublons_bati3D_v3(lst)
    return []
200 |
+
|
201 |
+
def fetch_url(url):
    """Download *url* and return the raw response body as bytes.

    Raises requests.HTTPError on a 4xx/5xx status instead of silently
    returning an error page (which would otherwise be written to disk as
    if it were a valid tile), and bounds the wait so one dead link cannot
    hang a worker thread indefinitely.
    """
    response = requests.get(url, timeout=60)
    response.raise_for_status()
    return response.content
204 |
+
|
205 |
+
def merge_ortho_images(urls, output_format='GTiff'):
    """Download orthophoto tiles and mosaic them into a single image.

    Tiles are fetched concurrently, merged through a GDAL VRT, then
    translated to *output_format* ('GTiff', 'JPEG' or 'PNG'). Returns the
    merged image as bytes, or None on any failure (errors are surfaced via
    st.error rather than raised).
    """
    try:
        with tempfile.TemporaryDirectory() as temp_dir:
            local_files = []
            # Download all tiles in parallel; a failed tile is reported and
            # skipped, it does not abort the whole mosaic.
            with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
                future_to_url = {executor.submit(fetch_url, url): url for url in urls}
                for i, future in enumerate(concurrent.futures.as_completed(future_to_url)):
                    url = future_to_url[future]
                    try:
                        data = future.result()
                        local_filename = os.path.join(temp_dir, f"ortho_{i}.tif")
                        with open(local_filename, 'wb') as f:
                            f.write(data)
                        local_files.append(local_filename)
                    except Exception as exc:
                        st.error(f"Error downloading {url}: {exc}")

            if not local_files:
                st.error("No ortho images were successfully downloaded.")
                return None

            # Build a virtual mosaic first; it references the tiles on disk
            # without copying pixel data.
            vrt_options = gdal.BuildVRTOptions(resampleAlg='nearest', addAlpha=False)
            vrt_path = os.path.join(temp_dir, "merged.vrt")
            vrt = gdal.BuildVRT(vrt_path, local_files, options=vrt_options)
            vrt = None  # Close the dataset so the VRT is flushed to disk

            output_path = os.path.join(temp_dir, f"merged.{output_format.lower()}")
            if output_format == 'GTiff':
                translate_options = gdal.TranslateOptions(format="GTiff", creationOptions=["COMPRESS=LZW", "TILED=YES"])
            elif output_format == 'JPEG':
                translate_options = gdal.TranslateOptions(format="JPEG", creationOptions=["QUALITY=85"])
            elif output_format == 'PNG':
                # NOTE(review): 'COMPRESS=DEFLATE' does not look like a
                # documented PNG-driver creation option (PNG uses ZLEVEL);
                # GDAL will likely warn and ignore it -- confirm.
                translate_options = gdal.TranslateOptions(format="PNG", creationOptions=["COMPRESS=DEFLATE"])
            else:
                st.error(f"Unsupported output format: {output_format}")
                return None

            gdal.Translate(output_path, vrt_path, options=translate_options)

            if not os.path.exists(output_path):
                st.error(f"Failed to create merged image: {output_path}")
                return None

            # Read the merged file into memory before the temp dir vanishes.
            with open(output_path, 'rb') as f:
                return f.read()
    except Exception as e:
        st.error(f"Error in merge_ortho_images: {e}")
        return None
253 |
+
|
254 |
+
def create_geojson_with_links(urls, bbox):
    """Build a GeoJSON FeatureCollection string, one feature per download URL.

    bbox is (xmin, ymin, xmax, ymax); every feature carries the same bbox
    polygon as geometry, plus the download url and a coarse type tag
    (first '_'-separated token of the parent path component) as properties.
    """
    xmin, ymin, xmax, ymax = bbox
    # A GeoJSON Polygon is a list of linear rings of [x, y] positions,
    # closed by repeating the first vertex. The previous code put the raw
    # 4-tuple bbox into "coordinates", which is not valid GeoJSON geometry.
    ring = [[xmin, ymin], [xmax, ymin], [xmax, ymax], [xmin, ymax], [xmin, ymin]]
    features = []
    for url in urls:
        feature = {
            "type": "Feature",
            "geometry": {
                "type": "Polygon",
                "coordinates": [ring]
            },
            "properties": {
                "url": url,
                "type": url.split('/')[-2].split('_')[0]
            }
        }
        features.append(feature)

    geojson = {
        "type": "FeatureCollection",
        "features": features
    }
    return json.dumps(geojson)
275 |
+
|
276 |
+
@st.cache_data
def prepare_download_package(urls, bbox, ortho_format):
    """Assemble the downloadable ZIP: a links GeoJSON plus a merged ortho.

    urls: every selected asset URL; bbox: the selected bounding box;
    ortho_format: 'GTiff' / 'JPEG' / 'PNG' for the merged orthophoto.
    Returns the ZIP archive content as bytes (cached by Streamlit on the
    argument values).
    """
    geojson_data = create_geojson_with_links(urls, bbox)
    # Only the orthophoto tiles are merged into one image; everything else
    # stays as links inside the GeoJSON.
    ortho_urls = [url for url in urls if 'swissimage-dop10' in url]
    ortho_data = merge_ortho_images(ortho_urls, ortho_format) if ortho_urls else None

    zip_buffer = io.BytesIO()
    with zipfile.ZipFile(zip_buffer, 'w', zipfile.ZIP_DEFLATED) as zip_file:
        zip_file.writestr('download_links.geojson', geojson_data)
        if ortho_data:
            zip_file.writestr(f'merged_ortho.{ortho_format.lower()}', ortho_data)
        else:
            # NOTE(review): this warning also fires when no ortho layer was
            # selected at all (ortho_urls empty), where "failed" is
            # misleading -- consider distinguishing the two cases.
            st.warning("Failed to merge ortho images. Only download links will be included in the package.")

    return zip_buffer.getvalue()
291 |
+
|
292 |
+
def geojson_forest(bbox, fn_geojson):
    """Fetch forest land-cover polygons inside *bbox* and save them as GeoJSON.

    bbox is (xmin, ymin, xmax, ymax) in LV95 (EPSG:2056). Queries the
    hepiadata ArcGIS FeatureServer for the OBJEKTART categories listed in
    CATEGORIES and writes the raw GeoJSON response to *fn_geojson*.
    """
    xmin, ymin, xmax, ymax = bbox
    url_base = 'https://hepiadata.hesge.ch/arcgis/rest/services/suisse/TLM_C4D_couverture_sol/FeatureServer/1/query?'
    # e.g. "OBJEKTART='Wald' OR OBJEKTART='Gebueschwald' OR ..."
    sql = ' OR '.join([f"OBJEKTART='{cat}'" for cat in CATEGORIES.keys()])
    params = {
        "geometry": f"{xmin},{ymin},{xmax},{ymax}",
        "geometryType": "esriGeometryEnvelope",
        "returnGeometry": "true",
        "outFields": "OBJEKTART",
        "orderByFields": "OBJEKTART",
        "where": sql,
        "returnZ": "true",
        "outSR": '2056',
        "spatialRel": "esriSpatialRelIntersects",
        "f": "geojson"
    }
    # NOTE(review): urllib.parse works here only because importing
    # urllib.request pulls it in as a side effect; an explicit
    # `import urllib.parse` at the top of the file is the safe form.
    query_string = urllib.parse.urlencode(params)
    url = url_base + query_string
    with urllib.request.urlopen(url) as response:
        data = json.load(response)
    with open(fn_geojson, 'w') as f:
        json.dump(data, f)
314 |
+
|
315 |
+
# Streamlit app
# Flat top-level script: Streamlit re-executes it on every interaction.
st.set_page_config(page_title="Swiss Geospatial Data Downloader", layout="wide")
st.title("Swiss Geospatial Data Downloader")

# Sidebar for data selection: one enable-checkbox per layer key of DIC_LAYERS.
st.sidebar.header("Data Selection")
data_types = {
    'mnt': st.sidebar.checkbox("Digital Terrain Model (MNT)", value=True),
    'mns': st.sidebar.checkbox("Digital Surface Model (MNS)", value=True),
    'bati3D_v2': st.sidebar.checkbox("3D Buildings v2", value=True),
    'bati3D_v3': st.sidebar.checkbox("3D Buildings v3", value=True),
    'ortho': st.sidebar.checkbox("Orthophotos", value=True),
}
# Ground resolutions in metres; consumed by get_urls_for_data_type.
resolutions = {
    'mnt': st.sidebar.selectbox("MNT Resolution", [0.5, 2.0], index=0),
    'ortho': st.sidebar.selectbox("Orthophoto Resolution", [0.1, 2.0], index=0),
}
ortho_format = st.sidebar.selectbox("Ortho Output Format", ['GTiff', 'JPEG', 'PNG'], index=0)

# Main content area
st.subheader("Select Bounding Box")

# Create a map centered on Switzerland
m = folium.Map(location=[46.8182, 8.2275], zoom_start=8)

# Add rectangle draw control (only rectangles are allowed).
draw = Draw(
    draw_options={
        'rectangle': True,
        'polygon': False,
        'polyline': False,
        'circle': False,
        'marker': False,
        'circlemarker': False
    },
    edit_options={'edit': False}
)
draw.add_to(m)

# Use st_folium to render the map and get the drawn bbox
output = st_folium(m, width=700, height=500)

# Initialize session state for bbox
if 'bbox' not in st.session_state:
    st.session_state.bbox = [6.0, 46.0, 10.0, 47.0]  # Default values for Switzerland

# Update bbox if a new one is drawn: reduce the drawn ring to its
# axis-aligned envelope [xmin, ymin, xmax, ymax].
if output['last_active_drawing']:
    coordinates = output['last_active_drawing']['geometry']['coordinates'][0]
    st.session_state.bbox = [
        min(coord[0] for coord in coordinates),
        min(coord[1] for coord in coordinates),
        max(coord[0] for coord in coordinates),
        max(coord[1] for coord in coordinates)
    ]

# Display and allow editing of bounding box coordinates
st.subheader("Enter Bounding Box Coordinates")
col1, col2, col3, col4 = st.columns(4)
with col1:
    xmin = st.number_input("Min Longitude", value=st.session_state.bbox[0], format="%.4f", key="xmin")
with col2:
    ymin = st.number_input("Min Latitude", value=st.session_state.bbox[1], format="%.4f", key="ymin")
with col3:
    xmax = st.number_input("Max Longitude", value=st.session_state.bbox[2], format="%.4f", key="xmax")
with col4:
    ymax = st.number_input("Max Latitude", value=st.session_state.bbox[3], format="%.4f", key="ymax")

# Update session state if coordinates are manually changed
st.session_state.bbox = [xmin, ymin, xmax, ymax]

if st.session_state.bbox:
    st.write(f"Selected bounding box (WGS84): {st.session_state.bbox}")
    # Returns (bbox_wgs84, bbox_lv95), or None when outside Switzerland.
    bbox_results = detect_and_convert_bbox(st.session_state.bbox)
    if bbox_results:
        bbox_wgs84, bbox_lv95 = bbox_results
        st.write(f"Converted bounding box (LV95): {bbox_lv95}")

        if st.button("Get Download Package"):
            with st.spinner("Preparing download package..."):
                urls = get_urls(bbox_wgs84, data_types, resolutions)
                if urls:
                    zip_data = prepare_download_package(urls, bbox_wgs84, ortho_format)
                    # Embed the ZIP as a base64 data: URI inside an anchor tag.
                    b64 = base64.b64encode(zip_data).decode()
                    href = f'<a href="data:application/zip;base64,{b64}" download="swiss_geospatial_data.zip">Download All Data</a>'
                    st.markdown(href, unsafe_allow_html=True)
                    st.success("Download package prepared. Click the link above to download.")
                else:
                    st.warning("No files found for the selected area and options.")

        if st.button("Download Forest Data"):
            with st.spinner("Downloading forest data..."):
                # The ArcGIS query wants LV95 coordinates, hence bbox_lv95.
                with tempfile.NamedTemporaryFile(mode='w', delete=False, suffix='.geojson') as tmp:
                    geojson_forest(bbox_lv95, tmp.name)
                    gdf = gpd.read_file(tmp.name)
                    st.write(gdf)
                    # Provide download link for forest data
                    with open(tmp.name, 'r') as f:
                        forest_data = f.read()
                    b64 = base64.b64encode(forest_data.encode()).decode()
                    href = f'<a href="data:application/json;base64,{b64}" download="forest_data.geojson">Download Forest Data</a>'
                    st.markdown(href, unsafe_allow_html=True)
                    os.unlink(tmp.name)
                    st.success("Forest data prepared. Click the link above to download.")
    else:
        st.error("Selected area is outside Switzerland. Please select an area within Switzerland.")

st.sidebar.info("This application allows you to download various types of geospatial data for Switzerland. Select the data types you want, draw a bounding box on the map, and click 'Get Download Package' to prepare all data for download.")