Update pages/12_🌲_VertXtractor.py
Browse files- pages/12_🌲_VertXtractor.py +144 -88
pages/12_🌲_VertXtractor.py
CHANGED
@@ -10,6 +10,12 @@ from osgeo import gdal
|
|
10 |
import io
|
11 |
import zipfile
|
12 |
import base64
|
|
|
|
|
|
|
|
|
|
|
|
|
13 |
|
14 |
# Constants
|
15 |
CATEGORIES = {
|
@@ -32,7 +38,7 @@ DIC_LAYERS = {
|
|
32 |
|
33 |
# Helper functions
|
34 |
def wgs84_to_lv95(lat, lon):
|
35 |
-
url = f'http://geodesy.geo.admin.ch/reframe/wgs84tolv95?easting={
|
36 |
with urllib.request.urlopen(url) as response:
|
37 |
data = json.load(response)
|
38 |
return data['easting'], data['northing']
|
@@ -41,7 +47,7 @@ def lv95_to_wgs84(x, y):
|
|
41 |
url = f'http://geodesy.geo.admin.ch/reframe/lv95towgs84?easting={x}&northing={y}&format=json'
|
42 |
with urllib.request.urlopen(url) as response:
|
43 |
data = json.load(response)
|
44 |
-
return data['
|
45 |
|
46 |
def detect_and_convert_bbox(bbox):
|
47 |
xmin, ymin, xmax, ymax = bbox
|
@@ -67,8 +73,8 @@ def detect_and_convert_bbox(bbox):
|
|
67 |
wgs84_bounds['xmin'] <= xmax <= wgs84_bounds['xmax'] and
|
68 |
wgs84_bounds['ymin'] <= ymax <= wgs84_bounds['ymax']):
|
69 |
|
70 |
-
lv95_min = wgs84_to_lv95(
|
71 |
-
lv95_max = wgs84_to_lv95(
|
72 |
|
73 |
bbox_lv95 = (lv95_min[0], lv95_min[1], lv95_max[0], lv95_max[1])
|
74 |
return (bbox, bbox_lv95)
|
@@ -81,7 +87,7 @@ def detect_and_convert_bbox(bbox):
|
|
81 |
wgs84_min = lv95_to_wgs84(xmin, ymin)
|
82 |
wgs84_max = lv95_to_wgs84(xmax, ymax)
|
83 |
|
84 |
-
bbox_wgs84 = (wgs84_min,
|
85 |
return (bbox_wgs84, bbox)
|
86 |
|
87 |
return None
|
@@ -165,55 +171,80 @@ def suppr_doublons_list_mnt(lst):
|
|
165 |
@st.cache_data
|
166 |
def get_urls(bbox_wgs84, data_types, resolutions):
|
167 |
urls = []
|
168 |
-
|
169 |
-
|
170 |
-
|
171 |
-
|
172 |
-
|
173 |
-
|
174 |
-
|
175 |
-
|
176 |
-
|
177 |
-
|
178 |
-
|
179 |
-
|
180 |
-
|
181 |
-
|
182 |
-
|
183 |
-
lst = get_list_from_STAC_swisstopo(url, *bbox_wgs84)
|
184 |
-
lst = suppr_doublons_bati3D_v2(lst)
|
185 |
-
elif data_type == 'bati3D_v3':
|
186 |
-
lst = get_list_from_STAC_swisstopo(url, *bbox_wgs84, gdb=True)
|
187 |
-
lst = suppr_doublons_bati3D_v3(lst)
|
188 |
-
urls.extend(lst)
|
189 |
return urls
|
190 |
|
191 |
-
def
|
192 |
-
|
193 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
194 |
|
195 |
-
|
|
|
|
|
196 |
|
197 |
-
|
198 |
-
|
199 |
-
|
200 |
-
|
201 |
-
|
202 |
-
|
203 |
-
|
204 |
-
|
205 |
-
|
206 |
-
|
207 |
-
|
208 |
-
|
209 |
-
|
210 |
-
url = url_base + query_string
|
211 |
-
|
212 |
-
with urllib.request.urlopen(url) as response:
|
213 |
-
data = json.load(response)
|
214 |
|
215 |
-
|
216 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
217 |
|
218 |
def create_geojson_with_links(urls, bbox):
|
219 |
features = []
|
@@ -237,32 +268,7 @@ def create_geojson_with_links(urls, bbox):
|
|
237 |
}
|
238 |
return json.dumps(geojson)
|
239 |
|
240 |
-
|
241 |
-
with tempfile.TemporaryDirectory() as temp_dir:
|
242 |
-
local_files = []
|
243 |
-
for i, url in enumerate(urls):
|
244 |
-
local_filename = os.path.join(temp_dir, f"ortho_{i}.tif")
|
245 |
-
urllib.request.urlretrieve(url, local_filename)
|
246 |
-
local_files.append(local_filename)
|
247 |
-
|
248 |
-
vrt_options = gdal.BuildVRTOptions(resampleAlg='nearest', addAlpha=False)
|
249 |
-
vrt_path = os.path.join(temp_dir, "merged.vrt")
|
250 |
-
vrt = gdal.BuildVRT(vrt_path, local_files, options=vrt_options)
|
251 |
-
vrt = None # Close the dataset
|
252 |
-
|
253 |
-
output_path = os.path.join(temp_dir, f"merged.{output_format.lower()}")
|
254 |
-
if output_format == 'GTiff':
|
255 |
-
translate_options = gdal.TranslateOptions(format="GTiff", creationOptions=["COMPRESS=LZW", "TILED=YES"])
|
256 |
-
elif output_format == 'JPEG':
|
257 |
-
translate_options = gdal.TranslateOptions(format="JPEG", creationOptions=["QUALITY=85"])
|
258 |
-
elif output_format == 'PNG':
|
259 |
-
translate_options = gdal.TranslateOptions(format="PNG", creationOptions=["COMPRESS=DEFLATE"])
|
260 |
-
|
261 |
-
gdal.Translate(output_path, vrt_path, options=translate_options)
|
262 |
-
|
263 |
-
with open(output_path, 'rb') as f:
|
264 |
-
return f.read()
|
265 |
-
|
266 |
def prepare_download_package(urls, bbox, ortho_format):
|
267 |
geojson_data = create_geojson_with_links(urls, bbox)
|
268 |
ortho_urls = [url for url in urls if 'swissimage-dop10' in url]
|
@@ -276,6 +282,33 @@ def prepare_download_package(urls, bbox, ortho_format):
|
|
276 |
|
277 |
return zip_buffer.getvalue()
|
278 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
279 |
# Streamlit app
|
280 |
st.set_page_config(page_title="Swiss Geospatial Data Downloader", layout="wide")
|
281 |
|
@@ -299,22 +332,45 @@ resolutions = {
|
|
299 |
ortho_format = st.sidebar.selectbox("Ortho Output Format", ['GTiff', 'JPEG', 'PNG'], index=0)
|
300 |
|
301 |
# Main content area
|
302 |
-
st.subheader("
|
303 |
-
|
304 |
-
|
305 |
-
|
306 |
-
|
307 |
-
|
308 |
-
|
309 |
-
|
310 |
-
|
311 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
312 |
|
|
|
313 |
if 'bbox' not in st.session_state:
|
314 |
st.session_state.bbox = None
|
315 |
|
316 |
-
if st.button("
|
317 |
-
st.session_state.
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
318 |
|
319 |
if st.session_state.bbox:
|
320 |
st.write(f"Selected bounding box (WGS84): {st.session_state.bbox}")
|
@@ -356,4 +412,4 @@ if st.session_state.bbox:
|
|
356 |
else:
|
357 |
st.error("Selected area is outside Switzerland. Please select an area within Switzerland.")
|
358 |
|
359 |
-
st.sidebar.info("This application allows you to download various types of geospatial data for Switzerland. Select the data types you want,
|
|
|
10 |
import io
|
11 |
import zipfile
|
12 |
import base64
|
13 |
+
import concurrent.futures
|
14 |
+
import requests
|
15 |
+
from functools import partial
|
16 |
+
import folium
|
17 |
+
from streamlit_folium import folium_static
|
18 |
+
from folium.plugins import Draw
|
19 |
|
20 |
# Constants
|
21 |
CATEGORIES = {
|
|
|
38 |
|
39 |
# Helper functions
|
40 |
def wgs84_to_lv95(lat, lon):
    """Convert a WGS84 point to Swiss LV95 via the swisstopo REFRAME service.

    Args:
        lat: latitude in decimal degrees.
        lon: longitude in decimal degrees.

    Returns:
        (easting, northing) tuple in LV95 (EPSG:2056).
    """
    # REFRAME maps easting=longitude and northing=latitude.
    # Use HTTPS: the service supports it and coordinates should not go over plaintext HTTP.
    url = f'https://geodesy.geo.admin.ch/reframe/wgs84tolv95?easting={lon}&northing={lat}&format=json'
    with urllib.request.urlopen(url) as response:
        data = json.load(response)
    return data['easting'], data['northing']
|
|
|
47 |
def lv95_to_wgs84(x, y):
    """Convert a Swiss LV95 point to WGS84 via the swisstopo REFRAME service.

    Args:
        x: easting in LV95 (EPSG:2056).
        y: northing in LV95.

    Returns:
        (lat, lon) tuple in WGS84 decimal degrees.
    """
    # Use HTTPS: the service supports it and coordinates should not go over plaintext HTTP.
    url = f'https://geodesy.geo.admin.ch/reframe/lv95towgs84?easting={x}&northing={y}&format=json'
    with urllib.request.urlopen(url) as response:
        data = json.load(response)
    # REFRAME returns easting=longitude, northing=latitude; swap to (lat, lon).
    return data['northing'], data['easting']
|
51 |
|
52 |
def detect_and_convert_bbox(bbox):
|
53 |
xmin, ymin, xmax, ymax = bbox
|
|
|
73 |
wgs84_bounds['xmin'] <= xmax <= wgs84_bounds['xmax'] and
|
74 |
wgs84_bounds['ymin'] <= ymax <= wgs84_bounds['ymax']):
|
75 |
|
76 |
+
lv95_min = wgs84_to_lv95(ymin, xmin)
|
77 |
+
lv95_max = wgs84_to_lv95(ymax, xmax)
|
78 |
|
79 |
bbox_lv95 = (lv95_min[0], lv95_min[1], lv95_max[0], lv95_max[1])
|
80 |
return (bbox, bbox_lv95)
|
|
|
87 |
wgs84_min = lv95_to_wgs84(xmin, ymin)
|
88 |
wgs84_max = lv95_to_wgs84(xmax, ymax)
|
89 |
|
90 |
+
bbox_wgs84 = (wgs84_min[1], wgs84_min[0], wgs84_max[1], wgs84_max[0])
|
91 |
return (bbox_wgs84, bbox)
|
92 |
|
93 |
return None
|
|
|
171 |
@st.cache_data
def get_urls(bbox_wgs84, data_types, resolutions):
    """Collect download URLs for every enabled data type, fetching in parallel.

    Args:
        bbox_wgs84: (xmin, ymin, xmax, ymax) in WGS84.
        data_types: mapping of data-type name -> bool (enabled flag).
        resolutions: mapping of data-type name -> resolution, where applicable.

    Returns:
        Flat list of asset URLs across all enabled data types.
    """
    collected = []
    with concurrent.futures.ThreadPoolExecutor(max_workers=5) as pool:
        # Submit one STAC lookup per enabled data type.
        pending = {}
        for data_type, enabled in data_types.items():
            if not enabled:
                continue
            fut = pool.submit(
                get_urls_for_data_type,
                data_type,
                bbox_wgs84,
                resolutions.get(data_type),
            )
            pending[fut] = data_type
        # Gather results as they complete; a failure in one type does not
        # abort the others — it is surfaced in the UI instead.
        for fut in concurrent.futures.as_completed(pending):
            data_type = pending[fut]
            try:
                collected.extend(fut.result())
            except Exception as exc:
                st.error(f"Error fetching URLs for {data_type}: {exc}")
    return collected
|
190 |
|
191 |
+
def get_urls_for_data_type(data_type, bbox_wgs84, resolution=None):
    """Return de-duplicated swisstopo STAC asset URLs for one data type.

    Args:
        data_type: one of 'mnt', 'ortho', 'mns', 'bati3D_v2', 'bati3D_v3'.
        bbox_wgs84: (xmin, ymin, xmax, ymax) in WGS84.
        resolution: resolution token used to filter 'mnt'/'ortho' assets.

    Returns:
        List of URLs; empty list for an unknown data type.
    """
    url = URL_STAC_SWISSTOPO_BASE + DIC_LAYERS[data_type]

    if data_type in ['mnt', 'ortho']:
        # Keep only assets whose filename carries the requested resolution.
        tri = f'_{resolution}_'
        candidates = get_list_from_STAC_swisstopo(url, *bbox_wgs84)
        lst = [v for v in candidates if tri in v]
        return suppr_doublons_list_mnt(lst) if data_type == 'mnt' else suppr_doublons_list_ortho(lst)

    if data_type == 'mns':
        # Surface model: only the raster flavour is wanted.
        lst = [v for v in get_list_from_STAC_swisstopo(url, *bbox_wgs84) if 'raster' in v]
        return suppr_doublons_list_mnt(lst)

    if data_type == 'bati3D_v2':
        return suppr_doublons_bati3D_v2(get_list_from_STAC_swisstopo(url, *bbox_wgs84))

    if data_type == 'bati3D_v3':
        # v3 buildings ship as a geodatabase, hence gdb=True.
        return suppr_doublons_bati3D_v3(get_list_from_STAC_swisstopo(url, *bbox_wgs84, gdb=True))

    return []
|
210 |
|
211 |
+
def fetch_url(url):
    """Download *url* and return the response body as bytes.

    Raises:
        requests.HTTPError: on a 4xx/5xx status, so callers (e.g.
            merge_ortho_images) report the failure instead of silently
            writing an HTML error page to disk as a .tif.
        requests.Timeout: if the server does not respond within 60 s —
            without a timeout a stalled connection would hang the worker
            thread forever.
    """
    response = requests.get(url, timeout=60)
    response.raise_for_status()
    return response.content
|
214 |
|
215 |
+
def merge_ortho_images(urls, output_format='GTiff'):
    """Download ortho tiles in parallel, mosaic them with a GDAL VRT, and
    return the merged raster encoded in *output_format* as bytes.

    Args:
        urls: iterable of tile download URLs.
        output_format: 'GTiff', 'JPEG' or 'PNG'.

    Returns:
        The encoded merged raster as bytes.

    Raises:
        ValueError: for an unsupported output format (previously this fell
            through and crashed later with an unbound ``translate_options``).
        RuntimeError: if no tile could be downloaded.
    """
    if output_format not in ('GTiff', 'JPEG', 'PNG'):
        raise ValueError(f"Unsupported output format: {output_format}")

    with tempfile.TemporaryDirectory() as temp_dir:
        # Fetch all tiles concurrently; failures are reported but do not
        # abort the merge of the tiles that did arrive.
        with concurrent.futures.ThreadPoolExecutor(max_workers=5) as executor:
            future_to_url = {executor.submit(fetch_url, url): url for url in urls}
            for i, future in enumerate(concurrent.futures.as_completed(future_to_url)):
                url = future_to_url[future]
                try:
                    data = future.result()
                    local_filename = os.path.join(temp_dir, f"ortho_{i}.tif")
                    with open(local_filename, 'wb') as f:
                        f.write(data)
                except Exception as exc:
                    st.error(f"Error downloading {url}: {exc}")

        local_files = [os.path.join(temp_dir, f) for f in os.listdir(temp_dir) if f.endswith('.tif')]
        if not local_files:
            # Every download failed (or urls was empty); BuildVRT would fail on [].
            raise RuntimeError("No ortho images could be downloaded; nothing to merge.")

        vrt_options = gdal.BuildVRTOptions(resampleAlg='nearest', addAlpha=False)
        vrt_path = os.path.join(temp_dir, "merged.vrt")
        vrt = gdal.BuildVRT(vrt_path, local_files, options=vrt_options)
        vrt = None  # Close the dataset so the VRT is flushed to disk before Translate reads it.

        output_path = os.path.join(temp_dir, f"merged.{output_format.lower()}")
        if output_format == 'GTiff':
            translate_options = gdal.TranslateOptions(format="GTiff", creationOptions=["COMPRESS=LZW", "TILED=YES"])
        elif output_format == 'JPEG':
            translate_options = gdal.TranslateOptions(format="JPEG", creationOptions=["QUALITY=85"])
        else:  # PNG — guaranteed by the guard above
            # NOTE(review): GDAL's PNG driver has no COMPRESS creation option
            # (the original "COMPRESS=DEFLATE" was ignored); ZLEVEL sets the
            # deflate level. Confirm against the GDAL PNG driver docs.
            translate_options = gdal.TranslateOptions(format="PNG", creationOptions=["ZLEVEL=6"])

        gdal.Translate(output_path, vrt_path, options=translate_options)

        with open(output_path, 'rb') as f:
            return f.read()
|
248 |
|
249 |
def create_geojson_with_links(urls, bbox):
|
250 |
features = []
|
|
|
268 |
}
|
269 |
return json.dumps(geojson)
|
270 |
|
271 |
+
@st.cache_data
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
272 |
def prepare_download_package(urls, bbox, ortho_format):
|
273 |
geojson_data = create_geojson_with_links(urls, bbox)
|
274 |
ortho_urls = [url for url in urls if 'swissimage-dop10' in url]
|
|
|
282 |
|
283 |
return zip_buffer.getvalue()
|
284 |
|
285 |
+
def geojson_forest(bbox, fn_geojson):
    """Fetch ground-cover polygons intersecting *bbox* from the HEPIA
    TLM_C4D feature service and write them to *fn_geojson* as GeoJSON.

    Args:
        bbox: (xmin, ymin, xmax, ymax) in LV95 (EPSG:2056).
        fn_geojson: output path for the GeoJSON file.
    """
    xmin, ymin, xmax, ymax = bbox
    url_base = 'https://hepiadata.hesge.ch/arcgis/rest/services/suisse/TLM_C4D_couverture_sol/FeatureServer/1/query?'

    # Restrict the query to the object classes the app knows about.
    sql = ' OR '.join(f"OBJEKTART='{cat}'" for cat in CATEGORIES.keys())

    params = {
        "geometry": f"{xmin},{ymin},{xmax},{ymax}",
        "geometryType": "esriGeometryEnvelope",
        "returnGeometry": "true",
        "outFields": "OBJEKTART",
        "orderByFields": "OBJEKTART",
        "where": sql,
        "returnZ": "true",
        "outSR": '2056',
        "spatialRel": "esriSpatialRelIntersects",
        "f": "geojson"
    }
    full_url = url_base + urllib.parse.urlencode(params)

    with urllib.request.urlopen(full_url) as response:
        payload = json.load(response)

    with open(fn_geojson, 'w') as f:
        json.dump(payload, f)
|
311 |
+
|
312 |
# Streamlit app
|
313 |
st.set_page_config(page_title="Swiss Geospatial Data Downloader", layout="wide")
|
314 |
|
|
|
332 |
ortho_format = st.sidebar.selectbox("Ortho Output Format", ['GTiff', 'JPEG', 'PNG'], index=0)
|
333 |
|
334 |
# Main content area
|
335 |
+
st.subheader("Select Bounding Box")

# Create a map centered on Switzerland (approximate geographic center).
m = folium.Map(location=[46.8182, 8.2275], zoom_start=8)

# Add a draw toolbar restricted to rectangles, so users can only outline a bbox.
draw = folium.plugins.Draw(
    export=False,
    position='topleft',
    draw_options={
        'rectangle': True,
        'polygon': False,
        'polyline': False,
        'circle': False,
        'marker': False,
        'circlemarker': False
    }
)
draw.add_to(m)

# Display the map.
# NOTE(review): folium_static renders a one-way snapshot — it does not return
# draw events back to Streamlit, so the drawn rectangle's coordinates may never
# reach st.session_state. Confirm whether st_folium should be used instead.
folium_static(m)
|
357 |
|
358 |
+
# Get bbox from drawn rectangle
if 'bbox' not in st.session_state:
    st.session_state.bbox = None

if st.button("Get Bounding Box"):
    # NOTE(review): nothing visible in this file writes st.session_state["json_data"];
    # folium_static does not feed draw results back to Streamlit, so this lookup
    # may always return None — verify what is expected to populate "json_data".
    draw_data = st.session_state.get("json_data")
    if draw_data and "features" in draw_data:
        # Only the first drawn feature is considered.
        feature = draw_data["features"][0]
        if feature["geometry"]["type"] == "Polygon":
            coords = feature["geometry"]["coordinates"][0]
            # GeoJSON coordinates are (lon, lat); the bbox is stored as
            # [xmin, ymin, xmax, ymax] in WGS84.
            st.session_state.bbox = [
                min(coord[0] for coord in coords),
                min(coord[1] for coord in coords),
                max(coord[0] for coord in coords),
                max(coord[1] for coord in coords)
            ]
|
374 |
|
375 |
if st.session_state.bbox:
|
376 |
st.write(f"Selected bounding box (WGS84): {st.session_state.bbox}")
|
|
|
412 |
else:
|
413 |
st.error("Selected area is outside Switzerland. Please select an area within Switzerland.")
|
414 |
|
415 |
+
# Sidebar footer: one-line usage summary for the whole app.
st.sidebar.info("This application allows you to download various types of geospatial data for Switzerland. Select the data types you want, draw a bounding box on the map, and click 'Get Download Package' to prepare all data for download.")
|