Update pages/5_📍_VertXtractor.py
Browse files- pages/5_📍_VertXtractor.py +122 -236
pages/5_📍_VertXtractor.py
CHANGED
@@ -1,271 +1,157 @@
|
|
1 |
-
import
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
2 |
import geopandas as gpd
|
3 |
import rasterio
|
4 |
import numpy as np
|
5 |
from pyproj import Transformer
|
6 |
-
import trimesh
|
7 |
-
import logging
|
8 |
-
from io import BytesIO
|
9 |
-
import folium
|
10 |
-
from streamlit_folium import folium_static, st_folium
|
11 |
-
import matplotlib.pyplot as plt
|
12 |
-
from mpl_toolkits.mplot3d import Axes3D
|
13 |
-
import random
|
14 |
-
import requests
|
15 |
-
from rasterio.io import MemoryFile
|
16 |
|
17 |
# Configuration du logging
|
18 |
-
logging.basicConfig(level=logging.INFO)
|
19 |
logger = logging.getLogger(__name__)
|
20 |
|
21 |
-
|
22 |
-
|
23 |
-
|
24 |
-
|
|
|
|
|
25 |
"""
|
26 |
-
|
27 |
-
params = {
|
28 |
-
"bbox": ",".join(map(str, bbox)),
|
29 |
-
"limit": 1
|
30 |
-
}
|
31 |
|
32 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
33 |
|
|
|
|
|
|
|
34 |
try:
|
35 |
-
|
36 |
-
|
37 |
-
st.write("Contenu de la réponse :", response.text)
|
38 |
-
|
39 |
response.raise_for_status()
|
40 |
-
|
41 |
|
42 |
-
|
43 |
-
|
44 |
-
|
45 |
-
|
46 |
-
|
47 |
-
|
48 |
-
|
49 |
-
|
50 |
-
|
51 |
-
resolution = float(asset_info.get('eo', float('inf')))
|
52 |
-
if resolution < best_resolution:
|
53 |
-
best_resolution = resolution
|
54 |
-
tiff_asset = asset_info
|
55 |
-
|
56 |
-
if tiff_asset:
|
57 |
-
asset_url = tiff_asset['href']
|
58 |
-
st.write(f"URL de l'asset sélectionné : {asset_url}")
|
59 |
-
response = requests.get(asset_url)
|
60 |
response.raise_for_status()
|
61 |
-
|
62 |
-
|
63 |
-
|
64 |
-
|
65 |
-
|
66 |
-
|
67 |
-
|
68 |
-
|
69 |
-
st.error("Aucune donnée trouvée dans la réponse de l'API")
|
70 |
except requests.RequestException as e:
|
71 |
-
|
72 |
-
|
73 |
-
st.error(f"Erreur lors du décodage JSON : {str(e)}")
|
74 |
-
except Exception as e:
|
75 |
-
st.error(f"Une erreur inattendue s'est produite : {str(e)}")
|
76 |
-
|
77 |
-
return None, None
|
78 |
|
79 |
-
def
|
80 |
"""
|
81 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
82 |
"""
|
83 |
-
|
84 |
-
|
85 |
-
|
86 |
-
|
87 |
-
|
88 |
-
|
89 |
-
|
90 |
-
|
91 |
-
|
92 |
-
|
93 |
-
|
94 |
-
invalid_values.append('Le DEM contient des valeurs NaN.')
|
95 |
-
if np.any(np.isinf(dem)):
|
96 |
-
invalid_values.append('Le DEM contient des valeurs infinies.')
|
97 |
-
return invalid_values
|
98 |
|
99 |
-
def
|
100 |
"""
|
101 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
102 |
"""
|
103 |
try:
|
104 |
-
|
105 |
-
|
106 |
-
raise ValueError('Le MNT est trop petit pour créer un maillage.')
|
107 |
-
|
108 |
-
invalid_values = check_invalid_values(dem)
|
109 |
-
if invalid_values:
|
110 |
-
raise ValueError('\n'.join(invalid_values))
|
111 |
-
|
112 |
-
x, y = np.meshgrid(np.arange(width), np.arange(height))
|
113 |
-
lon, lat = rasterio.transform.xy(transform, y, x)
|
114 |
-
|
115 |
-
vertices = np.column_stack((lon.flatten(), lat.flatten(), dem.flatten()))
|
116 |
-
faces = []
|
117 |
-
for i in range(height - 1):
|
118 |
-
for j in range(width - 1):
|
119 |
-
idx = i * width + j
|
120 |
-
faces.append([idx, idx + 1, idx + width])
|
121 |
-
faces.append([idx + 1, idx + width + 1, idx + width])
|
122 |
|
123 |
-
|
124 |
|
125 |
-
|
126 |
-
|
|
|
127 |
|
128 |
-
return
|
129 |
-
except
|
130 |
-
logger.error(f"
|
131 |
-
|
132 |
-
return None
|
133 |
-
|
134 |
-
def visualize_mesh(mesh):
    """
    Build a 3D trisurf visualization of a triangular mesh.

    Args:
        mesh: Mesh object exposing ``vertices`` (N x 3 array) and ``faces``.

    Returns:
        The matplotlib Figure containing the 3D plot.
    """
    fig = plt.figure()
    axes = fig.add_subplot(111, projection='3d')
    xs = mesh.vertices[:, 0]
    ys = mesh.vertices[:, 1]
    zs = mesh.vertices[:, 2]
    axes.plot_trisurf(xs, ys, zs, triangles=mesh.faces, cmap='viridis')
    for setter, label in ((axes.set_xlabel, 'X'),
                          (axes.set_ylabel, 'Y'),
                          (axes.set_zlabel, 'Z')):
        setter(label)
    return fig
|
146 |
-
|
147 |
-
def visualize_mesh_wireframe(mesh):
    """
    Build a wireframe (edges-only) visualization of a triangular mesh.

    Args:
        mesh: Mesh object exposing ``vertices`` (N x 3 array) and ``faces``.

    Returns:
        The matplotlib Figure containing the 3D plot.
    """
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    # BUGFIX: Axes3D.plot_wireframe expects 2-D X/Y/Z grids and has no
    # 'triangles' keyword, so the original call raised TypeError for an
    # unstructured triangular mesh. Draw only the triangle edges via
    # plot_trisurf with transparent faces instead.
    ax.plot_trisurf(mesh.vertices[:, 0], mesh.vertices[:, 1], mesh.vertices[:, 2],
                    triangles=mesh.faces, color='none', edgecolor='black')
    ax.set_xlabel('X')
    ax.set_ylabel('Y')
    ax.set_zlabel('Z')
    return fig
|
159 |
|
160 |
-
def
|
161 |
"""
|
162 |
-
|
|
|
|
|
|
|
|
|
163 |
"""
|
164 |
try:
|
165 |
-
|
166 |
-
|
167 |
-
return obj_file.getvalue()
|
168 |
-
except Exception as e:
|
169 |
-
logger.error(f"Erreur lors de l'export pour Blender: {str(e)}")
|
170 |
-
st.error(f"Erreur lors de l'export pour Blender: {str(e)}")
|
171 |
-
return None
|
172 |
-
|
173 |
-
def main():
|
174 |
-
st.title("Mesh Tiler CH - Streamlit Edition")
|
175 |
-
st.write("Sélectionnez une zone en Suisse pour créer un maillage 3D.")
|
176 |
-
|
177 |
-
input_method = st.radio("Choisissez la méthode de sélection de la zone :",
|
178 |
-
("Dessiner sur la carte", "Entrer les coordonnées", "Point aléatoire"))
|
179 |
-
|
180 |
-
bbox_coords = None
|
181 |
-
|
182 |
-
if input_method == "Point aléatoire":
|
183 |
-
minx, maxx = 5.9, 10.5
|
184 |
-
miny, maxy = 45.8, 47.8
|
185 |
|
186 |
-
|
187 |
-
|
188 |
-
rand_miny = random.uniform(miny, maxy)
|
189 |
-
rand_maxy = random.uniform(rand_miny, maxy)
|
190 |
|
191 |
-
|
192 |
-
|
193 |
-
|
194 |
-
|
195 |
-
|
196 |
-
|
197 |
-
|
198 |
-
|
199 |
-
|
200 |
-
|
201 |
-
|
202 |
-
|
203 |
-
|
204 |
-
edit_options={'edit': False}
|
205 |
-
)
|
206 |
-
draw.add_to(m)
|
207 |
-
|
208 |
-
output = st_folium(m, width=700, height=500)
|
209 |
-
|
210 |
-
if output['last_active_drawing']:
|
211 |
-
bbox = output['last_active_drawing']['geometry']['coordinates'][0]
|
212 |
-
bbox_coords = (
|
213 |
-
min(coord[0] for coord in bbox),
|
214 |
-
min(coord[1] for coord in bbox),
|
215 |
-
max(coord[0] for coord in bbox),
|
216 |
-
max(coord[1] for coord in bbox)
|
217 |
-
)
|
218 |
-
st.write(f"Zone sélectionnée : {bbox_coords}")
|
219 |
-
|
220 |
-
else:
|
221 |
-
col1, col2 = st.columns(2)
|
222 |
-
with col1:
|
223 |
-
minx = st.number_input("Min X", value=7.0)
|
224 |
-
miny = st.number_input("Min Y", value=46.0)
|
225 |
-
with col2:
|
226 |
-
maxx = st.number_input("Max X", value=8.0)
|
227 |
-
maxy = st.number_input("Max Y", value=47.0)
|
228 |
|
229 |
-
|
230 |
-
|
231 |
-
|
232 |
-
|
|
|
233 |
|
234 |
-
|
235 |
-
|
236 |
-
|
237 |
-
|
238 |
-
if mesh is not None:
|
239 |
-
visual_type = st.radio("Choisissez le type de visualisation:", ("3D Trisurf", "Wireframe"))
|
240 |
-
if visual_type == "3D Trisurf":
|
241 |
-
fig = visualize_mesh(mesh)
|
242 |
-
else:
|
243 |
-
fig = visualize_mesh_wireframe(mesh)
|
244 |
-
|
245 |
-
st.pyplot(fig)
|
246 |
-
|
247 |
-
if st.button("Exporter pour Blender", key="export_blender"):
|
248 |
-
blender_file = export_for_blender(mesh)
|
249 |
-
if blender_file is not None:
|
250 |
-
st.download_button("Télécharger le fichier Blender (.obj)",
|
251 |
-
blender_file,
|
252 |
-
file_name="mesh_export.obj",
|
253 |
-
mime="application/octet-stream",
|
254 |
-
key="download_blender")
|
255 |
-
|
256 |
-
st.markdown("""
|
257 |
-
## Guide d'utilisation
|
258 |
-
|
259 |
-
1. Choisissez entre dessiner sur la carte, entrer les coordonnées manuellement ou générer un point aléatoire.
|
260 |
-
2. Sélectionnez la zone d'intérêt en Suisse.
|
261 |
-
3. Cliquez sur "Créer le maillage" pour générer le modèle 3D.
|
262 |
-
4. Ajustez la résolution du maillage si nécessaire.
|
263 |
-
5. Choisissez le type de visualisation (3D Trisurf ou Wireframe).
|
264 |
-
6. Visualisez le maillage 3D généré.
|
265 |
-
7. Exportez le maillage au format OBJ pour Blender si souhaité.
|
266 |
-
|
267 |
-
Pour toute question ou problème, n'hésitez pas à contacter le support technique.
|
268 |
-
""")
|
269 |
|
270 |
-
|
271 |
-
main()
|
|
|
1 |
+
import os
|
2 |
+
import csv
|
3 |
+
import json
|
4 |
+
import logging
|
5 |
+
import requests
|
6 |
+
import urllib.request
|
7 |
+
from typing import Tuple, List, Optional
|
8 |
+
from osgeo import gdal
|
9 |
import geopandas as gpd
|
10 |
import rasterio
|
11 |
import numpy as np
|
12 |
from pyproj import Transformer
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
13 |
|
14 |
# Logging configuration: timestamped, leveled messages for the whole module.
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(message)s')
logger = logging.getLogger(__name__)

# Constants
# Raster/image file extensions handled by this tool.
# NOTE(review): not referenced in this chunk — confirm usage elsewhere in the file.
SUPPORTED_FORMATS = ['.tif', '.png', '.tiff', '.TIFF']
# Substring identifying elevation (non-image) layers, e.g. swissALTI3D.
# NOTE(review): not referenced in this chunk — confirm usage elsewhere in the file.
NON_IMAGE_LAYERS = 'swissalti'
# Base URL of the swisstopo STAC API (collections endpoint).
STAC_API_URL = "https://data.geo.admin.ch/api/stac/v0.9/collections/"
|
22 |
+
|
23 |
+
def get_items(product_name: str, ll_lon: str, ll_lat: str, ur_lon: str, ur_lat: str, first_100: bool = False) -> Tuple[List[str], bool]:
    """
    Retrieve download URLs of all STAC items intersecting an Area of Interest (AOI).

    Args:
        product_name (str): STAC collection name (e.g. 'ch.swisstopo.swissalti3d').
        ll_lon (str): Lower-left longitude of the AOI.
        ll_lat (str): Lower-left latitude of the AOI.
        ur_lon (str): Upper-right longitude of the AOI.
        ur_lat (str): Upper-right latitude of the AOI.
        first_100 (bool): If True, stop after the first page (roughly 100 items).

    Returns:
        Tuple[List[str], bool]: List of item URLs and a boolean indicating if
        there are more than 100 items.
    """
    try:
        url = f"{STAC_API_URL}{product_name}/items?bbox={ll_lon},{ll_lat},{ur_lon},{ur_lat}"
        # BUGFIX: a timeout prevents the call from hanging forever; a timeout
        # raises requests.Timeout, which the handler below already catches.
        response = requests.get(url, timeout=30)
        response.raise_for_status()
        items_result = response.json()

        assets = items_result.get('features', [])
        item_files = [asset['assets']['data']['href'] for asset in assets if 'data' in asset['assets']]

        # Heuristic: a paginated response carries extra pagination links
        # ('next'/'previous'), pushing the link count to 6 or more.
        more_than_100 = len(items_result.get('links', [])) >= 6

        if more_than_100 and not first_100:
            # Follow 'next' links until the last page is reached.
            while 'next' in [link['rel'] for link in items_result.get('links', [])]:
                next_url = next(link['href'] for link in items_result['links'] if link['rel'] == 'next')
                response = requests.get(next_url, timeout=30)
                response.raise_for_status()
                items_result = response.json()
                assets = items_result.get('features', [])
                item_files.extend([asset['assets']['data']['href'] for asset in assets if 'data' in asset['assets']])

        # BUGFIX: filter_items() probes items[0]; calling it with an empty list
        # raised IndexError, which escapes the RequestException handler below.
        if item_files:
            # Filter items based on specific conditions
            item_files = filter_items(item_files, product_name)

        return item_files, more_than_100
    except requests.RequestException as e:
        logger.error(f"Error retrieving items: {e}")
        return [], False
|
|
|
|
|
|
|
|
|
|
|
65 |
|
66 |
+
def filter_items(items: List[str], product_name: str) -> List[str]:
    """
    Filter item URLs down to a single consistent variant.

    The choice of variant is probed on the first URL in the list, so mixed
    result sets collapse to one resolution/flavor.

    Args:
        items (List[str]): List of item URLs.
        product_name (str): Name of the product (STAC collection).

    Returns:
        List[str]: Filtered list of item URLs.
    """
    # BUGFIX: the variant probes below index items[0]; an empty input list
    # raised IndexError. Return early instead.
    if not items:
        return items

    # Prefer the 'krel' variant when the first item is one.
    if "_krel_" in items[0]:
        items = [i for i in items if "_krel_" in i]

    # Prefer the 0.1 resolution variant when the first item is one.
    if "_0.1_" in items[0]:
        items = [i for i in items if "_0.1_" in i]

    # swissALTI3D: keep only the 0.5 m GeoTIFF assets.
    if product_name == 'ch.swisstopo.swissalti3d':
        items = [i for i in items if ".tif" in i]
        items = [i for i in items if "_0.5_" in i]

    return items
|
|
|
|
|
|
|
|
|
88 |
|
89 |
+
def create_csv(product_name: str, ll_lon: str, ll_lat: str, ur_lon: str, ur_lat: str) -> str:
    """
    Write the item URLs for an AOI into a CSV file, one URL per row.

    Args:
        product_name (str): Name of the product (STAC collection).
        ll_lon (str): Lower-left longitude.
        ll_lat (str): Lower-left latitude.
        ur_lon (str): Upper-right longitude.
        ur_lat (str): Upper-right latitude.

    Returns:
        str: Path to the created CSV file, or "" when writing failed.
    """
    try:
        bbox_tag = f"{ll_lon}_{ll_lat}_{ur_lon}_{ur_lat}"
        out_path = os.path.join(os.getcwd(), f"{product_name}{bbox_tag}.csv")

        # Fetch every item (all pages), not just the first 100.
        urls, _ = get_items(product_name, ll_lon, ll_lat, ur_lon, ur_lat, first_100=False)

        with open(out_path, 'w', newline='') as handle:
            csv.writer(handle).writerows([url] for url in urls)

        return out_path
    except IOError as e:
        logger.error(f"Error creating CSV file: {e}")
        return ""
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
117 |
|
118 |
+
def process_csv(csv_filepath: str, bbox: Optional[Tuple[float, float, float, float]] = None) -> None:
    """
    Download every URL listed in the CSV file, then merge (and optionally crop)
    the resulting rasters.

    Args:
        csv_filepath (str): Path to the CSV file (one download URL per line).
        bbox (Optional[Tuple[float, float, float, float]]): Bounding box for
            cropping (minx, miny, maxx, maxy); no cropping when None.
    """
    try:
        download_dir, order_name = os.path.split(os.path.abspath(csv_filepath))
        # NOTE: changes the process-wide working directory as a side effect.
        os.chdir(download_dir)

        with open(csv_filepath, 'r') as file:
            urls = file.readlines()

        total = len(urls)  # hoisted: was recomputed on every iteration
        for i, url in enumerate(urls):
            url = url.strip()
            filename = os.path.join(download_dir, url.rsplit('/', 1)[-1])

            if not os.path.isfile(filename):
                # BUGFIX: the progress message logged the literal "(unknown)"
                # instead of the file actually being downloaded.
                logger.info(f"Downloading file {i+1} of {total}: {filename}")
                urllib.request.urlretrieve(url, filename)

            if i == 0:
                # Estimate required disk space from the first file's size.
                check_disk_space(filename, total)

            # NOTE(review): 'args' is a module-level namespace (argparse?) not
            # visible in this chunk — confirm it is defined before this runs.
            if not args.noMERGE:
                merge_raster(i, filename, order_name, download_dir)

        if bbox and not args.noCROP and not args.noMERGE:
            logger.info("Cropping final raster...")
            crop_raster(os.path.join(download_dir, f"{order_name}_temp_merged.tif"),
                        os.path.join(download_dir, f"{order_name}_merged.tif"),
                        bbox)

        logger.info(f"Result saved in {download_dir}")
    except Exception as e:
        # Top-level boundary for this batch job: log with traceback
        # (logger.exception) instead of crashing or losing the stack.
        logger.exception(f"Error processing CSV: {e}")
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
156 |
|
157 |
+
# Other functions (merge_raster, crop_raster, etc.) should be similarly improved...
|
|