joshuasundance committed
Commit 50e151e · 1 Parent(s): 71a77f2

Upload to_mongo_4326reproj.py

Files changed (1):
  to_mongo_4326reproj.py  +98 -111
to_mongo_4326reproj.py CHANGED
@@ -3,13 +3,13 @@ import json
 import os
 import random
 import uuid
-from collections.abc import MutableMapping
 from glob import glob
-from typing import AsyncGenerator
+from typing import AsyncGenerator, Optional
 
 import aiofiles
 import nest_asyncio
 from motor.motor_asyncio import AsyncIOMotorClient
+from motor.motor_tornado import MotorCollection
 from pymongo.server_api import ServerApi
 from pyproj import CRS, Proj, Transformer
 from pyproj.exceptions import CRSError
@@ -19,6 +19,25 @@ from shapely.geometry import Polygon
 nest_asyncio.apply()
 
 
+async def safe_insert_many(collection: MotorCollection, documents: list[dict]) -> None:
+    if len(documents) > 0:
+        try:
+            # Attempt to insert the documents
+            await collection.insert_many(documents)
+        except OverflowError:
+            # If an OverflowError occurs, split the batch
+            if len(documents) > 1:
+                mid = len(documents) // 2
+                # Recursively attempt to insert each half
+                await safe_insert_many(collection, documents[:mid])
+                await safe_insert_many(collection, documents[mid:])
+            else:
+                # Handle the case where a single document is too large
+                raise ValueError(
+                    "A document exceeds the maximum BSON size or dtype conflict.",
+                )
+
+
 def create_uuid(input_str: str) -> str:
     # Consistent random UUIDs based on input string
     # https://nathanielknight.ca/articles/consistent_random_uuids_in_python.html
@@ -28,7 +47,7 @@ def create_uuid(input_str: str) -> str:
     )
 
 
-async def reproject_to_4326_and_convert_to_geojson(
+def reproject_to_4326_and_convert_to_geojson(
     bbox: dict,
 ) -> dict:
     """
@@ -45,14 +64,14 @@ async def reproject_to_4326_and_convert_to_geojson(
     def get_src_proj() -> Proj:
         sr = bbox["spatialReference"]
         for wkid in ["latestWkid", "wkid"]:
-            if wkid in sr:
+            if (sr_wkid := sr.get(wkid)) is not None:
                 for authority in ["EPSG", "ESRI"]:
                     try:
-                        return Proj(f"{authority}:{sr[wkid]}")
+                        return Proj(f"{authority}:{sr_wkid}")
                     except CRSError:
                         pass
-        if "wkt" in sr:
-            return Proj(CRS.from_wkt(sr["wkt"]))
+        if (sr_wkt := sr.get("wkt")) is not None:
+            return Proj(CRS.from_wkt(sr_wkt))
         raise ValueError("no spatialReference found")
 
     src_proj = get_src_proj()
@@ -86,48 +105,37 @@ async def reproject_to_4326_and_convert_to_geojson(
     return geojson
 
 
-async def process_nested_entries_v2(
-    input_data: dict,
-) -> dict:
-    required_keys = {"xmin", "ymin", "xmax", "ymax", "spatialReference"}
-
-    if not isinstance(input_data, (dict, list)):
-        raise ValueError("Input data must be a dictionary or list")
-
-    stack = [(None, input_data, None)]  # (parent, current, key/index)
-
-    while stack:
-        parent, current, key = stack.pop()
-
-        if (
-            isinstance(current, dict)
-            and required_keys.issubset(current.keys())
-            and not any(
-                str(current.get(coord, "nan")).lower() == "nan"
-                for coord in ["xmin", "ymin", "xmax", "ymax"]
-            )
-        ):
-            try:
-                result = await reproject_to_4326_and_convert_to_geojson(current)
-            except (ValueError, GEOSException, CRSError) as e:
-                print(current, e)
-                result = current  # Optionally keep the original item on error
-            if parent is not None:
-                if isinstance(parent, MutableMapping):
-                    parent[key] = result
-                else:  # List
-                    parent[key] = result
-        elif isinstance(current, MutableMapping):
-            for k, v in current.items():
-                stack.append((current, v, k))  # type: ignore
-        elif isinstance(current, list):
-            for i, item in enumerate(current):
-                stack.append((current, item, i))
-
-    return input_data
-
-
-def process_metadata(metadata: dict, additional_fields: dict = {}) -> dict:
+keepkeys = {
+    "id",
+    "associatedlayers",
+    "domains",
+    "copyrighttext",
+    "description",
+    "documentinfo",
+    "fields",
+    "mapname",
+    "name",
+    "parentlayer",
+    "servicedescription",
+    "subLayers",
+    "tables",
+    "version",
+    "currentversion",
+    "geometrytype",
+    "extent",
+    "type",
+    "url",
+    "server",
+    "layers",
+    "service",
+}
+
+
+async def process_metadata(
+    metadata: dict,
+    additional_fields: Optional[dict] = None,
+) -> dict:
+    additional_fields = additional_fields or {}
     # Process metadata and add any additional fields
     processed_md = {
         k: v for k, v in metadata.items() if k not in ["folders", "services", "layers"]
@@ -137,6 +145,17 @@ def process_metadata(metadata: dict, additional_fields: dict = {}) -> dict:
     processed_md["id"] = processed_md["hash"]
     del processed_md["hash"]
 
+    processed_md = {k: v for k, v in processed_md.items() if k in keepkeys}
+
+    if (
+        (extent := processed_md.get("extent")) is not None
+        and extent.get("spatialReference") is not None
+        and not any(str(v).lower() in {"nan", "none", "null"} for v in extent.values())
+    ):
+        try:
+            processed_md["extent"] = reproject_to_4326_and_convert_to_geojson(extent)
+        except (ValueError, GEOSException, CRSError) as e:
+            print(extent, e)
     return processed_md
 
 
@@ -144,7 +163,7 @@ def get_type(layer: dict) -> str:
     return layer.get("type", "unknown").lower().replace(" ", "_").strip()
 
 
-async def read_data(jsonfiles: list[str]) -> AsyncGenerator:
+async def read_data(jsonfiles: list[str]) -> AsyncGenerator[dict, None]:
     # Async generator to yield file content one by one
     for f in jsonfiles:
         async with aiofiles.open(f, "r") as infile:
@@ -152,10 +171,10 @@ async def read_data(jsonfiles: list[str]) -> AsyncGenerator:
         yield json.loads(content)
 
 
-async def process_server(server, services_collection, db):
+# Modified process_server function
+async def process_server(server: dict, layers_collection: MotorCollection):
     server_services = server.pop("services")
-    server = await process_nested_entries_v2(server)
-    server_md = process_metadata(
+    server_md = await process_metadata(
         server["metadata"],
         {
             "url": server["metadata"]["url"],
@@ -163,14 +182,9 @@ async def process_server(server, services_collection, db):
         },
     )
 
-    services_to_insert = []
-    layer_insert_tasks = []
-
+    layer_tasks = []
     for service in server_services:
-        service_layers = service["metadata"].pop("layers")
-        service = await process_nested_entries_v2(service)
-
-        service_md = process_metadata(
+        service_md = await process_metadata(
            service["metadata"],
            {
                "url": service["url"],
@@ -178,70 +192,43 @@ async def process_server(server, services_collection, db):
                "server": server_md,
            },
        )
-        service_md["layers"] = []
-
-        layer_dict = {}
-
-        for layer in service_layers:
-            layer = await process_nested_entries_v2(layer)
-            layer_md = process_metadata(
-                layer,
-                {
-                    "url": layer["url"],
-                    "hash": create_uuid(layer["url"]),
-                    "service": service_md["id"],
-                },
-            )
-            layer_type = get_type(layer)
-            service_md["layers"].append(
-                dict(type=layer_type, layer_id=layer_md["id"]),
-            )
-            if layer_type not in layer_dict:
-                layer_dict[layer_type] = []
-            layer_dict[layer_type].append(layer_md)
-
-        services_to_insert.append(service_md)
-
-    async def insert_layer(lyr_type: str, lyrs: list[dict]) -> None:
-        if len(lyrs) > 0:
-            try:
-                await db[lyr_type].insert_many(lyrs)
-            except OverflowError:
-                for layer in lyrs:
-                    try:
-                        await db[lyr_type].insert_one(layer)
-                    except OverflowError:
-                        for c in ["drawingInfo", "classBreakInfos"]:
-                            if c in layer:
-                                del layer[c]
-                        await db[lyr_type].insert_one(layer)
-
-    for layer_type, layers in layer_dict.items():
-        if layers:
-            layer_insert_tasks.append(
-                insert_layer(lyr_type=layer_type, lyrs=layers),
-            )
+        for layer in service["metadata"].pop("layers"):
+            task = asyncio.create_task(process_layer(layer, service_md))
+            layer_tasks.append(task)
 
-    # Insert services in batch
-    if services_to_insert:
-        await services_collection.insert_many(services_to_insert)
+    layers_md = await asyncio.gather(*layer_tasks)
+    await safe_insert_many(layers_collection, layers_md)
 
-    # Wait for all layer insert tasks to complete
-    await asyncio.gather(*layer_insert_tasks)
+
+# Modified process_layer function
+async def process_layer(layer: dict, service: dict) -> dict:
+    # Embed service metadata into layer
+    layer_md = await process_metadata(
+        layer,
+        {
+            "url": layer["url"],
+            "hash": create_uuid(layer["url"]),
+            "service": {**service},
+        },
+    )
+    return layer_md
 
 
 async def main() -> None:
-    output_dir = os.path.abspath("./output_tryagain")
+    output_dir = os.path.abspath("/home/appuser/restgdf_api/lab/output_tryagain")
     jsonfiles = glob(os.path.join(output_dir, "*.json"))
 
     client = AsyncIOMotorClient(
         r"mongodb://root:example@mongo:27017/",
         server_api=ServerApi("1"),
     )
-
     db = client["govgis-nov2023"]
-    services_collection = db.services
+    layers = db.layers
 
     # Process each server concurrently
     async for server in read_data(jsonfiles):
-        await process_server(server, services_collection, db)
+        await process_server(server, layers)
+
+
+if __name__ == "__main__":
+    asyncio.run(main())
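A note on the new safe_insert_many: pymongo raises OverflowError when a batch contains a document it cannot encode to BSON (for example, an integer wider than 8 bytes), and the whole insert_many call fails. Recursively bisecting the batch isolates the single offending document while everything else is still written in bulk. A minimal sketch of that behavior against a stub collection; FakeCollection and demo are hypothetical names, and safe_insert_many is assumed to be in scope from the file above:

import asyncio

class FakeCollection:
    # Duck-typed stub standing in for a Motor collection; no MongoDB needed.
    def __init__(self) -> None:
        self.inserted: list[dict] = []

    async def insert_many(self, documents: list[dict]) -> None:
        # Mimic pymongo: the whole batch fails if any document is bad.
        if any(doc.get("bad") for doc in documents):
            raise OverflowError("MongoDB can only handle up to 8-byte ints")
        self.inserted.extend(documents)

async def demo() -> None:
    coll = FakeCollection()
    docs = [{"i": 0}, {"i": 1, "bad": True}, {"i": 2}]
    try:
        await safe_insert_many(coll, docs)  # as defined in the commit above
    except ValueError as err:
        print("single oversized document isolated:", err)
    print("inserted before the failure:", coll.inserted)  # [{'i': 0}]

asyncio.run(demo())

Note that the ValueError propagates out of the outermost call, so batches not yet attempted ({"i": 2} here) are skipped once one bad document is found; callers who want best-effort inserts would need to catch it per batch.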
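create_uuid's body lies outside the hunk context, but the cited article describes the technique: seed a private random.Random with the input string and build a UUID from its bits, so the id is a pure function of the input. A sketch of that approach, not necessarily the file's exact body:

import random
import uuid

def create_uuid(input_str: str) -> str:
    # Sketch of the cited article's technique; the file's actual body is
    # outside the hunks. Equal inputs always yield the same 128 bits,
    # and therefore the same UUID.
    rng = random.Random()
    rng.seed(input_str)
    return str(uuid.UUID(int=rng.getrandbits(128), version=4))

# Re-running the pipeline regenerates identical ids for identical URLs:
assert create_uuid("https://host/arcgis/rest") == create_uuid("https://host/arcgis/rest")

This determinism is what lets the commit use create_uuid(layer["url"]) as a stable document id across runs.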
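Most of reproject_to_4326_and_convert_to_geojson also falls outside the hunks, but the imports (Proj, Transformer, Polygon) suggest the general flow: resolve the source CRS via get_src_proj, transform the bbox corners to EPSG:4326, and serialize the result as a GeoJSON polygon. A hypothetical sketch under those assumptions; bbox_to_4326_geojson is an illustrative name, not the file's code:

from pyproj import Proj, Transformer
from shapely.geometry import Polygon, mapping

def bbox_to_4326_geojson(bbox: dict, src_proj: Proj) -> dict:
    # always_xy=True keeps (x, y) = (lon, lat) ordering regardless of CRS axis order.
    transformer = Transformer.from_proj(src_proj, Proj("EPSG:4326"), always_xy=True)
    xmin, ymin = transformer.transform(bbox["xmin"], bbox["ymin"])
    xmax, ymax = transformer.transform(bbox["xmax"], bbox["ymax"])
    corners = [(xmin, ymin), (xmax, ymin), (xmax, ymax), (xmin, ymax), (xmin, ymin)]
    # mapping() emits a GeoJSON-style dict: {"type": "Polygon", "coordinates": ...}
    return mapping(Polygon(corners))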
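Finally, the restructured process_server fans each layer out as its own task and gathers all results before one bulk write. The scheduling pattern in isolation, with transform and fan_out as hypothetical stand-ins for process_layer and the gathering loop:

import asyncio

async def transform(item: int) -> dict:
    # Stand-in for process_layer: any awaitable per-item work.
    await asyncio.sleep(0)
    return {"value": item}

async def fan_out(items: list[int]) -> list[dict]:
    # Schedule every item up front, then collect all results at once,
    # mirroring process_server's layer_tasks plus asyncio.gather.
    tasks = [asyncio.create_task(transform(i)) for i in items]
    return await asyncio.gather(*tasks)

print(asyncio.run(fan_out([1, 2, 3])))  # [{'value': 1}, {'value': 2}, {'value': 3}]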