Commit 0508b3e
Parent(s): 1288cce

feat!: run everything through main
Files changed:
- src/data_models/image_manager.py +7 -10
- src/data_models/openai_manager.py +65 -0
- src/data_models/park_manager.py +7 -6
- src/fetch_places.py +21 -5
- src/main.py +1 -2
- src/openai_predictions.py +2 -42
- src/predict.py +0 -58
src/data_models/image_manager.py CHANGED

@@ -13,13 +13,13 @@ class ImageManager:
         """Initialise connection and session."""
         self.engine, self.session = get_db_connection()
 
-    def add_image(self, name, created_at, park_id=None):
+    def add_image(self, name: str, created_at: datetime, park_id=None) -> int:
         """
         Add an image to the `images` table.
 
         Args:
             name (str): Image name.
-            created_at (
+            created_at (datetime): Image creation date.
             park_id (int):id of the park where the image was taken.
         Returns:
             dict: Information of the added image.
@@ -28,18 +28,15 @@ class ImageManager:
             """
             INSERT INTO images (name, created_at, park_id)
             VALUES (:name, :created_at, :park_id)
+            RETURNING id
             """
         )
         try:
-            self.session.execute(
+            response = self.session.execute(
                 query, {"name": name, "created_at": created_at, "park_id": park_id}
-            )
+            ).fetchone()
             self.session.commit()
-            return {
-                "name": name,
-                "created_at": created_at,
-                "park_id": park_id,
-            }
+            return response[0]
         except Exception as e:
             self.session.rollback()
             raise Exception(f"An error occurred while adding the image: {e}")
@@ -54,7 +51,7 @@ class ImageManager:
         """
         query = text("SELECT id FROM images WHERE name = :image_name")
         try:
-            result = self.session.execute(query, {"image_name": image_name})
+            result = self.session.execute(query, {"image_name": image_name})
             return result[0] if result else None
         except Exception as e:
             raise Exception(f"An error occurred while getting the image ID: {e}")
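With this change, `add_image` returns the generated primary key (via `RETURNING id` and `fetchone()`) instead of echoing back a dict, so callers can use the id directly as a foreign key; note that the docstring's "Returns: dict" line is now stale. A minimal usage sketch, assuming a configured database connection; the file path and park_id below are illustrative only:

from datetime import datetime

from data_models.image_manager import ImageManager

image_manager = ImageManager()

# Insert one image row and capture the id produced by RETURNING id.
image_id = image_manager.add_image(
    name="images/Family Park/photo_001.jpg",  # illustrative path
    created_at=datetime.now(),
    park_id=1,  # assumes a park row with id 1 already exists
)
print(f"Inserted image row with id {image_id}")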
src/data_models/openai_manager.py ADDED

@@ -0,0 +1,65 @@
+"""Manage bounding boxes in the database."""
+
+from sqlalchemy import text
+import json
+from data_models.sql_connection import get_db_connection
+
+
+class OpenAIManager:
+    def __init__(self):
+        """Initialise connection and session."""
+        self.engine, self.session = get_db_connection()
+
+    def add_predictions(
+        self,
+        img_id,
+        predictions,
+    ):
+        """
+        Add predictions to the `openai_predictions` table.
+
+        Args:
+            img_id (int): ID of the image where the bounding box was detected.
+            built_elements (dict): Built elements detected in the image.
+            fauna_identification (dict): Fauna identification detected in the image.
+            human_activity (dict): Human activity detected in the image.
+            human_detection (dict): Humans detected in the image.
+            vegetation_detection (dict): Vegetation detected in the image.
+            water_elements (dict): Water elements detected in the image.
+        """
+        query = text(
+            """
+            INSERT INTO openai_predictions (img_id, built_elements, fauna_identification, human_activity, human_detection, vegetation_detection, water_elements)
+            VALUES (:img_id, :built_elements, :fauna_identification, :human_activity, :human_detection, :vegetation_detection, :water_elements)
+            """
+        )
+        try:
+            self.session.execute(
+                query,
+                {
+                    "img_id": img_id,
+                    "built_elements": json.dumps(predictions["built_elements"]),
+                    "fauna_identification": json.dumps(predictions["fauna_identification"]),
+                    "human_activity": json.dumps(predictions["human_activity"]),
+                    "human_detection": json.dumps(predictions["human_detection"]),
+                    "vegetation_detection": json.dumps(predictions["vegetation_detection"]),
+                    "water_elements": json.dumps(predictions["water_elements"])
+                },
+            )
+            self.session.commit()
+            return {
+                "img_id": img_id,
+                "built_elements": predictions["built_elements"],
+                "fauna_identification": predictions["fauna_identification"],
+                "human_activity": predictions["human_activity"],
+                "human_detection": predictions["human_detection"],
+                "vegetation_detection": predictions["vegetation_detection"],
+                "water_elements": predictions["water_elements"],
+            }
+        except Exception as e:
+            self.session.rollback()
+            raise Exception(f"An error occurred while adding the predictions: {e}")
+
+    def close_connection(self):
+        """Close the connection."""
+        self.session.close()
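The new `OpenAIManager.add_predictions` receives the image id plus a single `predictions` dict and serialises each category to JSON before inserting (its docstring still lists the old per-category arguments). A minimal calling sketch; the keys must match the columns named in the INSERT statement, and the values below are purely illustrative:

from data_models.openai_manager import OpenAIManager

predictions = {
    "built_elements": {"benches": 2},
    "fauna_identification": {"birds": ["pigeon"]},
    "human_activity": {"walking": True},
    "human_detection": {"count": 3},
    "vegetation_detection": {"trees": 12},
    "water_elements": {"fountain": False},
}

manager = OpenAIManager()
# img_id=42 is an example; it must reference an existing row in `images`.
stored = manager.add_predictions(42, predictions)
manager.close_connection()

Any missing key raises a KeyError before the INSERT runs, so the caller is responsible for supplying all six categories.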
src/data_models/park_manager.py CHANGED

@@ -10,17 +10,18 @@ class ParkManager:
         """Initialise connection and session."""
         self.engine, self.session = get_db_connection()
 
-    def add_park(self, name):
+    def add_park(self, name) -> int:
         query = text(
             """
             INSERT INTO parks (name)
             VALUES (:name)
+            RETURNING id
             """
         )
         try:
-            self.session.execute(query, {"name": name})
+            response = self.session.execute(query, {"name": name})
             self.session.commit()
-            return
+            return response.all()[0][0]
         except Exception as e:
             self.session.rollback()
             raise Exception(f"An error occurred while adding the park: {e}")
@@ -32,12 +33,12 @@ class ParkManager:
         Returns:
             list[dict]: list of parks.
         """
-        query = text("SELECT * FROM parks")
         try:
+            query = text("SELECT * FROM parks")
             result = self.session.execute(query)
-            return [
+            return [row for row in result.fetchall()]
         except Exception as e:
-            raise Exception(f"An error occurred while
+            raise Exception(f"An error occurred while fetching the parks: {e}")
 
     def get_park_id(self, park_name):
         """Get the park ID from the park name.
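`add_park` follows the same pattern as `add_image`: `RETURNING id` plus `response.all()[0][0]` to hand the new primary key back to the caller, while `get_parks` now returns raw SQLAlchemy Row objects rather than the dicts its docstring still promises. A short sketch of the call (park name is illustrative):

from data_models.park_manager import ParkManager

park_manager = ParkManager()

# Create a park row and keep its generated id for later foreign keys.
park_id = park_manager.add_park("Al Khaldiyah Park")
print(f"Created park {park_id}")

Inside `add_park`, `response.all()[0][0]` reads the single RETURNING value; on SQLAlchemy 1.4+, `response.scalar_one()` would be an equivalent, slightly stricter way to read it.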
src/fetch_places.py CHANGED

@@ -1,8 +1,18 @@
 import os
 import requests
 import json
+from datetime import datetime
 
 from typing import Tuple
+from data_models.park_manager import ParkManager
+from data_models.image_manager import ImageManager
+from data_models.openai_manager import OpenAIManager
+from openai_predictions import process_agent_predictions
+
+
+park_manager = ParkManager()
+image_manager = ImageManager()
+prediction_manager = OpenAIManager()
 
 
 def nearby_search(coordinates: tuple) -> Tuple[str, str, dict]:
@@ -110,7 +120,7 @@ def fetch_photo(place_id: str, photo_id: str, max_width: int, max_height: int) -
     return response.content
 
 
-def fetch_place_photos(place_id: str, place_name:str, photos: dict
+def fetch_place_photos(place_id: str, place_name: str, photos: dict) -> int:
     """Fetch all photos from place datas
 
     Args:
@@ -122,19 +132,25 @@ def fetch_place_photos(place_id: str, place_name:str, photos: dict, folder: str
     Returns:
         str: Place ID
     """
-    folder_path = f"{place_name}
+    folder_path = f"images/{place_name}"
     os.makedirs(folder_path, exist_ok=True)
 
+    park_id = park_manager.add_park(place_name)
     for photo in photos:
         photo_id = photo["name"].replace(f"places/{place_id}/photos/", "")
+        photo_id = photo_id.replace("/", "_")
 
         max_width = photo["widthPx"]
         max_height = photo["heightPx"]
 
         photo_binary = fetch_photo(place_id, photo_id, max_width, max_height)
-
+        file_name = f"{folder_path}/{photo_id[:150]}.jpg"
+        with open(file_name, "wb") as f:
            f.write(photo_binary)
 
-
+        image_id = image_manager.add_image(file_name, datetime.now(), park_id=park_id)
+        predictions = process_agent_predictions(file_name)
+        prediction_manager.add_predictions(image_id, predictions)
 
-
+    print(f"{len(photos)} photos fetched for place: {place_name}")
+    return len(photos)
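`fetch_place_photos` now drives the whole per-park pipeline through the module-level managers: create the park row, save each photo to disk, register the image, run the OpenAI predictions, and persist them. A condensed single-photo trace of that loop, meant to run inside src/fetch_places.py where the managers and helpers are defined; the place data below is an illustrative stand-in for what `nearby_search` returns:

import os
from datetime import datetime

place_id = "ChIJexample"    # illustrative Google Places id
place_name = "Family Park"  # illustrative place name
photo = {"name": f"places/{place_id}/photos/abc/def", "widthPx": 1200, "heightPx": 800}

folder_path = f"images/{place_name}"
os.makedirs(folder_path, exist_ok=True)

park_id = park_manager.add_park(place_name)

photo_id = photo["name"].replace(f"places/{place_id}/photos/", "").replace("/", "_")
photo_binary = fetch_photo(place_id, photo_id, photo["widthPx"], photo["heightPx"])

file_name = f"{folder_path}/{photo_id[:150]}.jpg"  # [:150] presumably guards against over-long file names
with open(file_name, "wb") as f:
    f.write(photo_binary)

image_id = image_manager.add_image(file_name, datetime.now(), park_id=park_id)
prediction_manager.add_predictions(image_id, process_agent_predictions(file_name))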
src/main.py CHANGED

@@ -7,11 +7,10 @@ if __name__ == "__main__":
     file_path = "datas/Espaces_verts_SCAI.kml"
     kml_data = parse_kml(file_path)
     coordinates = get_coordinates(kml_data)
-    print(coordinates["Al Khaldiyah Park"])
 
     for place_name, coordinate in tqdm(coordinates.items()):
         try:
             place_id, _, photos = nearby_search(coordinate)
-
+            fetch_place_photos(place_id, place_name, photos)
         except Exception as e:
             print(f"Error: {e} for {place_name}")
src/openai_predictions.py CHANGED

@@ -1,9 +1,8 @@
 import os
 import base64
+from tqdm import tqdm
 
 from openai_agent import Agent
-from data_models.park_manager import ParkManager
-from data_models.image_manager import ImageManager, add_image_to_db
 
 agent = Agent("./prompts")
 
@@ -17,7 +16,7 @@ def encode_image(image_path: str):
     return base64.b64encode(image_file.read()).decode("utf-8")
 
 
-def _process_agent_predictions(file_path):
+def process_agent_predictions(file_path):
     base64_image = encode_image(file_path)
 
     prompts = [
@@ -38,42 +37,3 @@ def _process_agent_predictions(file_path):
         responses[prompt] = response.as_json()
 
     return responses
-
-
-def process_agent_predictions(list_folder: list[str]):
-    """
-    Process predictions for a list of image folders, adding images and bounding boxes to the database.
-
-    Args:
-        list_folder (list): List of image folders.
-        model_file (str): Path to the YOLO model.
-
-    Raises:
-        Exception: If an error occurs during processing.
-    """
-    park_manager = ParkManager()
-    image_manager = ImageManager()
-    print(f"Initializing processing of predictions...")
-
-    for folder in list_folder:
-        print(f"Processing folder: {folder}...")
-        park_name = folder.split("/")[-1]
-        print(park_name)
-
-        print(f"Retrieving park ID for '{park_name}'...")
-        park_id = park_manager.get_park_id(park_name)
-        if not park_id:
-            print(f"Park '{park_name}' not found in the database. Skipping folder.")
-            continue
-
-        for file in os.listdir(folder):
-            file_path = os.path.join(folder, file)
-            print(file_path)
-            predictions = _process_agent_predictions(file_path)
-
-            for result in predictions:
-                print(f"Processing result for image: {result.path.split('/')[-1]}...")
-                if
-                    image_id = add_image_to_db(result, image_manager, park_id)
-
-    print(f"Processing complete. Closing database connections.")
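The old folder-level `process_agent_predictions` is gone; the renamed single-image version now returns a dict of per-prompt JSON responses (`responses[prompt] = response.as_json()`), which is the shape `OpenAIManager.add_predictions` indexes into. A small defensive sketch a caller could use before persisting, assuming the prompt names line up with the table columns (the image path is illustrative):

from openai_predictions import process_agent_predictions

# Columns of the openai_predictions table that add_predictions reads by key.
EXPECTED_KEYS = {
    "built_elements", "fauna_identification", "human_activity",
    "human_detection", "vegetation_detection", "water_elements",
}

predictions = process_agent_predictions("images/Family Park/photo_001.jpg")
missing = EXPECTED_KEYS - predictions.keys()
if missing:
    raise ValueError(f"Agent responses are missing prompt keys: {sorted(missing)}")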
src/predict.py DELETED

@@ -1,58 +0,0 @@
-import os
-from tqdm import tqdm
-from yolo_predictions import process_YOLO_predictions
-from openai_predictions import process_agent_predictions
-from data_models.park_manager import ParkManager
-
-
-def load_images_from_folder(folder):
-    """
-    Load images from a folder.
-
-    Args:
-        folder (str): Path to the folder.
-
-    Returns:
-        list: List of image paths.
-    """
-    for file in os.listdir(folder):
-        if file.endswith(".jpg") or file.endswith(".png"):
-            yield os.path.join(folder, file)
-
-
-def process_predictions(list_folder, model_file):
-    """
-    Process predictions for a list of image folders, adding images and bounding boxes to the database.
-
-    Args:
-        list_folder (list): List of image folders.
-        model_file (str): Path to the YOLO model.
-
-    Raises:
-        Exception: If an error occurs during processing.
-    """
-    park_manager = ParkManager()
-
-    for folder in list_folder:
-        print(f"Loading images from folder: {folder}...")
-
-        park_name = folder.split("/")[-1]
-
-        print(f"Retrieving park ID for '{park_name}'...")
-        park_id = park_manager.get_park_id(park_name)
-        if not park_id:
-            print(f"Park '{park_name}' not found in the database. Skipping folder.")
-            continue
-
-        for image in tqdm(load_images_from_folder(folder)):
-            process_YOLO_predictions(park_id, image, model_file)
-            # process_agent_predictions(park_id, image)
-        print(f"Folders {folder} processed successfully!")
-    park_manager.close_connection()
-
-
-if __name__ == "__main__":
-    list_folder = ["./images/Al Khaldiyah Park", "./images/Family Park"]
-    model_file = "yolo11s.pt"
-
-    process_predictions(list_folder, model_file)