{"nbformat":4,"nbformat_minor":0,"metadata":{"colab":{"provenance":[],"machine_shape":"hm","gpuType":"L4","authorship_tag":"ABX9TyPRgRq+CmOekg2XVhIIL5we"},"kernelspec":{"name":"python3","display_name":"Python 3"},"language_info":{"name":"python"},"accelerator":"GPU"},"cells":[{"cell_type":"code","source":["# -*- coding: utf-8 -*-\n","\"\"\"\n","Tutorial: Efficient Deep Learning Systems - Intro & Benchmarking\n","\n","Based on the lecture by Max Ryabinin.\n","This notebook explores fundamental concepts of GPU execution and benchmarking\n","in PyTorch, relevant for building efficient DL systems.\n","\"\"\"\n","\n","# %% [markdown]\n","# # Efficient Deep Learning Systems: Introduction & Benchmarking in PyTorch\n","#\n","# This notebook provides practical examples related to the introductory concepts covered in the \"Efficient Deep Learning Systems\" course. We'll touch upon:\n","#\n","# 1. **GPU Architecture Basics (Briefly):** Understanding the high-level differences between CPU and GPU execution.\n","# 2. **CUDA Execution Model:** How PyTorch interacts with the GPU (asynchronous execution).\n","# 3. **Memory Access:** Host-to-Device (H2D) and Device-to-Host (D2H) transfers, and pinned memory.\n","# 4. **Benchmarking:** How to measure performance correctly, considering synchronization and warm-up.\n","# 5. **Input Shape Effects:** How performance can vary based on tensor dimensions (Tile/Wave Quantization effects).\n","# 6. **PyTorch Utilities:** Using `torch.utils.benchmark`.\n","#\n","# **Prerequisites:**\n","# - PyTorch installed (`pip install torch torchvision torchaudio`)\n","# - A CUDA-enabled GPU recognized by PyTorch\n","# - `numpy` and `matplotlib` (`pip install numpy matplotlib`)\n","\n","# %%\n","import torch\n","import numpy as np\n","import time\n","import matplotlib.pyplot as plt\n","import timeit\n","\n","# %% [markdown]\n","# ## 1. 
Setup: Check GPU Availability\n","#\n","# First, let's verify that PyTorch can access the CUDA GPU.\n","\n","# %%\n","if torch.cuda.is_available():\n"," device = torch.device(\"cuda\")\n"," print(f\"CUDA device detected: {torch.cuda.get_device_name(0)}\")\n"," print(f\"Total GPU Memory: {torch.cuda.get_device_properties(0).total_memory / (1024**3):.2f} GB\")\n","else:\n"," device = torch.device(\"cpu\")\n"," print(\"CUDA device not found, using CPU.\")\n"," # Many examples below are GPU-specific, execution might fail or be slow.\n","\n","# Ensure we have a GPU for the relevant parts\n","use_gpu = torch.cuda.is_available()\n","\n","# %% [markdown]\n","# ## 2. GPU Architecture & Execution Model Basics (Conceptual Review)\n","#\n","# - **CPU vs. GPU:** CPUs have a few powerful cores optimized for serial tasks, with large caches. GPUs have thousands of simpler cores optimized for parallel tasks (SIMT - Single Instruction, Multiple Thread). (See Slide 9)\n","# - **CUDA Model:** PyTorch uses CUDA (or ROCm on AMD) to run operations (kernels) on the GPU. Work is launched from the host (CPU) to the device (GPU). Kernels are executed by threads, grouped into blocks, which form a grid. These blocks are scheduled onto Streaming Multiprocessors (SMs). (See Slide 10)\n","# - **Warps:** Threads execute in groups called warps (typically 32 threads). All threads in a warp execute the same instruction. Branching within a warp (where different threads take different paths) can reduce efficiency as all paths might need to be executed. (See Slide 11)\n","# - **Key Takeaway:** GPUs achieve speed through massive parallelism. Efficient code keeps these cores busy.\n","\n","# %% [markdown]\n","# ## 3. Asynchronous Execution & Synchronization\n","#\n","# By default, CUDA operations (kernel launches, memory copies initiated from the CPU) are **asynchronous**. The CPU queues the operation and returns control immediately, *before* the GPU has finished. 
This hides latency and allows the CPU and GPU to work in parallel. (See Slide 16, 17)\n","#\n","# **Problem:** If we want to measure GPU execution time accurately, simply timing the Python call is wrong because it doesn't wait for the GPU to finish.\n","#\n","# **Solution:** We need to explicitly synchronize the CPU with the GPU using `torch.cuda.synchronize()`.\n","\n","# %%\n","if use_gpu:\n"," # Example: Matrix Multiplication\n"," size = 2048\n"," a = torch.randn(size, size, device=device)\n"," b = torch.randn(size, size, device=device)\n","\n"," # --- Incorrect Timing (No Synchronization) ---\n"," start_time = time.time()\n"," c = torch.matmul(a, b)\n"," end_time = time.time()\n"," print(f\"Incorrect MM time (no sync): {(end_time - start_time) * 1000:.4f} ms\")\n"," # This primarily measures kernel launch overhead, not execution time.\n","\n"," # --- Correct Timing (With Synchronization) ---\n"," # We need to sync *before* starting and *after* finishing\n"," # to ensure the measured interval only contains the operation.\n"," torch.cuda.synchronize() # Ensure previous work is done (optional here, but good practice)\n"," start_time = time.time()\n"," c = torch.matmul(a, b)\n"," torch.cuda.synchronize() # Wait for matmul kernel to finish\n"," end_time = time.time()\n"," print(f\"Correct MM time (with sync): {(end_time - start_time) * 1000:.4f} ms\")\n","\n"," # Using timeit (more robust for short durations)\n"," # Note: timeit runs the code multiple times. 
Synchronization is needed *within* the timed statement.\n"," stmt = \"torch.matmul(a, b); torch.cuda.synchronize()\"\n"," setup = \"import torch; size=2048; device=torch.device('cuda'); torch.cuda.synchronize(); a = torch.randn(size, size, device=device); b = torch.randn(size, size, device=device); torch.cuda.synchronize()\"\n"," num_runs = 10\n"," timer = timeit.Timer(stmt=stmt, setup=setup)\n"," avg_time_ms = timer.timeit(number=num_runs) / num_runs * 1000\n"," print(f\"Correct MM time (timeit, avg over {num_runs} runs): {avg_time_ms:.4f} ms\")\n","\n","else:\n"," print(\"Skipping asynchronous execution demo (requires GPU).\")\n","\n","# %% [markdown]\n","# ### Synchronization Triggered by Data Transfer\n","#\n","# Copying data from GPU to CPU (e.g., using `.cpu()` or `.item()`) implicitly synchronizes the specific stream the tensor is on, as the CPU needs the result. (See Slide 17)\n","\n","# %%\n","if use_gpu:\n"," size = 1024\n"," a_gpu = torch.randn(size, size, device=device)\n"," b_gpu = torch.randn(size, size, device=device)\n","\n"," # Perform an operation\n"," c_gpu = torch.matmul(a_gpu, b_gpu)\n","\n"," # Time the operation *plus* the copy back to CPU\n"," start_time = time.time()\n"," c_cpu = c_gpu.cpu() # This forces synchronization for c_gpu\n"," end_time = time.time()\n"," print(f\"Time for matmul + .cpu() transfer: {(end_time - start_time) * 1000:.4f} ms\")\n","\n"," # Similarly, .item() on a 1-element tensor synchronizes\n"," result_gpu = torch.sum(c_gpu)\n"," start_time = time.time()\n"," result_val = result_gpu.item() # This forces synchronization for result_gpu\n"," end_time = time.time()\n"," print(f\"Time for sum + .item(): {(end_time - start_time) * 1000:.4f} ms\")\n"," print(f\"Sum result: {result_val}\") # The CPU now has the result\n","\n","else:\n"," print(\"Skipping .cpu()/.item() sync demo (requires GPU).\")\n","\n","\n","# %% [markdown]\n","# ## 4. 
Benchmarking Considerations: Warm-up\n","#\n","# The first time a CUDA operation runs, there might be extra overhead (e.g., kernel loading, context initialization, memory allocations). Subsequent runs are often faster. Therefore, it's crucial to perform \"warm-up\" runs before starting actual measurements. (See Slide 18)\n","\n","# %%\n","if use_gpu:\n"," size = 1536\n"," a = torch.randn(size, size, device=device)\n"," b = torch.randn(size, size, device=device)\n","\n"," print(\"Benchmarking with Warm-up:\")\n","\n"," # Warm-up runs\n"," print(\"Warm-up runs...\")\n"," for _ in range(3):\n"," c = torch.matmul(a, b)\n"," torch.cuda.synchronize() # Ensure each warm-up completes\n","\n"," # Actual measurement runs\n"," print(\"Measurement runs...\")\n"," times = []\n"," num_runs = 10\n"," for _ in range(num_runs):\n"," torch.cuda.synchronize()\n"," start_time = time.time()\n"," c = torch.matmul(a, b)\n"," torch.cuda.synchronize()\n"," end_time = time.time()\n"," times.append((end_time - start_time) * 1000) # milliseconds\n","\n"," print(f\"Average time over {num_runs} measurement runs: {np.mean(times):.4f} ms\")\n"," print(f\"Standard deviation: {np.std(times):.4f} ms\")\n","\n","else:\n"," print(\"Skipping warm-up demo (requires GPU).\")\n","\n","# %% [markdown]\n","# ## 5. Memory Access: Host <-> Device Transfers & Pinned Memory\n","#\n","# Data must be explicitly copied between the CPU's RAM (host memory) and the GPU's RAM (device memory). These copies happen over the PCIe bus, which can be a bottleneck. (See Slide 14)\n","#\n","# - **H2D:** Host to Device (`.to(device)` or `.cuda()`)\n","# - **D2H:** Device to Host (`.cpu()`)\n","#\n","# Standard host memory allocated by Python/PyTorch is **pageable**. The OS can move it around in physical RAM or swap it to disk. 
CUDA transfers from pageable memory require an extra internal copy to a **pinned** (or page-locked) buffer before the transfer can start via DMA (Direct Memory Access).\n","#\n","# Allocating host memory as **pinned** avoids this extra copy, potentially speeding up H2D transfers. D2H transfers are often faster *into* pinned memory as well.\n","#\n","# **How to use pinned memory:**\n","# - Create a tensor directly in pinned memory: `torch.empty(..., pin_memory=True)`\n","# - Copy an existing CPU tensor to pinned memory: `cpu_tensor.pin_memory()`\n","# - Use `pin_memory=True` in `torch.utils.data.DataLoader`.\n","\n","# %%\n","if use_gpu:\n"," size_bytes = 128 * 1024 * 1024 # 128 MB\n"," elements = size_bytes // 4 # Assuming float32 (4 bytes)\n"," cpu_tensor_pageable = torch.randn(elements, device='cpu')\n"," # Create a tensor directly in pinned memory\n"," cpu_tensor_pinned = torch.empty(elements, device='cpu', pin_memory=True)\n"," cpu_tensor_pinned.copy_(cpu_tensor_pageable) # Copy data into it\n","\n"," # --- Benchmark H2D transfer ---\n"," num_runs = 10\n"," warmup = 2\n","\n"," def benchmark_h2d(tensor):\n"," times = []\n"," # Warmup\n"," for _ in range(warmup):\n"," gpu_tensor = tensor.to(device)\n"," torch.cuda.synchronize()\n"," # Measure\n"," for _ in range(num_runs):\n"," torch.cuda.synchronize()\n"," start = time.time()\n"," gpu_tensor = tensor.to(device)\n"," torch.cuda.synchronize()\n"," end = time.time()\n"," times.append((end - start) * 1000) # ms\n"," del gpu_tensor # Free GPU memory\n"," torch.cuda.empty_cache()\n"," return np.mean(times)\n","\n"," time_pageable = benchmark_h2d(cpu_tensor_pageable)\n"," time_pinned = benchmark_h2d(cpu_tensor_pinned)\n","\n"," print(f\"H2D Transfer Time ({size_bytes / (1024**2):.0f} MB):\")\n"," print(f\" Pageable Memory: {time_pageable:.4f} ms\")\n"," print(f\" Pinned Memory: {time_pinned:.4f} ms\")\n"," if time_pinned < time_pageable:\n"," print(f\" Speedup: {time_pageable / time_pinned:.2f}x\")\n"," else:\n"," 
print(\" (No speedup observed in this run)\")\n","\n"," # --- Benchmark D2H transfer ---\n"," gpu_tensor = cpu_tensor_pageable.to(device) # Start with data on GPU\n","\n"," def benchmark_d2h(target_cpu_tensor):\n"," times = []\n"," # Warmup\n"," for _ in range(warmup):\n"," target_cpu_tensor.copy_(gpu_tensor) # Copy D2H into target\n"," torch.cuda.synchronize() # Not strictly needed for D2H but good practice\n"," # Measure\n"," for _ in range(num_runs):\n"," # No sync needed before D2H usually, but sync *after* to ensure completion\n"," start = time.time()\n"," target_cpu_tensor.copy_(gpu_tensor)\n"," # Sync isn't strictly necessary for D2H timing itself,\n"," # as copy_ implies some level of sync, but let's be safe.\n"," torch.cuda.synchronize()\n"," end = time.time()\n"," times.append((end - start) * 1000)\n"," return np.mean(times)\n","\n"," time_d2h_pageable = benchmark_d2h(torch.empty_like(cpu_tensor_pageable))\n"," time_d2h_pinned = benchmark_d2h(torch.empty_like(cpu_tensor_pinned, pin_memory=True))\n","\n"," print(f\"\\nD2H Transfer Time ({size_bytes / (1024**2):.0f} MB):\")\n"," print(f\" To Pageable Memory: {time_d2h_pageable:.4f} ms\")\n"," print(f\" To Pinned Memory: {time_d2h_pinned:.4f} ms\")\n"," if time_d2h_pinned < time_d2h_pageable:\n"," print(f\" Speedup: {time_d2h_pageable / time_d2h_pinned:.2f}x\")\n"," else:\n"," print(\" (No speedup observed in this run)\")\n","\n"," del gpu_tensor\n"," torch.cuda.empty_cache()\n","\n","else:\n"," print(\"Skipping memory transfer demo (requires GPU).\")\n","\n","# %% [markdown]\n","# **Note on Pinned Memory:** Allocating too much pinned memory can degrade overall system performance because it reduces the amount of memory the OS can manage (e.g., page out). Use it judiciously, primarily for buffers involved in frequent H2D/D2H transfers (like in data loading pipelines).\n","\n","# %% [markdown]\n","# ## 6. 
Input Shape Effects & `cudnn.benchmark`\n","#\n","# GPU performance, especially for operations like convolutions and matrix multiplications handled by libraries like cuDNN, can be sensitive to input tensor shapes (sizes, strides). (See Slides 12, 13)\n","#\n","# - **Tile/Wave Quantization:** Hardware resources (SMs, memory bandwidth) are often utilized most efficiently when problem sizes align well with the hardware's internal tiling or scheduling strategies. Performance might not scale smoothly and can sometimes exhibit step-like behavior as dimensions change.\n","# - **`torch.backends.cudnn.benchmark = True`:** This tells cuDNN to run benchmarks for different algorithms for the specific input sizes encountered during the *first* pass of an operation (like `nn.Conv2d`). It then caches the fastest algorithm for those specific sizes.\n","#\n","# **Use `cudnn.benchmark = True` if:**\n","# - Your input sizes (batch size, image dimensions, etc.) are **fixed** throughout training/inference.\n","#\n","# **Avoid `cudnn.benchmark = True` if:**\n","# - Your input sizes vary often (e.g., variable batch sizes, different image resolutions). The overhead of benchmarking each new size can outweigh the benefits. 
(See Slide 17)\n","\n","# %%\n","if use_gpu and torch.backends.cudnn.is_available():\n"," print(\"Benchmarking MatMul with varying sizes...\")\n"," fixed_dim = 2048\n"," variable_dims = list(range(1024, 3072, 64)) # Vary one dimension\n"," times_default = []\n"," times_benchmark_mode = []\n","\n"," # --- Default Mode ---\n"," torch.backends.cudnn.benchmark = False\n"," print(\"Running with cudnn.benchmark = False\")\n"," a = torch.randn(fixed_dim, fixed_dim, device=device) # Reusable tensor\n"," for dim in variable_dims:\n"," b = torch.randn(fixed_dim, dim, device=device)\n"," # Warmup\n"," for _ in range(2): torch.matmul(a, b); torch.cuda.synchronize()\n"," # Measure\n"," torch.cuda.synchronize()\n"," start = time.time()\n"," for _ in range(5): # Average over a few runs\n"," c = torch.matmul(a, b)\n"," torch.cuda.synchronize()\n"," end = time.time()\n"," times_default.append(((end - start) / 5) * 1000) # Avg time in ms\n"," del b, c # Free memory\n"," torch.cuda.empty_cache()\n","\n"," # --- Benchmark Mode ---\n"," torch.backends.cudnn.benchmark = True\n"," # NOTE: In benchmark mode, the *first* time a size is seen incurs overhead.\n"," # Subsequent calls with the *same* size should be faster.\n"," # Our loop uses different sizes, so we might not see the full benefit here,\n"," # and might even see slowdown due to repeated benchmarking.\n"," # This mode is best when sizes are *constant*.\n"," print(\"Running with cudnn.benchmark = True\")\n"," a = torch.randn(fixed_dim, fixed_dim, device=device) # Recreate to reset potential cache\n"," for dim in variable_dims:\n"," b = torch.randn(fixed_dim, dim, device=device)\n"," # Allow benchmark to run on first pass, then measure\n"," torch.cuda.synchronize()\n"," start = time.time()\n"," # The first call within the timing loop might trigger benchmarking\n"," for _ in range(5):\n"," c = torch.matmul(a, b)\n"," torch.cuda.synchronize()\n"," end = time.time()\n"," times_benchmark_mode.append(((end - start) / 5) * 1000) # Avg 
time in ms\n"," del b, c # Free memory\n"," torch.cuda.empty_cache()\n","\n"," # --- Plotting ---\n"," plt.figure(figsize=(10, 6))\n"," plt.plot(variable_dims, times_default, label='cudnn.benchmark = False', marker='o')\n"," # Plot benchmark mode only if useful (might be noisy here)\n"," # plt.plot(variable_dims, times_benchmark_mode, label='cudnn.benchmark = True', marker='x')\n"," plt.xlabel(\"Variable Dimension Size (N in KxN MatMul)\")\n"," plt.ylabel(\"Average Execution Time (ms)\")\n"," plt.title(\"MatMul Performance vs. Input Shape (M=2048, K=2048)\")\n"," plt.legend()\n"," plt.grid(True)\n"," plt.show()\n","\n"," print(\"\\nNote: Performance variations can be subtle and depend heavily on\")\n"," print(\"the specific GPU, CUDA version, and operation.\")\n"," print(\"The 'steps' or non-smoothness relate to Tile/Wave Quantization effects.\")\n"," print(\"cudnn.benchmark=True is most effective for *fixed* input sizes.\")\n","\n"," # Reset benchmark mode\n"," torch.backends.cudnn.benchmark = False\n","else:\n"," print(\"Skipping cudnn.benchmark demo (requires GPU and cuDNN).\")\n","\n","\n","# %% [markdown]\n","# ## 7. `torch.utils.benchmark`: A Better Tool for Microbenchmarking\n","#\n","# Manually handling warm-up, synchronization, and multiple runs is tedious and error-prone. PyTorch provides `torch.utils.benchmark` for more robust microbenchmarking. 
(See Slide 18)\n","\n","# %%\n","try:\n"," import torch.utils.benchmark as benchmark\n","except ImportError:\n"," print(\"torch.utils.benchmark not available (requires recent PyTorch version).\")\n"," benchmark = None\n","\n","if use_gpu and benchmark:\n"," size = 2048\n"," a = torch.randn(size, size, device=device)\n"," b = torch.randn(size, size, device=device)\n"," stmt = \"torch.matmul(a, b)\"\n","\n"," # Basic usage\n"," timer = benchmark.Timer(\n"," stmt=stmt,\n"," globals={'a': a, 'b': b}\n"," )\n","\n"," # Run the benchmark\n"," measurement = timer.timeit(100) # Run stmt 100 times for timing\n","\n"," # Print the results (includes mean, median, stddev)\n"," print(\"Benchmarking MatMul with torch.utils.benchmark:\")\n"," print(measurement)\n","\n"," # Example with setup and different threads\n"," timer_with_setup = benchmark.Timer(\n"," stmt=\"torch.matmul(x, y)\",\n"," setup=\"x = torch.randn(s, s, device=dev); y = torch.randn(s, s, device=dev)\",\n"," globals={'s': size, 'dev': device},\n"," num_threads=1 # Control CPU threads used by PyTorch (relevant for some ops)\n"," )\n"," measurement_setup = timer_with_setup.timeit(50)\n"," print(\"\\nBenchmarking with setup:\")\n"," print(measurement_setup)\n","\n"," # Comparing two versions\n"," label = \"MatMul\"\n"," sub_label = f\"{size}x{size}\"\n"," results = []\n"," # Version 1 (standard)\n"," results.append(benchmark.Timer(stmt=stmt, globals={'a': a, 'b': b}, label=label, description=\"Standard\").blocked_autorange())\n"," # Version 2 (let's pretend we optimize - e.g., fused op if available)\n"," # For demo, just run the same thing again\n"," results.append(benchmark.Timer(stmt=stmt, globals={'a': a, 'b': b}, label=label, description=\"Hypothetical Opt\").blocked_autorange())\n","\n"," compare = benchmark.Compare(results)\n"," print(\"\\nComparing implementations:\")\n"," compare.print()\n","\n","\n","else:\n"," if not benchmark:\n"," print(\"torch.utils.benchmark not imported.\")\n"," else:\n"," 
print(\"Skipping torch.utils.benchmark demo (requires GPU).\")\n","\n","\n","# %% [markdown]\n","# ## 8. Conclusion & Key Takeaways\n","#\n","# Understanding the basics of how code executes on a GPU is crucial for writing efficient deep learning systems.\n","#\n","# - **Asynchronous Execution:** Be aware of it and use `torch.cuda.synchronize()` for accurate timing.\n","# - **Memory Transfers:** Minimize H2D/D2H copies. Use pinned memory (`pin_memory=True`) strategically, especially in data loaders, to potentially speed up H2D transfers.\n","# - **Benchmarking:** Always perform warm-up runs. Use tools like `timeit` or preferably `torch.utils.benchmark` for reliable measurements.\n","# - **Input Shapes Matter:** Performance isn't always smooth. Be mindful of Tile/Wave quantization effects. Use `torch.backends.cudnn.benchmark = True` only when input shapes are constant.\n","# - **Don't Overoptimize Prematurely:** Profile your code to find the real bottlenecks before spending time on micro-optimizations (Slide 18). 
Focus on algorithmic improvements, data loading, and minimizing unnecessary work first.\n","#\n","# This introduction lays the groundwork for exploring more advanced topics like profiling, distributed training, and model optimization covered later in the course.\n","\n","# %%\n","print(\"End of tutorial.\")"],"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":1000},"id":"49zOjwgVrDQ-","executionInfo":{"status":"ok","timestamp":1745903385376,"user_tz":-420,"elapsed":3401,"user":{"displayName":"Laam Pham","userId":"04566654796696849937"}},"outputId":"923fb0a6-ed6b-4205-e209-e3438891ea4e"},"execution_count":2,"outputs":[{"output_type":"stream","name":"stdout","text":["CUDA device detected: NVIDIA L4\n","Total GPU Memory: 22.16 GB\n","Incorrect MM time (no sync): 0.5832 ms\n","Correct MM time (with sync): 1.2836 ms\n","Correct MM time (timeit, avg over 10 runs): 1.0801 ms\n","Time for matmul + .cpu() transfer: 3.9089 ms\n","Time for sum + .item(): 0.0453 ms\n","Sum result: -13279.021484375\n","Benchmarking with Warm-up:\n","Warm-up runs...\n","Measurement runs...\n","Average time over 10 measurement runs: 0.4783 ms\n","Standard deviation: 0.0033 ms\n","H2D Transfer Time (128 MB):\n"," Pageable Memory: 28.5886 ms\n"," Pinned Memory: 10.9343 ms\n"," Speedup: 2.61x\n","\n","D2H Transfer Time (128 MB):\n"," To Pageable Memory: 26.6042 ms\n"," To Pinned Memory: 10.2439 ms\n"," Speedup: 2.60x\n","Benchmarking MatMul with varying sizes...\n","Running with cudnn.benchmark = False\n","Running with cudnn.benchmark = True\n"]},{"output_type":"display_data","data":{"text/plain":["
"],"image/png":"iVBORw0KGgoAAAANSUhEUgAAA04AAAIjCAYAAAA0vUuxAAAAOnRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjEwLjAsIGh0dHBzOi8vbWF0cGxvdGxpYi5vcmcvlHJYcgAAAAlwSFlzAAAPYQAAD2EBqD+naQAAuohJREFUeJzs3XdYk1cbBvA7YYWNgAwRFREH4h4V90JRi7W2tWqtWkcdtVqttrW2VVrraq1bWzvUOltH7edC0Yp71IFK0TpAcbCRvULyfn9gUmMCJJAQxv27Li/NyZv3fXISME/OOc8RCYIggIiIiIiIiIokNnYAREREREREFR0TJyIiIiIiohIwcSIiIiIiIioBEyciIiIiIqISMHEiIiIiIiIqARMnIiIiIiKiEjBxIiIiIiIiKgETJyIiIiIiohIwcSIiIiIiIioBEyci0tm8efMgEomMHYaKzMxMjBs3Dm5ubhCJRPjggw+MHRKRRmFhYRCJRNi1a5exQym1yZMnIyAgwNhhUBUVEhICGxsbJCYmGjsUIhVMnIgMYOPGjRCJRBCJRDh9+rTa/YIgwNPTEyKRCC+//HKprrFgwQLs3bvXKNfW1ujRo5WxiEQi2NnZoUWLFli6dCny8vL0eq0FCxZg48aNmDRpEjZv3oy3335br+cnw+vevTv8/PyMHYbS2bNnMW/ePKSmpmr9mH379qFbt25wcXGBlZUV6tevjyFDhiAkJMRwgZaz6Oho/PTTT/j000+Vbffv31f+nM+fP1/j49566y2IRCLY2NiUOYbs7GysWbMGffr0gbu7O2xtbdGqVSusW7cOMplM7Xi5XI4lS5bAy8sLEokEzZs3x/bt24u9hlQqha+vL0QiEb799lu1+2NjY/Huu+/Cy8sLlpaW8Pb2xowZM5CcnFyq56TowxevJQgCJkyYAJFIhHnz5pXq3Ap///03pkyZgqZNm8La2hp16tTBkCFDcPv2bY3H37x5E4GBgbCxsYGjoyPefvvtEpOZrVu3Fvs6//777+jQoQMcHBzg5OSEbt264cCBAyrHBAYGokGDBli4cGHpniiRgTBxIjIgiUSCbdu2qbWfOHECjx49goWFRanPXVTiVB7X1oWFhQU2b96MzZs3Y8GCBXB0dMTMmTMxatQovV7nr7/+QocOHTB37lyMGDECbdq00ev5qfo5e/YsgoODtU6cvv32WwwcOBAikQizZ8/GsmXL8Nprr+HOnTvYsWOHYYMtRytWrICXlxd69Oihdp9EItGYkGRlZeHPP/+ERCLRSwxRUVF4//33IQgCZsyYgW+//RZeXl6YPHkyxowZo3b8nDlz8PHHHyMgIACrVq1CnTp1MHz48GJfl1WrViEmJkbjfZmZmfD398cff/yBkSNHYtWqVejfvz9Wr16N3r17Qy6X6+V5CoKAyZMnY/369fj888/LnDgtXrwYu3fvRq9evbBixQq8++67OHnyJFq3bo2IiAiVYx89eoSuXbvi7t27WLBgAWbOnIkDBw4gICAA+fn5Gs+fmZmJjz76CNbW1hrvX7VqFd588004Oztj0aJF+Pzzz5GWloaXX34Ze/bsUTl2woQJ+OGHH5CRkVGm50ykVwIR6d2GDRsEAMLgwYMFZ2dnQSqVqtw/fvx4oU2bNkLdunWFAQMGlOoa1tbWwqhRo4xy7blz5wra/PoYNWqUYG1trdImk8mEtm3bCgCEx48fl+r6z58rJydHEARB8PLyKvXz0UQqlQp5eXl6Ox+VrFu3bkLTpk2NHYbSN998IwAQoqOjSzxWKpUKdnZ2QkBAgMb74+Pjlf8+fvy4AEDYuXOnvkItN/n5+YKzs7Pw2WefqbRHR0crf+8AEMLDw1Xu37p1q2BmZiYEBQWp/U4ojcTERCEiIkKt/Z133hEACHfu3FG2PXr0SDAzMxPee+89ZZtcLhe6dOki1K5dWygoKFA7T3x8vGBvby98+eWXAgDhm2++UXs+AIT9
+/ertH/xxRcCAOHKlSs6PydFHz5/rffee08AIMyZM0fn82ly5swZtd9rt2/fFiwsLIS33npLpX3SpEmCpaWl8ODBA2VbaGioAED44YcfNJ7/448/Fho1aiS89dZbGl9nHx8foV27doJcLle2paWlCTY2NsLAgQNVjo2PjxdMTEyEn3/+WefnSWQoHHEiMqBhw4YhOTkZoaGhyrb8/Hzs2rULw4cP1/iYb7/9Fh07doSTkxMsLS3Rpk0btbUQIpEIWVlZ2LRpk3J6zOjRo8t0bcW6i7CwMJV2xfSRjRs36vbkiyAWi9G9e3fluQEgLy8Pc+fORYMGDWBhYQFPT0989NFHatP5RCIRpkyZgq1bt6Jp06awsLBASEgIRCIRoqOjceDAAWV/KM6dkJCAsWPHwtXVFRKJBC1atMCmTZs0Psdvv/0Wy5cvh7e3NywsLBAZGalcz3X79m2MGDEC9vb2qFmzJj7//HMIgoCHDx/ilVdegZ2dHdzc3LB06VKVc+fn5+OLL75AmzZtYG9vD2tra3Tp0gXHjx8vMob169crY2jXrh3+/vtvtX68desWhgwZgpo1a8LS0hKNGjXCnDlzVI55/PgxxowZA1dXV1hYWKBp06b45ZdfSnyN/Pz8NI4myOVyeHh44PXXX1e27dixA23atIGtrS3s7OzQrFkzrFixosRraEvxmu/duxd+fn7K5/Hi1DfF66ToFzs7Ozg5OWHatGnIzc1VHlfc+/n5qVDz5s3DrFmzAABeXl5q76sXJSUlIT09HZ06ddJ4v4uLi1qbXC7H119/jdq1a0MikaBXr164e/euyjGnTp3CG2+8gTp16ih/NqZPn46cnByV40aPHg0bGxtERUWhb9++sLa2Rq1atfDll19CEAS16y5fvhxNmzaFRCKBq6srJkyYgKdPn2qM/XmnT59GUlISevfurfF+f39/eHl5qY12b926FYGBgXB0dCzxGtpwdnZG06ZN1dpfffVVAIVTzBT+/PNPSKVSTJ48WdkmEokwadIkPHr0COfOnVM7zyeffIJGjRphxIgRGq+fnp4OAHB1dVVpd3d3BwBYWlrq+IzUTZs2DWvWrMHs2bOLnP6oq44dO8Lc3FylzcfHB02bNlXpMwDYvXs3Xn75ZdSpU0fZ1rt3bzRs2BC///672rnv3LmDZcuW4bvvvoOpqanG66enp8PFxUVljaydnR1sbGzU+szFxQXNmzfHn3/+qfPzJDIUze9sItKLevXqwd/fH9u3b0e/fv0AAIcOHUJaWhqGDh2KlStXqj1mxYoVGDhwIN566y3k5+djx44deOONN7B//34MGDAAALB582aMGzcO7du3x7vvvgsA8Pb2LvO1y8u9e/cAAE5OTpDL5Rg4cCBOnz6Nd999F02aNMGNGzewbNky3L59W2064l9//YXff/8dU6ZMgbOzM9zd3bF582ZMnz4dtWvXxocffggAqFmzJnJyctC9e3fcvXsXU6ZMgZeXF3bu3InRo0cjNTUV06ZNUzn3hg0bkJubi3fffRcWFhYqH/LefPNNNGnSBIsWLcKBAwcwf/58ODo64ocffkDPnj2xePFibN26FTNnzkS7du3QtWtXAIUfFH766ScMGzYM48ePR0ZGBn7++Wf07dsXFy9eRMuWLVVi2LZtGzIyMpRrGpYsWYLBgwcjKioKZmZmAIDr16+jS5cuMDMzw7vvvot69erh3r172LdvH77++msAQHx8PDp06KBMPGrWrIlDhw5h7NixSE9PL7Z4xptvvol58+YhLi4Obm5uyvbTp0/jyZMnGDp0KAAgNDQUw4YNQ69evbB48WIAhR9Yz5w5o9a3ZXH69Gns2bMHkydPhq2tLVauXInXXnsNMTExcHJyUjl2yJAhqFevHhYuXIjz589j5cqVePr0KX799Vedrjl48GDcvn0b27dvx7Jly+Ds7Ayg8H2liYuLCywtLbFv3z68//77WiUIixYtglgsxsyZM5GWloYlS5bgrbfe
woULF5TH7Ny5E9nZ2Zg0aRKcnJxw8eJFrFq1Co8ePcLOnTtVzieTyRAYGIgOHTpgyZIlCAkJwdy5c1FQUIAvv/xSedyECROwceNGvPPOO5g6dSqio6OxevVqXL16FWfOnFG+zzQ5e/YsRCIRWrVqVeQxw4YNw5YtW7Bo0SKIRCIkJSXhyJEj2Lx5s8a1XpmZmSrJbVHMzMxgb29f7DFxcXEAoHy9AODq1auwtrZGkyZNVI5t37698v7OnTsr2y9evIhNmzbh9OnTRRbB6dq1K8RiMaZNm4alS5eidu3auH79Or7++msMGjQIjRs3LvH5FGf69OlYuXIlPv74YyxYsEDtfrlcjpSUFK3OZW9vX+xrKggC4uPjVRLRx48fIyEhAW3btlU7vn379jh48KBa+wcffIAePXqgf//+GhMroHAd465du7Bq1SoEBQUhNzcXq1atQlpamsbfGW3atCl2SjpRuTPyiBdRlaSYLvf3338Lq1evFmxtbYXs7GxBEAThjTfeEHr06CEIgqBxupziOIX8/HzBz89P6Nmzp0p7SVP1dL22YvrQ8ePHVc6nmD6yYcMGZZuuU/USExOFxMRE4e7du8KCBQsEkUgkNG/eXBAEQdi8ebMgFouFU6dOqTz2+++/FwAIZ86cUbYBEMRisfDPP/+oXUtTXy5fvlwAIGzZskXZlp+fL/j7+ws2NjZCenq6ynO0s7MTEhISVM6heK7vvvuusq2goECoXbu2IBKJhEWLFinbnz59KlhaWqq8LgUFBWpTY54+fSq4uroKY8aMUbYpYnBychJSUlKU7X/++acAQNi3b5+yrWvXroKtra3KFBpBEFSmv4wdO1Zwd3cXkpKSVI4ZOnSoYG9vr/Y+e96///4rABBWrVql0j558mTBxsZG+dhp06YJdnZ2Gqc6lYamqXoABHNzc+Hu3bvKtmvXrqnFp3idXpzuM3nyZAGAcO3aNUEQNL+fn7/W3Llzlbd1maonCP9N07K2thb69esnfP3118Lly5fVjlP8rDVp0kTlvbFixQoBgHDjxg1lm6bXaeHChYJIJFJ5/UeNGiUAEN5//31lm1wuFwYMGCCYm5sLiYmJgiAIwqlTpwQAwtatW1XOGRISorH9RSNGjBCcnJzU2p+fZhYRESEAUP5Mr1mzRrCxsRGysrI0Tt9VxF7Sn27duhUbW15enuDr6yt4eXmpTFEeMGCAUL9+fbXjs7KyBADCJ598otJn7du3F4YNG6b2vF70008/CQ4ODioxjho1Sm16tLYU16pbt64AQJg1a1aJx2rz58Xf6S/avHmzAEBlStzff/8tABB+/fVXteNnzZolABByc3OVbfv37xdMTU2Vv5s1vc6CUDj9rlevXirxOTs7C2fPntUY24IFCwQAKlNdiYyJI05EBjZkyBB88MEH2L9/PwIDA7F///5iR3uen67w9OlTyGQydOnSpcQKUPq4tiFkZWWpfUvfsWNHbN68GUDhN+pNmjRB48aNkZSUpDymZ8+eAIDjx4+jY8eOyvZu3brB19dXq2sfPHgQbm5uGDZsmLLNzMwMU6dOxbBhw3DixAmVyoKvvfZakSMK48aNU/7bxMQEbdu2xaNHjzB27Fhlu4ODAxo1aoSoqCiVY01MTAAUfkucmpoKuVyOtm3b4sqVK2rXefPNN1GjRg3l7S5dugCA8pyJiYk4efIkpk2bpjKFBoDy23FBELB7924MGTIEgiCo9Gvfvn2xY8cOXLlypchpZQ0bNkTLli3x22+/YcqUKQAKRzN27dqFoKAg5XvUwcEBWVlZCA0NRWBgoMZz6UPv3r1VRlSbN28OOzs7lX5WeO+991Ruv//++1i7di0OHjyI5s2bGyxGAAgODkbjxo2xdu1aHD58GIcOHcKcOXPQqlUrbN26VW3E45133lGZNvX8a62oLvj874OsrCzk5OSgY8eOEAQBV69eVXsPKF4v4L9pjgcOHMDRo0cxdOhQ7Ny5E/b29ggICFB5X7Rp
0wY2NjY4fvx4kdOIASA5OVnl/alJ06ZNlVXrOnfujG3btuGVV16BlZWVxuM/+uijIqfEPa+k606ZMgWRkZE4cOCAylSxnJwcjcVwFIUqnp/2uHHjRty4cUOrUvEeHh5o3749+vfvj7p16+LUqVNYuXIlnJ2dNVbh01Z8fDyAwp/Dori5ualMwy5OixYtirzv1q1beO+99+Dv769SsEfRJyX1m4WFBfLz8zF9+nRMnDixxN/NVlZWaNSoEWrXro2XX34ZGRkZWLZsGQYPHoxTp06hQYMGKscrXvOkpCSN012JyhsTJyIDq1mzJnr37o1t27YhOzsbMplMZY3Ii/bv34/58+cjPDxcZY1PafZN0vXahiCRSLBv3z4Ahf8Je3l5oXbt2sr779y5g5s3bxaZsCQkJKjc9vLy0vraDx48gI+PD8Ri1eWcig+wDx480PrcL35Atbe3h0QiUZkSpGh/sRzxpk2bsHTpUty6dQtSqbTY6714HcUHB8X6E0WyUFzZ7sTERKSmpmL9+vVYv369xmNe7NcXvfnmm/j000/x+PFjeHh4ICwsDAkJCXjzzTeVx0yePBm///47+vXrBw8PD/Tp0wdDhgzRexL1Yp8Ahf2iaU2Oj4+Pym1vb2+IxeIi1ybp27BhwzBs2DCkp6fjwoUL2LhxI7Zt24agoCBERESoVJUr6bUGgJiYGHzxxRf43//+p/Z809LSVG6LxWLUr19fpU3x4Vvx/O/cuYO0tLQiP4SW9L4AoLZmSpPhw4dj6dKlmD59Os6ePatSuvxFvr6+Wn8ZUpRvvvkGP/74I7766iv0799f5T5LS0uN2x8opgcqktP09HTMnj0bs2bNgqenZ7HXO3PmDF5++WWcP39eOZ1t0KBBsLOzQ3BwMMaMGVPq5/Txxx/j4MGDmDBhAhwcHDT+zpZIJEWuM9NWXFwcBgwYAHt7e+zatUv5BQ/wX59o02/Lli1DUlISgoODS7zmG2+8AVNTU+X/CQDwyiuvwMfHB3PmzMFvv/2mcrzivVbR9g2k6ouJE1E5GD58OMaPH4+4uDj069cPDg4OGo87deoUBg4ciK5du2Lt2rVwd3eHmZkZNmzYoLG0uD6vXdR/TJr2RNGFiYlJsf/By+VyNGvWDN99953G+1/8AKOPRddFKe7cz3+oKK4NUP1guWXLFowePRqDBg3CrFmz4OLiAhMTEyxcuFC51kvXc5ZEUQp5xIgRRZZ9L2n05c0338Ts2bOxc+dOfPDBB/j9999hb2+vkhS5uLggPDxcObpy6NAhbNiwASNHjlQrwFEWZemTF9/Xhnqfv8jOzg4BAQEICAiAmZkZNm3ahAsXLqBbt27KY0p6XjKZDAEBAUhJScHHH3+Mxo0bw9raGo8fP8bo0aNLVfJaLpfDxcUFW7du1Xh/UV9gKDg5OWlVRGLYsGGYPXs2xo8fDycnJ/Tp06fIY9PS0tSKXWhibm6uce3Yxo0b8fHHH2PixIn47LPP1O53d3fH8ePHIQiCyusfGxsLAKhVqxaAwsI8+fn5ePPNN5WJ5qNHjwAUJrP3799HrVq1YG5ujh9++AGurq5qa4AGDhyIefPm4ezZs6VOnGxsbHDo0CF07doVb731Fuzs7NT6TyaTab05rKOjo1pBiLS0NPTr1w+pqak4deqUsg8UFEUuFH30vNjYWDg6OsLCwgJpaWmYP38+Jk+ejPT0dGXRjMzMTAiCgPv378PKygouLi6IiopCSEiI2pc5jo6O6Ny5M86cOaN2LcV77cUvqIiMhYkTUTl49dVXMWHCBJw/f17tG7Xn7d69GxKJBIcPH1aZIrFhwwa1Y7X9Bk7bayu+7X5xz5oXR2X0zdvbG9euXUOvXr30/q1i3bp1cf36dcjlcpVRp1u3binvN7Rdu3ahfv362LNnj8rzmzt3bqnOpxhReHHPlefVrFkTtra2kMlkpf5W2svLC+3bt1dO19uzZw8GDRqkNnXH3NwcQUFBCAoKglwux+TJk/HD
Dz/g888/V5t2Ux7u3LmjMpJ39+5dyOVy1KtXD4Bu73N9vR/btm2LTZs2afwQWpwbN27g9u3b2LRpE0aOHKlsL2qKllwuR1RUlMoUL8XGporn7+3tjaNHj6JTp06l+hKicePG2Lp1K9LS0oot1FCnTh106tQJYWFhmDRpUpFV1oDC6nHaJNrdunVTq/r5559/Yty4cRg8eDDWrFmj8XEtW7bETz/9hJs3b6okM4oiHIoCLTExMXj69KnGan0LFizAggULcPXqVbRs2RLx8fEak23FiHJBQUGJz6c4Tk5OOHLkCDp16oTBgwcjNDQU/v7+yvsfPnyo9ej78ePHlZVMgcIRo6CgINy+fRtHjx7VmOB5eHigZs2auHTpktp9zxe1efr0KTIzM7FkyRIsWbJE7VgvLy+88sor2Lt3r3IKYlH9pqnPoqOj4ezsXGJCT1RemDgRlQMbGxusW7cO9+/fR1BQUJHHmZiYQCQSqfzHcv/+fY1VhaytrbXamFPba9etWxcmJiY4efIkBg0apGxfu3ZtidcoiyFDhuDgwYP48ccflRUCFXJyciCXy4vcTLEk/fv3x5EjR/Dbb78p1zkVFBRg1apVsLGxUfn231AUowrPf9t94cIFnDt3TuMUtJLUrFkTXbt2xS+//IIZM2aonENxDRMTE7z22mvYtm0bIiIi1Kb1JSYmavVB5M0338SHH36IX375BUlJSSrT9IDC9S7PV7UTi8XKkSzFFB+pVIp79+7B3t5e+S22Ia1Zs0bl2/lVq1YBgLKypJ2dHZydnXHy5EmVyoKa3ueK9502P2fZ2dm4du2ayodbhUOHDgEAGjVqpPXzAFTfOwqCIBRb7n316tXKdYyCIGD16tUwMzNDr169ABT+vK1duxZfffWVWrW2goICZGZmFjkqDRSWGxcEAZcvX1auQyzK/Pnzcfz4cbX3zYtKu8bp5MmTGDp0KLp27YqtW7eqTclVeOWVVzB9+nSsXbsWq1evBlDYN99//z08PDyUayinTp2q8rsPKJy6OGHCBIwePRqvvPKKMllp2LAhjhw5grCwMJWkRLEWtbiqg9ry8PBAaGgoOnfujAEDBuDEiRNo1qwZgNKvcZLJZHjzzTdx7tw5/PnnnxrfrwqvvfYaNm3ahIcPHypH/o8dO4bbt29j+vTpAApHnf/44w+1x65cuRLnzp3D9u3blT/3DRo0gFgsxm+//aasGgoUjuqdOnVKpbKhwuXLl4uNkai8MXEiKidFTZl63oABA/Ddd98hMDAQw4cPR0JCAtasWYMGDRrg+vXrKse2adMGR48exXfffYdatWrBy8sLL730UqmvbW9vjzfeeAOrVq2CSCSCt7c39u/fr9Wah7J4++238fvvv2PixIk4fvw4OnXqBJlMhlu3buH333/H4cOHNZbE1ca7776LH374AaNHj8bly5dRr1497Nq1C2fOnMHy5ctha2ur52ej7uWXX8aePXvw6quvYsCAAYiOjsb3338PX19fZGZmluqcK1euROfOndG6dWu8++678PLywv3793HgwAGEh4cDKCx1ffz4cbz00ksYP348fH19kZKSgitXruDo0aNalTIeMmQIZs6ciZkzZ8LR0VFt9GrcuHFISUlBz549Ubt2bTx48ACrVq1Cy5YtlevIHj9+jCZNmmDUqFF62wusONHR0Rg4cCACAwNx7tw5bNmyBcOHD1f58Dhu3DgsWrQI48aNQ9u2bXHy5EnlyMzz2rRpAwCYM2cOhg4dCjMzMwQFBWlM5LOzs9GxY0d06NABgYGB8PT0RGpqKvbu3YtTp05h0KBBOn+Ybty4Mby9vTFz5kw8fvwYdnZ22L17d5FT5SQSCUJCQjBq1Ci89NJLOHToEA4cOIBPP/1UmSh369YNEyZMwMKFCxEeHo4+ffrAzMwMd+7cwc6dO7FixYpi10F27twZTk5OOHr0aImJU7du3bT6cqI0a5wePHiAgQMHQiQS4fXXX1crzd68eXNlEl+7
dm188MEH+OabbyCVStGuXTvl67J161Zlgtq6dWu0bt1a5TyKKXtNmzZVSaqmTJmCDRs2ICgoCO+//z7q1q2LEydOYPv27QgICFD5Xawo/b5hwwa1/fZK4uPjg8OHD6N79+7o27cvTp8+jfr165d6jdOHH36I//3vfwgKCkJKSgq2bNmicv/zCeynn36KnTt3okePHpg2bRoyMzPxzTffoFmzZnjnnXcAFBZ7eDHZBIC9e/fi4sWLKvfVrFkTY8aMwU8//YRevXph8ODByMjIwNq1a5GTk4PZs2ernCMhIQHXr19XK/hCZFTlXcaPqDp4viR4cTSV0P75558FHx8fwcLCQmjcuLGwYcMGjeW/b926JXTt2lWwtLRUlsEt67UTExOF1157TbCyshJq1KghTJgwQVlauCzlyEuSn58vLF68WGjatKlgYWEh1KhRQ2jTpo0QHBwspKWlKY8DILz33ntaPx9BKCx/+8477wjOzs6Cubm50KxZM7VS1MWVHFY8V0U555Ke24slteVyubBgwQKhbt26goWFhdCqVSth//79wqhRo4S6detqFQNeKJMtCIIQEREhvPrqq4KDg4MgkUiERo0aCZ9//rnac3/vvfcET09PwczMTHBzcxN69eolrF+/Xu0aRenUqZMAQBg3bpzafbt27RL69OkjuLi4CObm5kKdOnWECRMmCLGxsWrPS1Pp/BcVVY5c02tet25dlXMqXqfIyEjh9ddfF2xtbYUaNWoIU6ZMEXJyclQem52dLYwdO1awt7cXbG1thSFDhggJCQka+/mrr74SPDw8BLFYXGxpcqlUKvz444/CoEGDlK+1lZWV0KpVK+Gbb75RKTuuKEe+c+dOlXNoKpUeGRkp9O7dW7CxsRGcnZ2F8ePHK8uxP3+c4v147949oU+fPoKVlZXg6uoqzJ07V5DJZGrxrl+/XmjTpo1gaWkp2NraCs2aNRM++ugj4cmTJxqf3/OmTp0qNGjQQGPsmt6/z9P2d0JJFH1Y1J8XX0eZTKb8OTQ3NxeaNm2qsk1BUYp7Xrdu3RJef/115c9X3bp1hZkzZwpZWVkqx61atUoAIISEhJT6WqdOnRIsLS0FLy8v4fHjxyXGXZRu3boV228vioiIUL6fHBwchLfeekuIi4sr8TpFvc5SqVRYtWqV0LJlS8HGxkawsbERevToIfz1119qx65bt06wsrJSbhtBVBGIBEGHFcdEREQV0Lx58xAcHIzExMRquZB89OjR2LVrV6lHMXURFRWFxo0b49ChQ8opgFS0IUOG4P79+7h48aKxQ6lUWrVqhe7du2PZsmXGDoVIiVP1iIiISGv169fH2LFjsWjRIiZOJRAEAWFhYWpT4qh4ISEhuHPnDg4fPmzsUIhUMHEiIiIinaxbt87YIVQKIpHI4OtEq6LAwMByGT0l0pXmEjRERERERESkxDVOREREREREJeCIExERERERUQmYOBEREREREZWg2hWHkMvlePLkCWxtbZW7VhMRERERUfUjCAIyMjJQq1YtiMXFjylVu8TpyZMn8PT0NHYYRERERERUQTx8+BC1a9cu9phqlzjZ2toCKOwcOzs7g15LKpXiyJEj6NOnD8zMzAx6reqKfWx47GPDYv8aHvvY8NjHhsc+Njz2sWFV1P5NT0+Hp6enMkcoTrVLnBTT8+zs7MolcbKysoKdnV2FeoNUJexjw2MfGxb71/DYx4bHPjY89rHhsY8Nq6L3rzZLeFgcgoiIiIiIqARMnIiIiIiIiErAxImIiIiIiKgE1W6NkzYEQUBBQQFkMlmZziOVSmFqaorc3Nwyn4s0Yx8bnrH72MTEBKamptw+gIiIiIyKidML8vPzERsbi+zs7DKfSxAEuLm54eHDh/zQZyDsY8OrCH1sZWUFd3d3mJubG+X6REREREycniOXyxEdHQ0TExPUqlUL5ubmZfqgKJfLkZmZCRsbmxI31KLSYR8bnjH7WBAE5OfnIzExEdHR0fDx8eHr
TEREREbBxOk5+fn5kMvl8PT0hJWVVZnPJ5fLkZ+fD4lEwg97BsI+Njxj97GlpSXMzMzw4MEDZRxERERE5Y2fNDXgB3CiioU/k0RERGRs/DRCRERERERUAiZOREREREREJWDiZCAyuYDzUck4FJmI81HJkMkFY4eks40bN8LBwcHg1xk9ejQGDRpk8OuUpF69eli+fLmxw1AKCwuDSCRCamqqsUPRSnm9X4iIiIiMgcUhDCAkIhbB+yIRm5arbHO3l2BukC8C/dyNGBlR8UaPHo1NmzaptV++fBktW7Ys/4CIiIiIKgiOOOlZSEQsJm25opI0AUBcWi4mbbmCkIhYI0VGlYlUKjXatQMDAxEbG6v88/jxY9StW9do8RARERFVBEycSiAIArLzC7T6k5Erxdz//QNNk/IUbfP+F4mMXKlW5xME7af3yeVyLFmyBA0aNICFhQXq1KmDr7/+GoDmKV/h4eEQiUS4f/++sm3jxo2oU6cOrKys8OqrryI5OVnlGvPmzUPLli2xefNm1KtXD/b29hg6dCgyMjKUx3Tv3h1Tp07FRx99BEdHR7i5uWHevHlaPYfg4GDUrFkTdnZ2mDhxIvLz81We38KFC+Hl5QVLS0u0aNECu3btUt6veI7Hjh1D27ZtYWVlhY4dO+Lff/9Vuca+ffvQrl07SCQSODs749VXX1W5Pzs7G2PGjIGtrS3q1KmD9evXK++7f/8+RCIRfv/9d3Tp0gWWlpZo164dbt++jb///htt27aFjY0N+vXrh8TEROXj/v77bwQEBMDZ2Rn29vbo1q0brly5onJdkUiEdevWYeDAgbC2tla+di/G1q9fP3Tq1Mmg0/csLCzg5uam8sfExATLli1Ds2bNYG1tDU9PT0yePBmZmZlFnufatWvo0aMHbG1tYWdnhzZt2uDSpUvK+0+fPq3sR09PT0ydOhVZWVkGe15ERERVjUwu4Ny9ZPwZ/hjn7lXOpSGVCafqlSBHKoPvF4f1ci4BQFx6LprNO6LV8ZFf9oWVuXYv0ezZs/Hjjz9i2bJl6Ny5M2JjY3Hr1i2tY7tw4QLGjh2LhQsXYtCgQQgJCcHcuXPVjrt37x727t2L/fv34+nTpxgyZAgWLVqk8kF/06ZNmDFjBi5cuIBz585h9OjR6NSpEwICAoq8/rFjxyCRSBAWFob79+/jnXfegZOTk/K8CxcuxJYtW/D999/Dx8cHJ0+exIgRI3Do0CG0atVKeZ45c+Zg6dKlqFmzJiZOnIgxY8bgzJkzAIADBw7g1VdfxZw5c/Drr78iPz8fBw8eVIlj6dKl+Oqrr/Dpp59i165dmDRpErp164ZGjRopj5k7dy6WL1+OOnXqYMyYMRg+fDhsbW2xYsUKWFlZYciQIfjiiy+wbt06AEBGRgZGjRqFVatWQRAELF26FP3798edO3dga2urPO+8efOwaNEiLF++HKampoiKilLel5qaigEDBsDGxgahoaFF7jO2YMECLFiwoMh+BoDIyEjUqVOn2GM0EYvFWLlyJby8vBAVFYXJkyfjo48+wtq1azUe/9Zbb6FVq1ZYt24dTExMEB4eDjMzMwCF76PAwEDMnz8fv/zyCxITEzFlyhRMmTIFGzZs0Dk2IiKi6saQS0NkcgEXo1OQkJELF1sJ2ns5wkQsKmvIlR4TpyogIyMDK1aswOrVqzFq1CgAgLe3Nzp37qz1OVasWIHAwEB89NFHAICGDRvi7NmzCAkJUTlOLpdj48aNyg/8b7/9No4dO6aSODVv3lyZdPn4+GD16tU4duxYsYmTubk5fvnlF1hZWaFp06b48ssvMWvWLHz11VeQSqVYsGABjh49Cn9/fwBA/fr1cfr0aaxfv16ZoADA119/jW7dugEAPvnkEwwYMAC5ubmQSCT4+uuvMXToUAQHByuPb9GihUoc/fv3x+TJkwEAH3/8MZYtW4bjx4+rJE4zZ85E3759AQDTpk3DsGHDcOzYMXTq1AkAMHbsWGzcuFF5
fM+ePVWusX79ejg4OODEiRN4+eWXle3Dhw/HO++8o7ytSJzi4uLw5ptvwsfHB9u2bYO5uXmR/Thx4kQMGTKkyPsBoFatWsXev3//ftjY2ChvBwYG4qeffsK0adOU+ynVq1cP8+fPx8SJE4tMnGJiYjBr1iw0btwYQOF7QWHhwoV466238MEHHyjvW7lyJbp164Z169Zxk1siIqJiKJaGvDi+pFgasm5E61InT1yrXzQmTiWwNDNB5Jd9tTr2YnQKRm/4u8TjNr7TDu29HLW6tjZu3ryJvLw89OrVS6vjizrHi9PW/P391RKnevXqqYySuLu7IyEhQeWY5s2bq9zWdMyLWrRooTKK4u/vj8zMTDx8+BCZmZnIzs5WS7zy8/NVRptevLa7e+EPd0JCAurUqYPw8HCMHz++2Dief7xIJIKbm1uxz8/V1RUA0KxZM5W25x8THx+Pzz77DGFhYUhISIBMJkN2djZiYmJUztu2bVuNMQUEBKB9+/b47bffYGJS/HvC0dERjo4lv7eK06NHD5Vk1NLSEgBw9OhRLF68GLdu3UJ6ejoKCgqQm5uL7OxsjSNgM2bMwLhx47B582b07t0bb7zxBry9vQEUTuO7fv06tm7dqjxeEATI5XJER0ejSZMmZXoOREREVZVMLiB4X2TxS0P2RaJ3E1eYmui2KseQCVlVwMSpBCKRSOvpcl18asLdXoK4tFyNb2YRADd7Cbr41NTrcKfig21RFKMEz6+ZKm3xAcVUKwWRSAS5XK7zMbpQrKM5cOAAPDw8ir3W87dFosI+Vly7pH7SdL6Snp/iGi+2Pf+YUaNGITk5GStWrEDdunVhYWEBf39/lTVcAGBtba0xpgEDBmD37t2IjIxUSdA00cdUPWtrazRo0EB5Wy6XIyIiAgMHDsSkSZPw9ddfw9HREadPn8bYsWORn5+vMXGaN28ehg8fjgMHDuDQoUOYO3cuduzYgVdffRWZmZmYMGECpk6dqva40kwjJCIiqi4uRierFSF7UVxaLhp/HgJ7SzPYWZrBVmIKO8kLf7/QbmNhis/2RhSZkIkABO+LRICvW7WdtsfESY9MxCLMDfLFpC1XIAJU3niKt9fcIF+9v9l8fHxgaWmJY8eOYdy4cWr316xZEwAQGxuLGjVqACgsDvG8Jk2a4MKFCypt58+f12ucxbl27RpycnKUyc358+dhY2MDT09PODo6wsLCAjExMcppeApyuRzp6elaXaN58+Y4duyYynS48nDmzBmsXbsW/fv3BwA8fPgQSUlJWj9+0aJFsLGxQa9evRAWFgZfX98ij9XHVD1NwsPDIZfLsXTpUmUi/vvvv5f4uIYNG6Jhw4aYPn06hg0bhg0bNuDVV19F69atERkZqZKgERERUdHi03Ox9+pjbDgTrdXxBXIByVn5SM7KL/lgLQgAYtNycTE6Bf7eTno5Z2XDxEnPAv3csW5Ea7W5oW4GnBsqkUjw8ccf46OPPoK5uTk6deqExMRE/PPPPxg7diwaNGgAT09PzJs3D19//TVu376NpUuXqpxj6tSp6NSpE7799lu88sorOHz4sNo0PX0ZOXIkPDw8sHDhQmVbfn4+xo4di88++wz379/H3LlzMWXKFIjFYtja2mLmzJmYPn065HI5OnfujLS0NJw5cwY2NjZqUwyLMnfuXPTq1Qve3t4YOnQoCgoKcPDgQXz88ccGeZ4KPj4+2Lx5M9q2bYv09HTMmjVLq9Gv53377beQyWTo2bMnwsLClOuGXqSPqXqaeHl5QSqVYtWqVQgKCsKZM2fw/fffF3l8Tk4OZs2ahddffx1eXl549OgR/v77b7z22msACtePdejQAVOmTMG4ceNgbW2NyMhIhIaGYvXq1XqPn4iIqDLKlcpw+J847L7yGKfvJEKXonmrh7VCA1cbZOQWID1HWvh3rlR5O/2F209Sc5CQkVfieRMyih/tqsqMmjgtXLgQe/bswa1bt2BpaYmOHTti8eLFKgvxNdm5
cyc+//xz3L9/Hz4+Pli8eLHy2/yKINDPHQG+brgQlYQHCamo6+KAl+o7G3RY8/PPP4epqSm++OILPHnyBO7u7pg4cSKAwmlk27dvx6RJk9C8eXO0a9cO8+fPxxtvvKF8fIcOHfDjjz9i7ty5+OKLL9C7d2989tln+Oqrr/Qea0xMjHLUQqFXr17w8fFB165dkZeXh2HDhqmUMf/qq69Qs2ZNLFy4EFFRUXBwcEDr1q3xySefaH3d7t27Y+fOnfjqq6+waNEi2NnZoWvXrvp6WkX6+eef8e6776J169bw9PTEggULMHPmTJ3Ps2zZMpXkqWHDhgaIVrNmzZph6dKlWLx4MWbPno2uXbti4cKFGDlypMbjTUxMkJycjJEjRyI+Ph7Ozs4YPHiwsjBH8+bNceLECcyZMwddunSBIAjw9vbGm2++WW7PiYiIqCISBAGXHjzF7suPcOB6LDLyCpT3ta1bA6+29sDKY3eQkJ5X7NKQfs3cdfrsee5eMob9WPJso2sPU9G/mTvMdFw/VRWIBF02C9KzwMBADB06FO3atUNBQQE+/fRTREREIDIyssj1HmfPnlV+aHv55Zexbds2LF68GFeuXIGfn1+J10xPT4e9vT3S0tJgZ2encl9ubi6io6Ph5eWll6peimlkdnZ2aokC6Qf72PAqQh/r+2ezIpFKpTh48CD69++vtsaO9IN9bHjsY8NjHxueofpY29LeD1OysefKY+y5+ggPkrOV7R4OlnittQcGt66Nes6Fn48VRRwAzUtDSlPEQSYX0HnxX0Wu1X9eXScrzAhoiKDmtSDWMjmrqO/h4nKDFxl1xOnFqWAbN26Ei4sLLl++XORIgKJs9qxZswAUjkQopvcUN3WIiIiIiKg8lVTaOzOvAAevx2L3lUe4EJ2iPMba3AT9m7ljcOvaeMnLUS05McTSEG3W6g9p64ljt+LxIDkb03aE4/sTUZjVtyF6NHJRFsyqyirUGqe0tDQAKHaNxrlz5zBjxgyVtr59+2Lv3r0aj8/Ly0Ne3n/zNRWFBKRSqVplOalUqiyJXJYqcAqKwTzFOUn/2MeGVxH6WC6XQxAESKXSEkuyVzaK30OlrXRJJWMfGx772PDYx4an7z4+/E883t9xTWNp74lbrqBtXQdEPElHrrTw/1aRCPCv74jBLWshwNdFWdVZJiuATKZ+/l6NnNHdpwsuPXiKhIw8uNhaoG3dGjARi0r9HHo1csaqoS0w/+AtxKX/9/nZzd4Cc/o1Rt+mrpgd6INN52Lw4+n7uBmbjjEbL6FtXQd8GOCDtnVrFHnuivoe1iUeo07Ve55cLsfAgQORmpqK06dPF3mcubk5Nm3ahGHDhinb1q5di+DgYMTHx6sdP2/ePJUNTxW2bdumVkLZ1NQUbm5u8PT0LHaTUSIqX/n5+Xj48CHi4uJQUFBQ8gOIiIiMSC4AwVdMkJoP/Ddeo5mLREB7FznaOguoYVEu4ZVILgD30kVIlwJ2ZoC3nYAXZ+RlSYGjT8Q4FSuCVCi809dBjpfryOGhecVNhZSdnY3hw4dX/Kl6z3vvvfcQERFRbNJUGrNnz1YZoUpPT4enpyf69OmjcY3Tw4cPYWNjo5d1FIIgICMjA7a2ttVi+NIY2MeGVxH6ODc3F5aWlujatWuVXOMUGhqKgICACjXnuyphHxse+9jw2MeGp88+vhCdgtTzl0o8bt7LjTG8vWel/QzzBoC49FysPh6FXVceIzJVjJtpYrzczA3TejVAXcf/Bikq6ntY221tgAqSOE2ZMgX79+/HyZMnUbt27WKPdXNzUxtZio+Ph5ubm8bjLSwsYGGhnr6bmZmpvWgymQwikQgikUgvi+AV05r0dT5Sxz42vIrQx4qfS00/t1VFVX5uFQX72PDYx4bHPjY8ffRxQqZ2079q2Egq/SwnTyczLH69BSZ088Z3obex/3os9l2Pw6GIeAxt74mpPX3gYvffl55iE1NcikkvsVhGedHltTZq4iQIAt5/
/3388ccfCAsLg5eXV4mP8ff3x7Fjx/DBBx8o20JDQ+Hv71/meBQdl52drfM+O0RkONnZhdWF+GGBiIgqspx8GX77Owar/7qr1fEutlVnFkX9mjZYPbw1JnZLwzeH/8WJ24nYcj4Guy4/wuiOXhjXqQ6uJYuwcOlJlfVT7gbc61TfjJo4vffee9i2bRv+/PNP2NraIi4uDgBgb2+vTFxe3Cx12rRp6NatG5YuXYoBAwZgx44duHTpEtavX1/meExMTODg4ICEhAQAgJWVVZmGTuVyOfLz85Gbm8vREANhHxueMftYEARkZ2cjISEBDg4OVa4wBBERVQ1p2VL8eu4+Npy9j5SsfACAWIQiN6xV7LXU3kv/m9Ybm5+HPTaNaY/zUclYEnILV2JS8f2Je9h0Nho5UjEA1U1249JyMWnLlVKVUC9vRk2c1q1bB6BwY9LnbdiwAaNHjwagvllqx44dsW3bNnz22Wf49NNP4ePjg71792q1h5M2FFP+FMlTWQiCgJycHFhaWlbauasVHfvY8CpCHzs4OBQ5HZeIiMhY4tNz8fPpaGw9/wBZ+YWl7zwdLfFuV2/YS0wxbUc4AM2lvecG+Rp1ipqhdajvhN2TOuLozQR8E3ILtxMyoalQhvCsNXhfJAJ83Sp0nxh9ql5JwsLC1NreeOMNvPHGGwaIqHAthbu7O1xcXMpcLlEqleLkyZPo2rUrpxgZCPvY8Izdx2ZmZhxpIiKiCuV+UhZ+OHkPuy8/Rr6scC1wYzdbTOrujQHN3GFqUvilv7mpWK97LVU2IpEIAb6usDI3wVs/XSjyOAFAbFouLkanwN/bqfwC1FGFKA5REZmYmJT5w5qJiQkKCgogkUj4od5A2MeGxz4mIqKKQiYXcDE6xWiFBSIep2HdiXs4dCNWOQ2vbd0amNzDW+MmsIF+7gjwdTNqzBVBUmZeyQcBSMjILfkgI2LiREREREQVXkhErNrojb4KC8jkAi5Ep+BykghO0Snwb+CiTG4EofC+tWH3cPJ2ovIxPRu7YFJ3b7SrV/w6JROxqEKPopQHbYtgVPRiGUyciIiIiKhCC4mIxaQtV/DiIg99FBZQTchM8OudS3C3l+DzAb4wMxVjbdhdXI1JBVBY8CGoRS1M7OaNJu7Fb5ZK/2nv5Qg3OwvEpedC0zqnylIsg4kTEREREVVYMrmA4H2RakkTUPbCAkUlZLFpuZi87YrytrmpGEPa1sa7XbxRx8kKpBsTsQif9W+MKTvCIULlLZbBxImIiIiIKqyL0Skq0/NepCgsMHT9ObjbW8LcVAxzUzEsFH+bKG6bKO8zNxHDVCzCvH3/aEzIFEQA3u1WH2M7e1X4aWQVXd+mrhjTUI6DcVYq+zhVpmIZTJyIiIiIqELKK5Dh8D+xWh379/2nAJ7q9foCgO4NXZg06UkLJwEfvdUVVx9lVMpiGUyciIiIiKhCuZuQgR0XH2LP1cfKDWVLMqZTPXjUsEJ+gRz5BXLkFcgK/y1T3H7ub5kcj59m415iVonnreiV3iqbylwsg4kTERERERldTr4MB27EYsfFGFx68N/IkYutObLzZcjMk2l8nKKwwJwBuq2ROXcvGcN+PF/icRxtIgUmTkRERERkNBGP07Dj7xj8efUJMvIKABSOSvRs7IKh7TzRrWFNHL0Zj0lbCos16KuwQHsvR7jbSxCXlqtxnVNlqfRG5YeJExERERHpjTab1KbnSvFn+BP89ncMIh6nK9vrOFrhzXaeeL1Nbbja/TfSE+jnjnUjWqvt41SWwgImYhHmBvli0pYrlbrSG5UfJk5EREREpBfFbVLbt6kbLj94iu0XH+LAjSfIlcoBAOYmYvT1c8Owdp7oUN8J4iISlUA/dwT4upWYlOnCEAkZVV1MnIiIiIiozIrbE2nilitws5M82wC1kI+LDYa2r4NXW3nA0dpcq2sYorCAIiE7dzcBR05dQJ8uL8G/gQtHmkgNEyciIiIiKpPiNqlViEvPhcRUjKAW
tTC0fR20ruMAkahiJCcmYhFe8nJE8k0BL1Wi8thUvpg4EREREVGZlLRJrcLat1qjZxPXcoiISP/Exg6AiIiIiCo3bfc6UlTNI6qMmDgRERERUZlou9cR90SiyoyJExERERGViWJPpKKIUFhdj3siUWXGxImIiIiIykSxJ5Im3BOJqgomTkRERERUZoF+7qjrZKXW7mYvwboRrbknElV6rKpHRERERGX2IDkLD5KzIRYVVs/LK5DrZZNaooqCiRMRERERldn+67EAgE4NnDm6RFUSp+oRERERUZntu/YEABDUopaRIyEyDCZORERERFQmt+MzcCsuA2YmIvRt6mbscIgMgokTEREREZXJ/mejTd0ausDe0szI0RAZBhMnIiIiIio1QRCw79n6pqAWXNtEVRcTJyIiIiIqtX+epCM6KQsSMzF6N3E1djhEBsPEiYiIiIhKTVEUoldjV1hbsGAzVV1MnIiIiIioVARBUJYh5zQ9quqYOBERERFRqVyJScXj1BzYWJiieyMXY4dDZFBMnIiIiIioVBTT9Pr4ukJiZmLkaIgMi4kTEREREelMJhdw4IZimh43vaWqj4kTEREREensQnQyEjPyYG9phk4NnI0dDpHBMXEiIiIiIp3tu1Y42tTPzw3mpvxISVUf3+VEREREpBOpTI5DEZymR9ULEyciIiIi0snpu0lIzZbC2cYCHeo7GTsconLBxImIiIiIdKKopjegmRtMxCIjR0NUPpg4EREREZHWcqUyHPknHgCn6VH1wsSJiIiIiLQW9m8iMvMKUMtegtZ1ahg7HKJyw8SJiIiIiLS2/3rhNL2XW9SCmNP0qBph4kREREREWsnOL8CxmwkAgJebuxs5GqLyxcSJiIiIiLRy9GYCcqQy1HWyQjMPe2OHQ1SumDgRERERkVYU1fSCmteCSMRpelS9MHEiIiIiohKl5Uhx4t9EAKymR9UTEyciIiIiKtGRf+KQL5OjoasNGrnZGjsconLHxImIiIiISrTveiyAwml6RNUREyciIiIiKlZyZh7O3E0CUFiGnKg6YuJERERERMU6FBEHmVyAn4cdvJytjR0OkVEwcSIiIiKiYik2veU0ParOmDgRERERUZHi03NxIToFADCAm95SNcbEiYiIiIiKdOB6LAQBaFO3BmrXsDJ2OERGw8SJiIiIiIq0TzlNj6NNVL0xcSIiIiIijR6mZONqTCrEIqA/Eyeq5pg4EREREZFG+5/t3dShvhNcbCVGjobIuIyaOJ08eRJBQUGoVasWRCIR9u7dW+Jjtm7dihYtWsDKygru7u4YM2YMkpOTDR8sERERUTWz79qzaXrcu4nIuIlTVlYWWrRogTVr1mh1/JkzZzBy5EiMHTsW//zzD3bu3ImLFy9i/PjxBo6UiIiIqHq5m5CJyNh0mIpFCGzqZuxwiIzO1JgX79evH/r166f18efOnUO9evUwdepUAICXlxcmTJiAxYsXGypEIiIiompJsXdTZx9n1LA2N3I0RMZn1MRJV/7+/vj0009x8OBB9OvXDwkJCdi1axf69+9f5GPy8vKQl5envJ2eng4AkEqlkEqlBo1XcX5DX6c6Yx8bHvvYsNi/hsc+Njz2seGVdx8LgoD/hRcmTv2bulaL15bvY8OqqP2rSzwiQRAEA8aiNZFIhD/++AODBg0q9ridO3dizJgxyM3NRUFBAYKCgrB7926YmZlpPH7evHkIDg5Wa9+2bRusrLgXAREREdGLHmcBS66bwlQkYH5bGSwr1VftRNrLzs7G8OHDkZaWBjs7u2KPrVSJU2RkJHr37o3p06ejb9++iI2NxaxZs9CuXTv8/PPPGh+jacTJ09MTSUlJJXZOWUmlUoSGhiIgIKDIxI7Khn1seOxjw2L/Gh772PDYx4ZX3n28NPQOvj8ZjYAmLlg7vKXBr1cR8H1sWBW1f9PT0+Hs7KxV4lSpvj9YuHAhOnXqhFmzZgEAmjdvDmtra3Tp0gXz58+Hu7v6/gIW
FhawsLBQazczMyu3F608r1VdsY8Nj31sWOxfw2MfGx772PDKo48FQcCBiDgAwCutPKrda8r3sWFVtP7VJZZKtY9TdnY2xGLVkE1MTAAU/pATERERUdlce5SGhyk5sDI3Qc/GLsYOh6jCMGrilJmZifDwcISHhwMAoqOjER4ejpiYGADA7NmzMXLkSOXxQUFB2LNnD9atW4eoqCicOXMGU6dORfv27VGrFvcXICIiIiorxd5NvZq4wsq8Uk1OIjIoo/40XLp0CT169FDenjFjBgBg1KhR2LhxI2JjY5VJFACMHj0aGRkZWL16NT788EM4ODigZ8+eLEdOREREpAdyuaAsQx7UXH0JBFF1ZtTEqXv37sVOsdu4caNa2/vvv4/333/fgFERERERVU9/309BfHoebCWm6NaoprHDIapQKtUaJyIiIiIynH3PRpv6NnWDhamJkaMhqliYOBERERERCmRyHLpRWE0vqAXXjhO9iIkTEREREeFcVDKSs/LhaG2Ojt5Oxg6HqMJh4kREREREymp6/fzcYGbCj4hEL2KNSSIiIqIykMkFXIxOQUJGLlxsJWjv5QgTscjYYekkr0CGkAhO0yMqDhMnIiIiolIKiYhF8L5IxKblKtvc7SWYG+SLQL/KU8771O0kpOcWwMXWAu3qORo7HKIKieOwRERERKUQEhGLSVuuqCRNABCXlotJW64gJCLWSJHpTlFNb0Bz90o3WkZUXpg4EREREelIJhcQvC8SmnajVLQF74uETF70fpUVRU6+DKGR8QA4TY+oOEyciIiIiHR0MTpFbaTpeQKA2LRcXIxOKb+gSumvWwnIzpfBw8ESrTwdjB0OUYXFxImIiIhIB5l5Bdhy/r5WxyZkFJ1cVRSKanpBLWpBJOI0PaKisDgEERERkRay8wuw6ewDrD95D0+zpVo9xsXWwsBRlU1GrhTH/00AAAS1qDzFLIiMgYkTERERUTFy8mXYeuEB1oXdQ3JWPgDAy8kKT7OlSMuRalznpLDm+F3UdbJGLQfL8glWS4oS6gdvPEFegRxezlbwdbczdlhEFRoTJyIiIiINcqUybL8Yg7Vh95CYkQcAqONohWm9fPBKy1o4ejMek7ZcgQhQSZ4Ut03FIpy+m4y+y07i85d98Ubb2hViKpymEuqJGfk4/E9cpSqhTlTeuMaJiIiI6Dl5BTJsPv8A3b8JQ/C+SCRm5MHDwRJLXmuOYx92w2ttasPURIxAP3esG9EabvYSlce72Uvw/YjWODy9K1rXcUBGXgE+2n0d72z8G3HFFJQoD0WVUM/MK6h0JdSJyhtHnIiIiIgASGVy7Lr8CKv/uovHqTkACjezndKzAd5o4wlzU/XvmwP93BHg64aL0SlIyMiFi60E7b0clXsh7ZzYET+fjsK3R24j7N9EBCw7gXlBTTG4tUe5jz4VV0JdIXhfJAJ83biXE5EGTJyIiIioylOs6dGU3BTI5Pjj6mOs/OsOHqYUJkwuthZ4r0cDDG3vCQtTk2LPbSIWwd/bqcj73u3qjZ6NXfDh79dw7VEaPtx5DYciYrHg1WZwsZNofJwh6FJCvajnQ1SdMXEiIiKiKk3Tmh53ewk+H+CLPJkMK47ewf3kbACAs405JnVvgLdeqgOJWfEJky4auNhi96SOWH8qCstD7+DozQT8ff8kvnylKQaWQxlwmVzAmbtJWh1bGUqoExkDEyciIiKqshRrel6cnhablovJ264ob9ewMsPEbt54278urMwN8/HI1ESMyd0boFdjV3y4MxwRj9MxbUc4Dt6IxfxBzVBTz6XLC2RyXIxOwYEbsTj8TxySMvO1epyLbfmNghFVJkyciIiIqErSZk2PCMCMPg3xTicv2FiUz8eiRm62+GNyJ3wfdg8r/7qDw//E42J0Cr4a5IeXm9cq07kLZHJcUCRLEXHK8ukAYCcxhVQmIEcq0/hYEQoLW7T3cixTDERVFRMnIiIiqpJKWtMDFK7raVvXsdySJgUzEzHe
7+WDXk1c8eHOa7gZm44p267i0I04fPlKUzjZWKisy3KyMoW8iAxQKpPjfFQyDt6IxeF/4pHyXLLkYGWGvr5u6N/cHR29nXDsWQl1QL2EOgDMDfJlYQiiIjBxIiIioipJ27U6xlzT41vLDn++1wmrj9/FmuN3ceBGLM5HJeP1Nh7437VYlcTPwdwEZvXi8XLL2pDK5Dh7LxkHr8fiSGQcnmZLlcc5Wpujb1NX9G/mjg71nWBm8l81QEUJ9RfXfLnZSzA3yJf7OBEVg4kTERERVUnartUx9poec1MxZgQ0RB9fV3z4+zX8G5+BH05Gqx2Xmg9M2XENWy8+QmRsOtJy/kuWnKzN0dfPDQOaueMlL0eYmhS9VWdJJdSJSDMmTkRERFQltfdyhLu9BHFpuRrXOVW0NT1+Hvb4472OaP/1UWTmaVqHVJjYnItKBlBYATDQzw39/dzRvoRk6UXFlVAnIs2YOBEREVGVZCIWYW6QLyZuuaJ2X0Vd03PtYVoRSZOqzwc0wehOXhUqdqKqTvuvJoiIiIgqmUA/dwxt56nW7mYvwboRrSvcmh5t11s521owaSIqZxxxIiIioiotMSMPADC0nSf8vZ0q9JqeyrIui6g6YuJEREREVVauVIaz9wrXBI30rwffWnZGjqh4lW1dFlF1wql6REREVGX9fT8FOVIZXGwt0MTd1tjhlEixLgv4bx3WfwpTqYq2LououmDiRERERFXWiX8TAQDdGtaESFQ5kg3FXktu9qrT8RzMgVVDW1S4dVlE1QWn6hEREVGVFXa7MHHq3sjFyJHo5sW9lpysTJEYeR59m7oaOzSiaouJExEREVVJj55m425CJkzEInT2cTZ2ODp7fq8lqVSKgzeNHBBRNcepekRERFQlnXg22tTK0wH2lmZGjoaIKjsmTkRERFQlhf2rmKZX08iREFFVoNNUvdTUVPzxxx84deoUHjx4gOzsbNSsWROtWrVC37590bFjR0PFSURERKS1/AI5zt5NAlD51jcRUcWk1YjTkydPMG7cOLi7u2P+/PnIyclBy5Yt0atXL9SuXRvHjx9HQEAAfH198dtvvxk6ZiIiIqJiXbqfgqx8GZxtLODrXrH3biKiykGrEadWrVph1KhRuHz5Mnx9fTUek5OTg71792L58uV4+PAhZs6cqddAiYiIiLSlWN/UtaEzxNzziIj0QKvEKTIyEk5OTsUeY2lpiWHDhmHYsGFITk7WS3BEREREpfHf+iZO0yMi/dBqql5JSVNZjyciIiLSlyepOfg3PgNiEdClQeUrQ05EFZPOVfU2bdqEAwcOKG9/9NFHcHBwQMeOHfHgwQO9BkdERESkq5PPpum18HRADWtzI0dDRFWFzonTggULYGlpCQA4d+4c1qxZgyVLlsDZ2RnTp0/Xe4BEREREulBO02vIaXpEpD86lSMHgIcPH6JBgwYAgL179+K1117Du+++i06dOqF79+76jo+IiIhIa1KZHGeUZci5fxMR6Y/OI042NjbK4g9HjhxBQEAAAEAikSAnJ0e/0RERERHp4MqDp8jIK4CjtTmaedgbOxwiqkJ0HnEKCAjAuHHj0KpVK9y+fRv9+/cHAPzzzz+oV6+evuMjIiIi0lqYogy5D8uQE5F+6TzitGbNGvj7+yMxMRG7d+9WVtC7fPkyhg0bpvcAiYiIiLTFMuREZCg6jzg5ODhg9erVau3BwcF6CYiIiIioNOLTc3EzNh0iEdDFh2XIiUi/dE6cACA3NxfXr19HQkIC5HK5sl0kEiEoKEhvwRERERFp68SzaXrNPezhZGNh5GiIqKrROXEKCQnB22+/rSwQ8TyRSASZTKaXwIiIiIh0ceLZNL1unKZHRAag8xqn999/H0OGDEFsbCzkcrnKHyZNREREZAwFMjlO3VGsb2IZciLSP50Tp/j4eMyYMQOurq6GiIeIiIhIZ+EPU5GeWwAHKzO0qO1g7HCIqArSOXF6/fXXERYWZoBQiIiIiEpHUU2v
i09NmLAMOREZgM5rnFavXo033ngDp06dQrNmzWBmZqZy/9SpU/UWHBEREZE2wm4nAAC6N+Q0PSIyDJ0Tp+3bt+PIkSOQSCQICwuDSPTftzoikYiJExEREZWrhIxcRDxOBwB0ZeJERAaic+I0Z84cBAcH45NPPoFYrPNMPyIiIiK9OnU7CQDg52GHmrYsQ05EhqFz5pOfn48333xTL0nTyZMnERQUhFq1akEkEmHv3r0lPiYvLw9z5sxB3bp1YWFhgXr16uGXX34pcyxERERUOYU927+pe0OWISciw9E5+xk1ahR+++03vVw8KysLLVq0wJo1a7R+zJAhQ3Ds2DH8/PPP+Pfff7F9+3Y0atRIL/EQERFR5SKTCyxDTkTlQuepejKZDEuWLMHhw4fRvHlzteIQ3333ndbn6tevH/r166f18SEhIThx4gSioqLg6OgIAKhXr57WjyciIqKq5dqjVKRmS2EnMUVLTwdjh0NEVZjOidONGzfQqlUrAEBERITKfc8XijCE//3vf2jbti2WLFmCzZs3w9raGgMHDsRXX30FS0tLjY/Jy8tDXl6e8nZ6euHiUalUCqlUatB4Fec39HWqM/ax4bGPDYv9a3jsY8MzZh//FRkHAOjk7QRBLoNULiv3GMoD38eGxz42rIrav7rEIxIEQTBgLFoTiUT4448/MGjQoCKPCQwMRFhYGHr37o0vvvgCSUlJmDx5Mnr06IENGzZofMy8efMQHBys1r5t2zZYWVnpK3wiIiIygqXXTRCTJcIwbxk6uFSIjzREVIlkZ2dj+PDhSEtLg52dXbHHVqrEqU+fPjh16hTi4uJgb28PANizZw9ef/11ZGVlaRx10jTi5OnpiaSkpBI7p6ykUilCQ0MREBCgNqWR9IN9bHjsY8Ni/xoe+9jwjNXHyVn58F8cBkEAznzUDS5VuKIe38eGxz42rIrav+np6XB2dtYqcdJqqt7EiRPx2WefoXbt2iUe+9tvv6GgoABvvfWWdtHqwN3dHR4eHsqkCQCaNGkCQRDw6NEj+Pj4qD3GwsICFhbqv0jNzMzK7UUrz2tVV+xjw2MfGxb71/DYx4ZX3n18PjoBggA0cbeDh6NNuV3XmPg+Njz2sWFVtP7VJRatEqeaNWuiadOm6NSpE4KCgtC2bVvUqlULEokET58+RWRkJE6fPo0dO3agVq1aWL9+famDL06nTp2wc+dOZGZmwsam8Bfk7du3IRaLtUrqiIiIqOoI+zcBAKvpEVH50Koc+VdffYXbt2+jU6dOWLt2LTp06IA6derAxcUFjRo1wsiRIxEVFYX169fj/PnzaN68uVYXz8zMRHh4OMLDwwEA0dHRCA8PR0xMDABg9uzZGDlypPL44cOHw8nJCe+88w4iIyNx8uRJzJo1C2PGjCmyOAQRERFVPXK5gJN3Cje+7d6QiRMRGZ7WVfVcXV0xZ84czJkzB0+fPkVMTAxycnLg7OwMb2/vUlXUu3TpEnr06KG8PWPGDACFe0Vt3LgRsbGxyiQKAGxsbBAaGor3338fbdu2hZOTE4YMGYL58+frfG0iIiKqvG48TkNKVj5sLUzRum4NY4dDRNWAzuXIAaBGjRqoUaPsv6S6d++O4mpTbNy4Ua2tcePGCA0NLfO1iYiIqPIK+7dw09tODZxhZqLVBBoiojLhbxoiIiKqdMJuc30TEZUvJk5ERERUqTzNykf4w1QAQDcmTkRUTpg4ERERUaVy6m4SBAFo5GoLd3sWhyKi8sHEiYiIiCoVliEnImMoVeJUUFCAo0eP4ocffkBGRgYA4MmTJ8jMzNRrcERERETPk8sFnLxdWBiC0/SIqDzpXFXvwYMHCAwMRExMDPLy8hAQEABbW1ssXrwYeXl5+P777w0RJxEREREiY9ORlJkPa3MTtK3raOxwiKga0XnEadq0aWjbti2ePn2qsunsq6++imPHjuk1OCIiIqLnKabpdWzgDHNTrjggovKj84jTqVOncPbsWZibm6u0
16tXD48fP9ZbYEREREQvUuzfxPVNRFTedP6qRi6XQyaTqbU/evQItra2egmKiIiI6EVp2VJciXkKAOjWkIkTEZUvnROnPn36YPny5crbIpEImZmZmDt3Lvr376/P2IiIiKo0mVzAuXvJ+DP8Mc7dS4ZMLhg7pArt9N0kyAWggYsNatewMnY4RFTN6DxVb+nSpejbty98fX2Rm5uL4cOH486dO3B2dsb27dsNESMREVGVExIRi+B9kYhNy1W2udtLMDfIF4F+7kaMrOJSliHnaBMRGYHOiVPt2rVx7do17NixA9evX0dmZibGjh2Lt956S6VYBBEREWkWEhGLSVuu4MXxpbi0XEzacgXrRrRm8vQCQRBw4rZifZOLkaMhoupI58QJAExNTTFixAh9x0JERFTlyeQCgvdFqiVNACAAEAEI3heJAF83mIhF5RxdxXUzNgMJGXmwNDNBO68axg6HiKqhUiVOT548wenTp5GQkAC5XK5y39SpU/USGBERUVV0MTpFZXreiwQAsWm5uBidAn9vp/ILrIILu/2sDLm3EyxMTYwcDRFVRzonThs3bsSECRNgbm4OJycniET/fRsmEomYOBERERUjIaPopOl5D5KzmDg9h2XIicjYdE6cPv/8c3zxxReYPXs2xGJuPEdERKQLF1uJVsd9+scN/Bn+BAG+rgjwdYWnY/WtIpeeK8WVB4oy5FzfRETGoXPilJ2djaFDhzJpIiIiKoX2Xo5wt5cUO13PVCxCgVzAuahknItKxpf7I9HYzRZ9fF0R4OsGPw87lRkfmsjkAi5GpyAhIxcuthK093KstGumzt5NQoFcQH1na9Rxqr4JJBEZl86J09ixY7Fz50588sknhoiHiIioSjMRizA3yBcTt1xRu0+R1qwe3gpN3O0QGhmP0Mh4/H0/BbfiMnArLgMr/7oLd3sJejcpHInqUN8J5qaqX2ZWtVLniml63ThNj4iMSOfEaeHChXj55ZcREhKCZs2awczMTOX+7777Tm/BERERVUUNXW01tru9kNyM61If47rUx9OsfPx1KwGhkfE4eScRsWm52Hz+ATaffwBbC1N0a1QTAb6u6N7IBefuJVWpUueCIDy3vonT9IjIeEqVOB0+fBiNGjUCALXiEERERFS8H05EAQB6NqqJ8V29S5xOV8PaHK+1qY3X2tRGrlSGs/eSno1GJSApMw/7r8di//VYmIgAE7G4SpU6vx2fibj0XFiYivGSl6OxwyGiakznxGnp0qX45ZdfMHr0aAOEQ0REVLXFpuVgz9VHAID3evqgTV3d9iSSmJmgZ2NX9Gzsiq8HCQh/lKqc0nc3IRMymbzIx1bGUudh/xaWIff3doLEjGXIich4dE6cLCws0KlTJ0PEQkREVOX9eDIaUpmAl7wcdU6aXiQWi9C6Tg20rlMDHwc2xk+nojD/wM0SH/frufvIK5ChRW0H1LA21+mamopOGJJyml5Drm8iIuPSOXGaNm0aVq1ahZUrVxoiHiIioiorJSsf2y/GAAAm92ig9/M3rWWv1XGHIuJwKCIOAFDH0QotPB3QorY9Wno6oGkte1iaax7ZKaroxJx+jcoevAaZeQW49CAFANCN65uIyMh0TpwuXryIv/76C/v370fTpk3VikPs2bNHb8ERERFVJRvPRCNHKoOfhx26+jjr/fyKUudxabka1zkBgJ3EFD0bu+D64zREJWYhJiUbMSnZ2HftCYDCqn+NXG3RwtMBLT3t0by2A3xcbHD0ZnyRRSfe33EN7zQUob+en8/Zu0mQygTUdbKCl7O1ns9ORKQbnRMnBwcHDB482BCxEBERVVkZuVJsPHsfADC5ewODFFRSlDqftOUKRIBKkqO42pLXmyur6qXlSHHjURquPUpF+MPCP4kZeYiMTUdkbDq2Xyx8jMRUDLkgFFt0Ys99MT6SCzDTcExphd3mND0iqjh0Tpw2bNhgiDiIiIiqtG0XYpCeW4D6Na3Rt6mbwa4T6OeOdSNa
q02pe7HUOQDYW5qhs48zOj8b/RIEAXHpubj2MBXhD9Nw/VEqrj9KQ2ZeQbHXFACk5otw6cFTdG7oqpfnIQgCTrAMORFVIDonTkRERKSbXKkMP52OBgBM7OZt8FLggX7uCPB1UyviUNJ1RSIR3O0t4W5vqUyw5HIBP52OwoKDt0q8bkJGnl7iB4B7iZl4nJoDc1MxOtSvHBUAiahq0ypxat26NY4dO4YaNWqgVatWxU4vuHJFfSd0IiKi6mzX5UdIzMhDLXsJBrX0KJdrmohFeik5LhaL0MzDQatj152IgsTcDH18XWFqIi7TdRXV9F7yciyyWAURUXnSKnF65ZVXYGFhAQAYNGiQIeMhIiKqUgpkcvxw8h4AYHzX+jA3LVtCYQzaFJ0ABNxJyMLkrVfg4WCJdzrVw5B2nrCTlG7VUxin6RFRBaNV4jR37lyMGTMGK1aswNy5cw0dExERUZWx/3osHqbkwNHaHEPb1TF2OKWiTdGJN+vL4ejpg+1/P8Tj1BzMP3ATy4/ewZC2nninUz14Olppfb3s/AJcjH5WhpyFIYiogtD6a69NmzYhJyfHkLEQERFVKXK5gHVhhaNNYzrVq9RTzhRFJ9zsJSrtbvYSrBraAv6uAqb3boBzs3th4eBmaOBig8y8AvxyJhrdvjmOiZsv4+/7KRCEosesFM7dS0a+TI7aNSzhXZNlyImoYtC6OIQ2v+iIiIjoP8duJeDf+AzYWJjibf96xg6nzIoqOiGXFeDgg8JjJGYmGNa+Doa288SJ24n4+XQ0Tt1JQsg/cQj5Jw7Na9tjbGcv9G/mDrMX1kHJ5AIuRqdgw5n7AICuDZ0NUradiKg0dKqql5GRAYlEUuwxdnZ2ZQqIiIioKhAEAWvD7gIARnSoC3tLfe5wZDyaik7IZerHiUQidG/kgu6NXHA7PgO/nI7GnquPcf1RGqbtCMfCg7cwqmM9DG9fB/ZWZgiJiFUroX7oRhy6+tRUKaFORGQsOiVODRs2LPI+QRAgEokgk2n47UlERFTNnI9KwdWYVJibijGmcz1jh2NUDV1tsei15pjVtxG2XojBr+ceIC49F4tDbmHlsTto7+WIE882u31earYUk7ZcwboRrZk8EZHR6ZQ47dq1C46OjoaKhYiIqMpQjDa92dYTLrbFz9aoLpxsLDC1lw8mdKuPfddi8dOpKNyKy9CYNAGFRShEAIL3RSLA183g+18RERVHp8SpU6dOcHFhWVAiIqLiXH+UilN3kmAiFuHdrvWNHU6FY2Fqgtfb1MZrrT3w86lozD94s8hjBQCxabm4GJ2il32piIhKq/JtJkFERFTBrT1eWElvYItaOpXhrm5EIhFq2llodWxCRm7JBxERGZDWiVPdunVhYlJ5y6gSERGVh7sJGTgcGQcAmNTd28jRVHzaTmPkdEciMjatp+pFR0cbMg4iIqIqYV1YFAQBCPB1RUNXW2OHU+G193KEu70EcWm50LTxiQiFe0W19+IaayIyLk7VIyIi0pPHqTn4M/wxAGAyR5u0YiIWYW6QL4DCJOl5ittzg3xZGIKIjI6JExERkZ78eDIKBXIBHb2d0KpODWOHU2kE+rlj3YjWcLNXnY7nZi9hKXIiqjB0qqpHRERVn0wu4GJ0ChIycuFiWzhFit/2lywpMw/bL8YAAN7r0cDI0VQ+gX7uCPB143uPiCosJk5ERKQUEhGL4H2RiE37r4KZu70Ec4N8+a1/CTaciUZegRwtatujI8tml4qJWMSS40RUYZUqcTp27BiOHTuGhIQEyOVylft++eUXvQRGRETlKyQiFpO2XFFboB+XlotJW67oZcpUVR3NSs+V4tezDwAAk7o3gEhU+Z8TERGp0jlxCg4Oxpdffom2bdvC3d2d/zkQEVUBMrmA4H2RGquaCShcpB+8LxIBvm6lTnSq8mjWlvMPkJFXgAYuNujj62rscIiIyAB0Tpy+//57bNy4EW+//bYh4iEiIiO4GJ2iktC8SAAQm5aL
P64+Rq/GLrC3NINYhwSqPEazjCVXKsMvpwu37JjUzVunfiEiospD58QpPz8fHTt2NEQsRERkJAkZRSdNz5u58xqAwrUoNazM4Ght/sIfCzg9+7eTtTkcbcxhLzHDvP8ZdjTLmH6/9BBJmfnwcLDEwJa1jB0OEREZiM6J07hx47Bt2zZ8/vnnhoiHiIiMwMVWUvJBACzNxMiRyiGTC0jKzEdSZn6Zr60YzboYnVLpCgNIZXL8cCIKADChW32YmXCXDyKiqkrnxCk3Nxfr16/H0aNH0bx5c5iZmanc/9133+ktOCIiKh/tvRzhbi9BXFquxpEhEQr31Dn9cU/I5AKeZucjOTMfKVn5SMnOR0pmHlKy8pGc9azthT+azvkibUe9KpL/hT/B49QcONuYY0hbT2OHQ0REBqRz4nT9+nW0bNkSABAREaFyHwtFEBFVTiZiEeYG+WLSlitq9yl+s88N8oWJWAQTsQiudhK42mk3SnXmbhLe+ulCicdVtv9B5HIB607cAwCM6ewFiZmJkSMiIiJD0jlxOn78uCHiICIiIwv0c8fYLl746VS0SrtbGSvfdajvVOxolsIHv4Xj5J0kTO7ujfo1bUp1rfJ0JDIedxMyYWthihEd6ho7HCIiMrAybYD76NEjAEDt2rX1EgwRERmXorJeUIta6N3ERS97LT0/miUCVJInxe0m7ra4GZuBXZcfYfeVRxjQzB3v9WiAJu52ZXk6BiMIAtaF3QUAjOxYF3YSsxIeQURElZ3Oq1jlcjm+/PJL2Nvbo27duqhbty4cHBzw1VdfqW2GS0RElUdOvgzHbyUAAMZ19sIrLT3g7+2kl0p3gX7uWDeiNdzsVaf3udlL8P2I1jg0rSv2vtcJvZu4QhCA/ddj0W/FKYz/9RKuPUwt8/X17ey9ZFx7lAYLUzHe6eRl7HCIiKgc6DziNGfOHPz8889YtGgROnXqBAA4ffo05s2bh9zcXHz99ddan+vkyZP45ptvcPnyZcTGxuKPP/7AoEGDtHrsmTNn0K1bN/j5+SE8PFzXp0FERC84cTsR2fkyeDhYonlte72fP9DPHQG+brgYnYKEjFy10ayWng74aVRbRD5Jx5qwuzh4IxahkfEIjYxH14Y1MaVHA7T3ctR7XKWx5njhaNOw9nXgbGNh5GiIiKg86Jw4bdq0CT/99BMGDhyobGvevDk8PDwwefJknRKnrKwstGjRAmPGjMHgwYO1flxqaipGjhyJXr16IT4+Xqf4iYhIs5CIWABAoJ+bwYr9mIhFJZYc961lhzXDW+NuQibWhd3D3vDHOHk7ESdvJ6K9lyPe79kAnRs4q8UokwtFJmX6dDXmKc7eS4apWITxXevr/fxERFQx6Zw4paSkoHHjxmrtjRs3RkpKik7n6tevH/r166drCJg4cSKGDx8OExMT7N27V+fHExGRqrwCGY7dLJym17+Zm5GjKdTAxQZLh7TAB719sO7EPey69AgXo1Pw9s8X0cLTAVN6NEDvJi4QiUQIiYhF8L5I5RotAHB/VtSiVyNnvca1Nqywkt6gVh7wcLDU67mJiKji0jlxatGiBVavXo2VK1eqtK9evRotWrTQW2BF2bBhA6KiorBlyxbMnz+/xOPz8vKQl5envJ2eng4AkEqlkEqlBotTcY3n/yb9Yx8bHvvYsCpK/574NxEZeQVwtbWAn5uN0eN5nputGYJfboyJXerh5zP38dulR7j2MBXjf72Exq426OjthA1nH6hV7ItLy8WkLVew/A0/APrp4zvxmQiNjIdIBIzrVLdC9ZMxVZT3cVXGPjY89rFhVdT+1SUekSAI2uxLqHTixAkMGDAAderUgb+/PwDg3LlzePjwIQ4ePIguXbroFq0iEJGoxDVOd+7cQefOnXHq1Ck0bNgQ8+bNw969e4td4zRv3jwEBwertW/btg1WVlalipWIqKrZeleMi4lidHGT43Wvil3oJ0MKhD0R41ScCHlyxVQ8AZp3ghLgYA7MbS1D
aWftyQXgXroI6VLgfLwIt9PFaO4ox9hGFbufiIioZNnZ2Rg+fDjS0tJgZ1d8JVedR5y6deuG27dvY82aNbh16xYAYPDgwZg8eTJq1apVuoi1IJPJMHz4cAQHB6Nhw4ZaP2727NmYMWOG8nZ6ejo8PT3Rp0+fEjunrKRSKUJDQxEQEAAzM5aqNQT2seGxjw2rIvSvVCbHF+FhAAowoX97vFRBCjAU500AqdlSLDz0L/aEP0HR2+eKkJoP3E0T4b03euvcx4f/icfCg7cQl56n0h7YthH6d2E1PYWK8D6u6tjHhsc+NqyK2r+K2WjaKNU+TrVq1dKpCIQ+ZGRk4NKlS7h69SqmTJkCoLA0uiAIMDU1xZEjR9CzZ0+1x1lYWMDCQr3ikZmZWbm9aOV5reqKfWx47GPDMmb/notORFpOAZxtzOHfwMUgBRUMoaa9Gbo1dnmWOBVv7U0xDqy5AE9HK3g6WqGOoxU8a1jB09ESdRytYG9pplZsIiQiFu/vuKZx095vjtxBfRfbUm8KXFXx94ThsY8Nj31sWBWtf3WJRavE6fr16/Dz84NYLMb169eLPbZ58+ZaX1wXdnZ2uHHjhkrb2rVr8ddff2HXrl3w8uI3f0REpXEoIg4A0KepW6VJmhRcbCUlHwRAgAj3k7NxPzlb4/22FqbPkipLeNawQm1HS6w4ekdj0qQQvC8SAb6Vr8+IiKh0tEqcWrZsibi4OLi4uKBly5YQiUTQtDRKJBJBJpNpffHMzEzcvXtXeTs6Ohrh4eFwdHREnTp1MHv2bDx+/Bi//vorxGIx/Pz8VB7v4uICiUSi1k5ERNopkMlx5J/CxKmfX8WopqeL9l6OcLeXIC4tV2OSIwLgameB8d5ZaNSqA56k5+NhSnbhn6c5iEnJRmJGHjLyChAZm47IWO2mbAgAYtNycTE6pcTy6kREVDVolThFR0ejZs2ayn/ry6VLl9CjRw/lbcVapFGjRmHjxo2IjY1FTEyM3q5HRESqLt5PQXJWPhyszNChfuVLAEzEIswN8sWkLVcgAlSSJ8U40Gf9G0P24DJe8nLUOCUjJ1+GR0+z8fBpNh6mFCZTF6OTceNxyUlUQkZuiccQEVHVoFXiVLduXeW/Hzx4gI4dO8LUVPWhBQUFOHv2rMqxJenevbvGkSuFjRs3Fvv4efPmYd68eVpfj4iIVIU8m6YX0MQVZiZiI0dTOoF+7lg3orXaPk5uz+3jdPBB0Y+3NDeBj6stfFxtlW3n7iVj2I/nS7y2tlMFiYio8tO5OESPHj0QGxsLFxcXlfa0tDT06NFDp6l6RERkPHK5oEyc+jer3EUOAv3cEeDrhovRKUjIyIWLrQTtvRxhIhaVas8QbaYAutkXXoOIiKoHnRMnQRDUKg8BQHJyMqytrfUSFBERGd6VmKdIyMiDrYUpOjaofNP0XmQiFultvZE2UwDnBvmyMAQRUTWideI0ePBgAIUFIEaPHq1S4lsmk+H69evo2LGj/iMkIiKDOHijcLSpt68rLExNjBxNxVPSFECWIiciql60Tpzs7e0BFI442drawtLSUnmfubk5OnTogPHjx+s/QiIi0jtBEBASEQugclbTKy/FTQEkIqLqRevEacOGDQCAevXqYebMmZyWR0RUiV17lIYnabmwMjdB14Y1jR1OhabPKYBERFR56bzGae7cuYaIg4iIytGhZ6NNPRu7QGLGaXpEREQl0Tlx8vLy0lgcQiEqKqpMARERkWEJgoBDNxSb3nKdDhERkTZ0Tpw++OADldtSqRRXr15FSEgIZs2apa+4iIjIQP55ko6YlGxIzMTo3ojT9IiIiLShc+I0bdo0je1r1qzBpUuXyhwQEREZlmLvpm4Na8LaQuf/BoiIiKolvW0T369fP+zevVtfpyMiIgMQBAEHn61vquyb3hIREZUnvSVOu3btgqMjd1AnIqrI7iRkIioxC+YmYvRs7GLscIiIiCoNnedotGrVSqU4hCAIiIuLQ2JiItau
XavX4IiISL8O3igcberi4wxbiZmRoyEiIqo8dE6cBg0apHJbLBajZs2a6N69Oxo3bqyvuIiIyAAU65v6cZoeERGRTriPExFRNRGVmIlbcRkwFYsQ0MTV2OEQERFVKjqvcTp48CAOHz6s1n748GEcOnRIL0EREZH+HXo22tSxgTPsrThNj4iISBc6J06ffPIJZDKZWrsgCPjkk0/0EhQREenfoWfV9Pr5uRk5EiIiospH58Tpzp078PX1VWtv3Lgx7t69q5egiIhIv2KSsxHxOB1iEdDHl9P0iIiIdKVz4mRvb4+oqCi19rt378La2lovQRERkX6F/FM42vSSlxOcbCyMHA0REVHlo3Pi9Morr+CDDz7AvXv3lG13797Fhx9+iIEDB+o1OCIi0o+DNwrXN/Vvxml6REREpaFz4rRkyRJYW1ujcePG8PLygpeXF5o0aQInJyd8++23hoiRiIjK4ElqDsIfpkIkAvo2ZeJERERUGjqXI7e3t8fZs2cRGhqKa9euwdLSEs2bN0fXrl0NER8REZWRYu+mtnVrwMVOYuRoiIiIKiedEycAEIlE6NOnD7p27QoLCwuIRCJ9x0VERHqi3PTWj5veEhERlZbOU/Xkcjm++uoreHh4wMbGBtHR0QCAzz//HD///LPeAyQiotJLyMjF3w9SAACBLENORERUajonTvPnz8fGjRuxZMkSmJubK9v9/Pzw008/6TU4IiIqm8P/xEMQgJaeDqjlYGnscIiIiCotnROnX3/9FevXr8dbb70FExMTZXuLFi1w69YtvQZHRERlc+gGN70lIiLSB50Tp8ePH6NBgwZq7XK5HFKpVC9BERFR2SVn5uF8VDIArm8iIiIqK50TJ19fX5w6dUqtfdeuXWjVqpVegiIiorILjYyHXACa1rJDHScrY4dDRERUqelcVe+LL77AqFGj8PjxY8jlcuzZswf//vsvfv31V+zfv98QMRIRUSkcjFBsesvRJiIiorLSecTplVdewb59+3D06FFYW1vjiy++wM2bN7Fv3z4EBAQYIkYiItJRWrYUZ+8mAWA1PSIiIn0o1T5OXbp0QWhoqFq7IAjc04mIqAIIvRmPArmARq628K5pY+xwiIiIKj2dR5xGjx6NrKwstfb79++ja9euegmKiIjKJiTiWTW9ZhxtIiIi0gedE6dr166hefPmOHfunLJt06ZNaNGiBZydnfUaHBER6S4jV4qTtwun6bGaHhERkX7oPFXv4sWL+PTTT9G9e3d8+OGHuHv3Lg4dOoTvvvsO48ePN0SMRESkg79uJSBfJkf9mtZo6MppekRERPqgc+JkZmaGb775BlZWVvjqq69gamqKEydOwN/f3xDxERGRjg7dKKym18/PjetOiYiI9ETnqXpSqRQffvghFi9ejNmzZ8Pf3x+DBw/GwYMHDREfEZHByeQCzt1Lxp/hj3HuXjJkcsHYIZVadn4Bwm4nAOA0PSIiIn3SecSpbdu2yM7ORlhYGDp06ABBELBkyRIMHjwYY8aMwdq1aw0RJxFVczK5gIvRKUjIyIWLrQTtvRxhIi77aEpIRCyC90UiNi1X2eZuL8HcIF8EVsLEI+zfRORK5fB0tETTWnbGDoeIiKjKKFXitHLlSlhbWwMARCIRPv74Y/Tp0wdvv/223gMkIjJUchMSEYtJW67gxfGluLRcTNpyBetGtK50ydPBG4XV9Pr7uXOaHhERkR7pPFXv559/ViZNz2vVqhUuX76sl6CIiBQUyc3zSRPwX3KjKLutK5lcQPC+SLWkCYCyLXhfZKWatpcrleH4rcJpetz0loiISL+0HnH6/fffMWjQIJibmwMAHj16hFq1akEsLsy9srOzsXr1anz00UeGiZSIqp2SkhsRCpObTg2ckZ0vQ0ZuATJypcjMK0BGbgEycwuQkfesLfdZW14B0nOliE3LUUvGXjx/bFouLkanwN/byUDPUL9O3k5EVr4MtewlaOnpYOxwiIiIqhStE6dhw4Yh
NjYWLi4uAABfX1+Eh4ejfv36AICMjAzMnj2biRMR6c3F6BStkptm844YLIaEjKKvX9GERBRW0wvkND0iIiK90zpxEgSh2NtERPqmS9IiFgG2EjPYWJjCVqL4U3jbRnHb4r+2J6k5WBp6u8TzOttYlOUplJv8AjlCb8YDAPo14zQ9IiIifdO5OAQRUXlxsZVoddzGd9qhW8OaOo2yyOQCtl2MQVxarsapgAoLD97E/FebVfipb2fuJSEjtwAuthZoU6eGscMhIiKqcnQuDkFEVF7aeznC3V6CotIhEQqr63Xx0S1pAgATsQhzg3yV53nxvAAgMRMj4kk6Xl17BrP33MDTrHydrlGeDj2rpte3qRvEeijTTkRERKp0GnE6fPgw7O3tAQByuRzHjh1DREQEACA1NVXvwRFR9aZIbiZtuaJ2nyI1mBvkW+r9nAL93LFuRGu1Uuduz0qdt6nriIWHbmLPlcfYfjEGIRGx+KRfY7zRxrNCJSdSmRxHIjlNj4iIyJB0SpxGjRqlcnvChAkqt7kYmYj0LdDPHR/0bohlR1XXI7npaZPaQD93BPi6Fbm57ndDWmJouzr4fG8E/o3PwMe7b+C3vx/iq0F+aFrLvkzX1pcLUSlIzZbC0doc7es5GjscIiKiKknrxEkulxsyDiKiIj3NLpwi162hMwa3rq2W3JSViVhUbMnx9l6O2D+1MzadvY9lobdxJSYVQatOY6R/Pczo0xB2EjO9xFFaByMU0/RcYWrCGdhERESGwP9hiahCEwQBR/4pLLM90r8eXmnpAX9vJ70lTdoyMxFjXJf6OPZhd7zc3B1yAdh49j56fnsCe68+NlqlUZn8v/4p6+gbERERFY2JExFVaDcep+FJWi6szU3QqYGzscOBm70Eq4e3xpaxL6G+szWSMvPwwW/hGPbjedyJzyi3OGRyAefuJWP50dtIysyHncQUHSvJRr1ERESVEcuRE1GFdvjZaEr3Ri6QmJkYOZr/dPZxxqEPuuCnU9FY9dcdnI9KQb8VpzC2sxem9vKBtUXhr1eZXChy/VRphUTEqhW0kMoEHLsZz1EnIiIiA2HiREQVWkhEYeLUp6mrkSNRZ2Fqgvd6NMDAFrXw5f5IhEbG44eTUfjftSf4/GVfiAB8uV81wXEvY1GLkIhYTNpyRW3vqRypDJO2XMG6Ea2ZPBERERkAp+oRUYV1NyET9xKzYG4iRs/GLsYOp0iejlb4cWRb/DK6LTwdLRGblovJW69g0tYrKkkTAMSl5WLSlisIeVbQQRcyuYDgfZHFbtgbvC8SMrlx1lsRERFVZaUacUpNTcWuXbtw7949zJo1C46Ojrhy5QpcXV3h4eGh7xiJqJpSTNPr2MAJtkauXKeNno1d0dHbGauP38Xqv+5qPEaR0ny8+wYePs1BXn4BIh6K8c+R25DKgbwCOfIL5MgrkCNPKiv8u6Dw75TMfLVE7MVzx6bl4mJ0SrFVAomIiEh3OidO169fR+/evWFvb4/79+9j/PjxcHR0xJ49exATE4Nff/3VEHESUTWkSJz6Nq08m7pKzEzQydu5yMRJIS1Hiq8P3Hx2Sww8uq+3GBIyik6uiIiIqHR0TpxmzJiB0aNHY8mSJbC1tVW29+/fH8OHD9fpXCdPnsQ333yDy5cvIzY2Fn/88QcGDRpU5PF79uzBunXrEB4ejry8PDRt2hTz5s1D3759dX0aRFTBPUnNwfVHaRCJgN5NKt76puJom7i0ruMAL2crxD1+BJ/69WBpYQYLUzEsTE0K/zZ77t+mYkQlZWHRoVslntfFVlLWp0BEREQv0Dlx+vvvv/HDDz+otXt4eCAuLk6nc2VlZaFFixYYM2YMBg8eXOLxJ0+eREBAABYsWAAHBwds2LABQUFBuHDhAlq1aqXTtYmoYlPsTdS2bg3UtLUwcjS60TZxmdW3MdrWscPBgzHo378xzMyKn44okwvYdPY+4tJyNa5zEqGwXHp7L0fdgyYiIqJi6Zw4WVhY
ID09Xa399u3bqFmzpk7n6tevH/r166f18cuXL1e5vWDBAvz555/Yt28fEyeiKubwP/EAKtc0PYX2Xo5wt5doleDIZQVan9dELMLcIF9M2nIFIkDl3IoC53ODfMt9c2AiIqLqQOfEaeDAgfjyyy/x+++/AwBEIhFiYmLw8ccf47XXXtN7gMWRy+XIyMiAo2PR367m5eUhLy9PeVuR9EmlUkilUoPGpzi/oa9TnbGPDc8YfZySlY8L0ckAgF6NnCvl6zunXyO8v+NakQnOnH6NIJcV6Ny/vRo5Y9XQFph/8Bbi0v/73eZmb4E5/RpX2v4yJP6eMDz2seGxjw2PfWxYFbV/dYlHJAiCTnVr09LS8Prrr+PSpUvIyMhArVq1EBcXB39/fxw8eBDW1tY6BwwUJmAlrXF60ZIlS7Bo0SLcunULLi6aSxXPmzcPwcHBau3btm2DlZVVqWIlIsM6nyDC9nsm8LAS8FELmbHDKbVrySLsuS9Gav5/I0AO5gIG15OjhVPZSobLBeBeugjpUsDODPC2E8CBJiIiIt1kZ2dj+PDhSEtLg52dXbHH6pw4KZw+fRrXr19HZmYmWrdujd69e5cqWGUgOiZO27Ztw/jx4/Hnn38We21NI06enp5ISkoqsXPKSiqVIjQ0FAEBASWuXaDSYR8bnjH6eMKWq/jr30RM7emN93t4l8s1DUUmF3DpwVMkZOTBxdYCbevWUJlKx/ew4bGPDY99bHjsY8NjHxtWRe3f9PR0ODs7a5U4lWofJwDo3LkzOnfuXNqHl8mOHTswbtw47Ny5s8SEzcLCAhYW6gvLzczMyu1FK89rVVfsY8Mrrz7OyivA6XuF0/QGNPeo9K+rGYDODUuuCsj3sOGxjw2PfWx47GPDYx8bVkXrX11i0TlxWrlypcZ2kUgEiUSCBg0aoGvXrjAxMdH11FrZvn07xowZgx07dmDAgAEGuQYRGU/Yv4nIL5CjnpMVGrraGDscIiIiIgClSJyWLVuGxMREZGdno0aNGgCAp0+fwsrKCjY2NkhISED9+vVx/PhxeHp6FnuuzMxM3L373yaR0dHRCA8Ph6OjI+rUqYPZs2fj8ePHyk11t23bhlGjRmHFihV46aWXlOXPLS0tYW9vr+tTIaIK6PlNb0UiLtohIiKiikGs6wMWLFiAdu3a4c6dO0hOTkZycjJu376Nl156CStWrEBMTAzc3Nwwffr0Es916dIltGrVSllKfMaMGWjVqhW++OILAEBsbCxiYmKUx69fvx4FBQV477334O7urvwzbdo0XZ8GEVVA+QVyHL+VAADoUwnLkBMREVHVpfOI02effYbdu3fD2/u/BdsNGjTAt99+i9deew1RUVFYsmSJVqXJu3fvjuJqU2zcuFHldlhYmK7hElElcvZeEjLyCuBia4FWng7GDoeIiIhISecRp9jYWBQUqG/YWFBQoJw6V6tWLWRkZJQ9OiKqVhTT9Po0dYWYtbWJiIioAtE5cerRowcmTJiAq1evKtuuXr2KSZMmoWfPngCAGzduwMvLS39RElGVJ5MLCI2MB1C4vomIiIioItE5cfr555/h6OiINm3aKEt9t23bFo6Ojvj5558BADY2Nli6dKnegyWiqutKzFMkZebDTmKKDvWdjB0OERERkQqd1zi5ubkhNDQUt27dwu3btwEAjRo1QqNGjZTH9OjRQ38RElG1cDiicJpe7yauMDPR+TsdIiIiIoMq9Qa4jRs3RuPGjfUZCxFVU4IgIES5vonT9IiIiKjiKVXi9OjRI/zvf/9DTEwM8vPzVe777rvv9BIYEVUfkbHpePQ0BxIzMbo1rGnscIiIiIjU6Jw4HTt2DAMHDkT9+vVx69Yt+Pn54f79+xAEAa1btzZEjERUxR3+p7AoRFefmrA0NzFyNERERETqdF5IMHv2bMycORM3btyARCLB7t278fDhQ3Tr1g1vvPGGIWIkoiruyLNpeoF+nKZHREREFZPOidPNmzcxcuRIAICp
qSlycnJgY2ODL7/8EosXL9Z7gERUtd1PysKtuAyYikXo1djV2OEQERERaaRz4mRtba1c1+Tu7o579+4p70tKStJfZERULSg2ve1Q3wn2VmZGjoaIiIhIM53XOHXo0AGnT59GkyZN0L9/f3z44Ye4ceMG9uzZgw4dOhgiRiKqwhSJU9+mHG0iIiKiikvnxOm7775DZmYmACA4OBiZmZn47bff4OPjw4p6RKSThPRcXIlJBcAy5ERERFSx6ZQ4yWQyPHr0CM2bNwdQOG3v+++/N0hgRFT1HY4srKbXqo4DXO0kRo6GiIiIqGg6rXEyMTFBnz598PTpU0PFQ0TVyBHlND2ONhEREVHFpnNxCD8/P0RFRRkiFiKqRtKypTh3LxkAEyciIiKq+HROnObPn4+ZM2di//79iI2NRXp6usofIiJt/PVvPArkAhq52sLL2drY4RAREREVS+fiEP379wcADBw4ECKRSNkuCAJEIhFkMpn+oiOiKiskgtX0iIiIqPLQOXE6fvy4IeIgomokJ1+GE7cTAbCaHhEREVUOOidO3bp1M0QcRFSNnLyTiFypHB4Olmhay87Y4RARERGVSOc1TgBw6tQpjBgxAh07dsTjx48BAJs3b8bp06f1GhwRVU2KTW8D/dxUpvwSERERVVQ6J067d+9G3759YWlpiStXriAvLw8AkJaWhgULFug9QCKqWqQyOY4+27+J1fSIiIiosihVVb3vv/8eP/74I8zMzJTtnTp1wpUrV/QaHBFVPReiUpCeWwAna3O0qVvD2OEQERERaUXnxOnff/9F165d1drt7e2Rmpqqj5iIqApTTNML8HWFiZjT9IiIiKhy0DlxcnNzw927d9XaT58+jfr16+slKCKqmuRyAUcin5Uh9+M0PSIiIqo8dE6cxo8fj2nTpuHChQsQiUR48uQJtm7dipkzZ2LSpEmGiJGIqojwR6mIT8+DjYUpOno7GTscIiIiIq3pXI78k08+gVwuR69evZCdnY2uXbvCwsICM2fOxPvvv2+IGImoilBM0+vR2AUWpiZGjoaIiIhIezonTiKRCHPmzMGsWbNw9+5dZGZmwtfXFzY2NoaIj4iqCEEQcOQfRTU9VyNHQ0RERKQbnafqbdmyBdnZ2TA3N4evry/at2/PpImISnQnIRPRSVkwNxWjeyMXY4dDREREpBOdE6fp06fDxcUFw4cPx8GDByGTyQwRFxFVMSERhdP0ujRwho2FzoPdREREREalc+IUGxuLHTt2QCQSYciQIXB3d8d7772Hs2fPGiI+IqoiFOubuOktERERVUY6J06mpqZ4+eWXsXXrViQkJGDZsmW4f/8+evToAW9vb0PESESV3MOUbPzzJB1iEdCrCafpERERUeVTpvkyVlZW6Nu3L54+fYoHDx7g5s2b+oqLiKqQI5GFRSHaeznCycbCyNEQERER6U7nEScAyM7OxtatW9G/f394eHhg+fLlePXVV/HPP//oOz4iqgIOR3CaHhEREVVuOo84DR06FPv374eVlRWGDBmCzz//HP7+/oaIjYiqgKTMPPz9IAUA0IeJExEREVVSOidOJiYm+P3339G3b1+YmKhuYBkREQE/Pz+9BUdEld/RyHgIAtDMwx4eDpbGDoeIiIioVHROnLZu3apyOyMjA9u3b8dPP/2Ey5cvszw5Ean4r5oeN70lIiKiyqtUa5wA4OTJkxg1ahTc3d3x7bffomfPnjh//rw+YyOiSi4jV4ozd5MBAIF+nKZHRERElZdOI05xcXHYuHEjfv75Z6Snp2PIkCHIy8vD3r174evra6gYiaiSOv5vIvJlctSvaY0GLrbGDoeIiIio1LQecQoKCkKjRo1w/fp1LF++HE+ePMGqVasMGRsRVXLc9JaIiIiqCq1HnA4dOoSpU6di0qRJ8PHxMWRMRFQF5EplCLuVAICJExEREVV+Wo84nT59GhkZGWjTpg1eeuklrF69GklJSYaMjYgMRCYXcO5eMv4Mf4xz
95Ihkwt6v8aZu0nIypfB3V6C5h72ej8/ERERUXnSesSpQ4cO6NChA5YvX47ffvsNv/zyC2bMmAG5XI7Q0FB4enrC1pZrGIj0RSYXcDE6BQkZuXCxlaC9lyNMxKIynzckIhbB+yIRm5arbHO3l2BukC8C/dzLfH4FxTS9Pr6uEOshbiIiIiJj0rmqnrW1NcaMGYPTp0/jxo0b+PDDD7Fo0SK4uLhg4MCBhoiRqNoJiYhF58V/YdiP5zFtRziG/XgenRf/hZCI2DKfd9KWKypJEwDEpeVi0pYrZT6/QoFMjqM3OU2PiIiIqo5SlyMHgEaNGmHJkiV49OgRtm/frq+YiKo1QyU3MrmA4H2R0DQpT9EWvC+yTNP2FFMAlx+9g5SsfNhbmqK9l2Opz0dERERUUei8Aa4mJiYmGDRoEAYNGqSP0xFVW9okNzN3XsOF6BQUyARIZXLkF8iR/+xvqazw39ICAXnPtxXIkZknRUqWtMhrCwBi03Lx46ko9PF1RS0HS0jMTLSOXdMUwPwCAUdvxut1CiARERGRMeglcSIi/bgYnaI20vSizDwZNpy5b7AYFh26hUWHbgEAXGwt4OEggShbjFtmd1DH2Qa1a1iidg0r1HKQwMK0MLFSjJK9mPDlSGWYtOUK1o1ozeSJiIiIKjUmTkQVSEJG8UmTQq8mLmhayx7mJiKYm4phZiJW/m2huG0ihplp4d/mpiL8G5uBT/dGlHhuzxqWSM7KR3a+DAkZeUjIyAMgxpWT0SrHiUSFiVVtB0v8E5uucZRMIXhfJAJ83fRS3IKIiIjIGJg4EVUgLrYSrY4b17k+/L2ddDp3S88aWHX8LuLScjUmOSIAbvYShM3qAbEIeJotxaOn2bifmIFj56/C1q0eHqfm4tHTHDx6moMcqQzx6XmIT88r9rqKKYAXo1N0jpmIiIioomDiRFSBtPdyhLu9pMTkpjQFF0zEIswN8sWkLVcgAlTOrxgHmhvkqxwVcrQ2h6O1OZq4WkOIEdC/fxOYmZkBAARBQEpWPh49zcEfVx9h49kHJV5f29E0IiIiooqoTFX1iEi/FMlNUUkToJrc6CrQzx3rRrSGm73qyJabvUSndUgikQhONhZo4emAvk21e4y2o2lEREREFRFHnIgqmEA/d/jXd8K5qGSVdjc9bVIb6OeOAF83vW2ua8hRMiIiIqKKgokTUQVTIJPjVlw6AOCzAU1Q09aizMnNi0zEIr2tN9J1CiARERFRZcTEiaiCufzgKZ5mS2FvaYbRHevB1KTiz6hVTAF8cR8nfY2SERERERmbUT+RnTx5EkFBQahVqxZEIhH27t1b4mPCwsLQunVrWFhYoEGDBti4caPB4yQqT6GR8QAKS45XhqRJIdDPHac/7ont4ztgxdCW2D6+A05/3JNJExEREVUJRv1UlpWVhRYtWmDNmjVaHR8dHY0BAwagR48eCA8PxwcffIBx48bh8OHDBo6UqHwIgoAjzxKnPr5uRo5Gd4opgK+09IC/txOn5xEREVGVYdSpev369UO/fv20Pv7777+Hl5cXli5dCgBo0qQJTp8+jWXLlqFv376GCpOo3Pwbn4GYlGxYmIrRtaGzscMhIiIiomcq1Rqnc+fOoXfv3iptffv2xQcffFDkY/Ly8pCX998GnenphYvupVIppFKpQeJUUJzf0NepzqpaHx+6/gQA0MnbCWYioUI8r6rWxxUN+9fw2MeGxz42PPax4bGPDaui9q8u8VSqxCkuLg6urq4qba6urkhPT0dOTg4sLS3VHrNw4UIEBwertR85cgRWVlYGi/V5oaGh5XKd6qyq9PHu6yYARHAtiMPBgweNHY6KqtLHFRX71/DYx4bHPjY89rHhsY8Nq6L1b3Z2ttbHVqrEqTRmz56NGTNmKG+np6fD09MTffr0gZ2dnUGvLZVKERoaioCAAJiZmRn0WtVVVerjJ6k5eHTuFMQiYNobveBkbW7skABUrT6uiNi/hsc+Njz2seGxjw2P
fWxYFbV/FbPRtFGpEic3NzfEx8ertMXHx8POzk7jaBMAWFhYwMLCQq3dzMys3F608rxWdVUV+jjszmMAQNu6jnBzsDZyNOqqQh9XZOxfw2MfGx772PDYx4bHPjasita/usRSeWodA/D398exY8dU2kJDQ+Hv72+kiIj050hkHACgT1PXEo4kIiIiovJm1MQpMzMT4eHhCA8PB1BYbjw8PBwxMTEACqfZjRw5Unn8xIkTERUVhY8++gi3bt3C2rVr8fvvv2P69OnGCJ9Ib9KypTgflQIACPBl4kRERERU0Rg1cbp06RJatWqFVq1aAQBmzJiBVq1a4YsvvgAAxMbGKpMoAPDy8sKBAwcQGhqKFi1aYOnSpfjpp59Yipwqvb/+jYdMLqCRqy3qOlW8aXpERERE1Z1R1zh1794dgiAUef/GjRs1Pubq1asGjIqo/B3559mmt5ymR0RERFQhVao1TkRVUa5UhhO3EwEAfXzdjBwNEREREWnCxInIyM7eS0J2vgzu9hL4eRi2RD4RERERlQ4TJyIjU07T83WFSCQycjREREREpAkTJyIjkskFHL1ZmDgFcJoeERERUYXFxInIiK7GPEVSZj5sJaZ4qb6jscMhIiIioiIwcSIyoiORhaNNvRq7wMyEP47/b+/ew6Kq1j+Af2eA4TogyGVQkIsKiiKiJqKdvAtmpFlHj6KSmKVpmZapqSHZLz3H8tIp0WOJ3QzTtDSTo5KQCmWaqIghclBSQVTkJiKXWb8/PMxxnBkGkGEAv5/nocfZe+21135Z7HhZa69NRERE1FzxNzUiIxFCYP/ZPADAiG6cpkdERETUnDFxIjKSC/mluHizDDJTKZ7wcTJ2c4iIiIioFkyciIykZpre450cYWNu1HdRExEREZEeTJyIjKRmmt5wPxcjt4SIiIiI9GHiRGQEuUV3cOpyESQSYGhXZ2M3h4iIiIj0YOJEZAQH/ztNr1cHezjLLYzcGiIiIiLSh4kTkRHUPN80gtP0iIiIiFoEJk5ETazoTiVSsm4C4DLkRERERC0FEyeiJpaYkY8qpUAnZxt4OVobuzlEREREVAdMnIiaGKfpEREREbU8TJyImtDdqmok/pEPgNP0iIiIiFoSJk5ETSgl6yZuV1TDxdYcPdrbGbs5RERERFRHTJyImlDNNL3hfi6QSiVGbg0RERER1RUTJ6ImolQKHFA938RpekREREQtCRMnoiaSerkQ10vuQm5uin7ebY3dHCIiIiKqByZORE1k/9l7o02DujhDZsofPSIiIqKWhL+9ETWR/el5ALgMOREREVFLxMSJqAlcyC/Ff67fhpmJBIN8nYzdHCIiIiKqJyZORE2gZlGI/h0dIbcwM3JriIiIiKi+mDgRNYGaaXrDOU2PiIiIqEVi4kRkYPnF5TiZUwiAiRMRERFRS8XEicjADpy7N02vp3sbuNhaGLk1RERERNQQTJyIDEz10ttuHG0iIiIiaqmYOBEZUEl5JZIv3AQAjPBTGLk1RERERNRQTJyIDCjp/HVUVCvh7WSNTs42xm4OERERETUQEyciA9p/9t40PS4KQURERNSyMXEiMpCKKiUO/ZEPgNP0iIiIiFo6Jk5EBvLLf26i5G4VHG3MEejextjNISIiIqKHwMSJyEBqVtMb7ucCqVRi5NYQERER0cNg4kRkAEql4DLkRERERK2IqbEbQFSjWilwLLsA+SXlcJZboK+XA0xa6EjNmStFyCsuh7XMBP07tjV2c4iIiIjoITFxomYhPi0X0XvSkVtUrtrmameBqDA/hHZ3NWLLGmZ/eh4AYJCvM8xNTYzcGiIiIiJ6WJyqR0YXn5aLmV/+rpY0AUBeUTlmfvk74tNyH/oc1UqBlKyb+D71ClKybqJaKR66ztrULEPOaXpERERErQNHnMioqpUC0XvSoS2NEQAkAKL3pGO4n6LB0/aaejQr+8ZtZOaXwlQqwSBf50avn4iIiIiaHkecyKiOZRdojDTdTwDILSrH
mgMZSDp/HacvFyLnZhmKyyshhP5Ro6YYzXrQgf9O0wvu2BZ2lmaNXj8RERERNT2OOJFR5ZfoTpru99GhLOBQlto2E6kEdpamMK02wWdXjsHBWgY7SxnsrczQxsoMtpZmWH3gvEFHs7RRTdPz4zQ9IiIiotaCiRMZlbPcok7l/FxtIQAUllWgsKwSdyqrUa0UKLhdCUCC/JzCep+7ZjTrWHYBghtp5bvrJXdxIucWAGAYEyciIiKiVoOJExlVXy8HOMvNkV9yV+t+CQCFnQX2vPK42qhQeWU1CssqcaO4DPGHjqBLj14oqVCisKwShWUVuFVWgXO5xThzpVhvG+o66lUXCeeuQQigh5sdXO0sG61eIiIiIjIuJk5kVFIJ4GKrPXGqSZOiwvw0ptJZmJlAYWeCtlYmyLITCOnmAjMz9eeJUrJuYsKmX/S24XZ5VYPb/6D96ZymR0RERNQacXEIMqq43/7EmSvFMJVK4GgjU9unsLNAzKReDV75rq+XA1ztLKDv6aW3vkvD9M+PI/2q/tGp2ty+W4UjF24AAEZ0UzxUXURERETUvHDEiYzm4o3bWP5DOgBgQWgXRD7uhWPZBcgvKYez3AJ9vRweatEGE6kEUWF+mPnl75AAaotE1HwO8nLAbxcLcCD9Gg6kX8OT/gq8NswHPi7yep/v5/PXUVGlhGdbK3R2tmlwu4mIiIio+WHiREZRVa3EvG9SUVZRjX7eDpj2uBekUkmjLdJQI7S7K2Im9dJ4j5Pivvc4XcgvxbqETPxw+ip+PJOHfWl5COvRDnOGdUZHp7onQDXT9Ib7uUAiabxV+oiIiIjI+Jg4kVFsSMrC7zmFkJub4v2/BkDaiMuBPyi0uyuG+yl0jmZ1crbBPycEYvbgTlh78Dz2peVh96mr+OH0VYwJbI9Xh3SGp6N1reeorFYi4dx/n2/iND0iIiKiVoeJEzW5M5eLsPZgJgAgenQ3uNlbGfycJnUYzfJVyBEzqTfOXr3XvgPp17Dz9yv4PvUqnuvlhtlDOsHdQXtbj2UXoLi8Cm2tZejVwd4Ql0BERERERsTFIahJlVdW47VtJ1GlFHjSX4FnAtsbu0kaurWzw6YpfbB79gAM9nVCtVJg2/E/MeSDRCzedQZXC++oylYrBVKybmJD0r2X8w7p4tyoL9MlIiIiouaBI07UpFbu+wNZ12/DWW6O/xvj36yfBerh1gaxU/vixKVbWHvwPA5n3sBXv+Zg+/HLmBjUAV0VcqxNyFR7durguWuIT8tt8EqARERERNQ8ccSJmszP569jS/JFAMA/nusBe2tZ7Qc0E7097PHFtCBse7EfgrwcUFGtxJbki1iw84xa0gQAhWWVmPnl74hPyzVSa4mIiIjIEJpF4vTxxx/D09MTFhYWCAoKwrFjx2otv3btWvj6+sLS0hLu7u6YO3cuysvLaz2GjKuwrALzd5wCAEwJ9sAgX2cjt6j+grzbIu7Ffvgysi/MTLSPlNUseR69Jx3VSqG1DBERERG1PEZPnLZt24Z58+YhKioKv//+OwICAhASEoL8/Hyt5bdu3YqFCxciKioK586dw6effopt27bhrbfeauKWU10JIbD4uzRcK74Lb0drLBrZ1dhNajCJRAITEykqq3UnRQJAblE5jmUXNF3DiIiIiMigjJ44rV69GtOnT8fUqVPh5+eHDRs2wMrKCps3b9ZaPjk5GQMGDMDEiRPh6emJESNGYMKECXpHqch4dp+6ir2nc2EilWDN+J6wlJkYu0kPJb+kbqObdS1HRERERM2fUReHqKiowIkTJ7Bo0SLVNqlUimHDhiElJUXrMf3798eXX36JY8eOoW/fvvjPf/6DH3/8EZMnT9Za/u7du7h7967qc3FxMQCgsrISlZWVjXg1mmrqN/R5mrPconIs+S4NADBrkDf8FNaNGg9jxLitVd1+bNpambaK7z37sWExvobHGBseY2x4jLHhMcaG1VzjW5/2SIQQRnsQ4+rVq2jfvj2S
k5MRHBys2v7mm28iKSkJv/76q9bjPvzwQ7zxxhsQQqCqqgozZsxATEyM1rLLli1DdHS0xvatW7fCysrw7w96lCkFsD5disxiKTxsBOZ0r4aOR4NaFKUAon83QWEFAGi7IIE2MiCqVzW4MjkRERFR81VWVoaJEyeiqKgItra2tZZtccuRJyYm4r333sP69esRFBSECxcuYM6cOVi+fDmWLl2qUX7RokWYN2+e6nNxcTHc3d0xYsQIvcF5WJWVlThw4ACGDx8OMzMzg56rOYpNvoTM4gxYmkmxaVowvBytG/0cxoqxmec1vBJ3b7GL+//yIPnvf98dG4CQbi5N1h5DetT7saExvobHGBseY2x4jLHhMcaG1VzjWzMbrS6Mmjg5OjrCxMQE165dU9t+7do1KBQKrccsXboUkydPxgsvvAAA8Pf3x+3bt/Hiiy9i8eLFkErVH9syNzeHubm5Rj1mZmZN9k1rynM1Fxl5JXj/QCYAYPEoP/i4tjHo+Zo6xk/1dIOpqQmi96SrLUmusLNAVJhfq3yP06PYj5sS42t4jLHhMcaGxxgbHmNsWM0tvvVpi1ETJ5lMht69eyMhIQFjxowBACiVSiQkJGD27NlajykrK9NIjkxM7i02YMRZh3Sfu1XVeG1bKiqqlBjs64TwoA7GbpJBhHZ3xXA/BY5lFyC/pBzOcgv09XKACefnEREREbU6Rp+qN2/ePERERKBPnz7o27cv1q5di9u3b2Pq1KkAgClTpqB9+/ZYsWIFACAsLAyrV69GYGCgaqre0qVLERYWpkqgyLjWHszEudxi2FuZ4e/P9YBE0noTCROpBMEd2xq7GURERERkYEZPnMaPH4/r16/j7bffRl5eHnr27In4+Hi4uNx7PiQnJ0dthGnJkiWQSCRYsmQJrly5AicnJ4SFheH//u//jHUJdJ/fLhZgQ1IWAGDFWH84yy2M3CIiIiIioodn9MQJAGbPnq1zal5iYqLaZ1NTU0RFRSEqKqoJWkb1UVJeibnbUiEE8Fxvt1b5nA8RERERPZqM/gJcaj2W/5COy7fuoH0bS0SF+Rm7OUREREREjaZZjDhR46pWiiZfsODfZ/PwzfHLkEiA1eMCILdoPqulEBERERE9LCZOrUx8Wq7GEtmuBl4iO7+kHIt2ngEAvPiEN4K8uVgCEREREbUunKrXisSn5WLml7+rJU0AkFdUjplf/o74tNxGP6cQAgu/PYOC2xXo6mqLecN9Gv0cRERERETGxsSplahWCkTvSYe2N1nVbIvek45qZeO+6+rrY3/ipz/yITORYu34njA35ZLwRERERNT6cKpeK3Esu0BjpOl+AkBuUTl2nPgTzwS6QWbasJz5/uenlEqBd/acBQC8GeoLX4W8QXUSERERETV3TJxaifwS3UnT/RZ8ewaLd6XB28kaPi5ydFHI4auwha+LHG72lpDWsoiEtuenAMDHxQaRA7weqv1ERERERM0ZE6dWwsnGvE7lLM2kuFOpxPlrpTh/rRQ/nP7fc09WMhN0dpGji4scPoqapEoORxtz1fNT2ib6nb9Wiv3peXxvExERERG1WkycWoH8knJ8fOhCrWUkABR2Fjj85mDkl9xFRl4JMq6VICOvBH/klSArvxRlFdU49WchTv1ZqHasg5UZSu9Wa02aauqO3pOO4X4Kgy97TkRERERkDEycWrjDmdcxd1sqbpRWwMxEgspqAQmgluTUpDJRYX4wNZGiXRtLtGtjicFdnFVlqqqVuHjzNjLySpGRV4w/8kpw/loJLhWUoaCsstY21Dw/dSy7AMEduRQ5EREREbU+TJxaqMpqJVYfOI8NSVkQAvB1keOjiYHIul6q8RySog7vcTI1kaKTsxydnOUY1eN/5coqqvDpkWx8sP+83jbV9TkrIiIiIqKWholTC3T5Vhle/fokfs8pBABMDOqAt5/yg4XZvWeUhvspVCvfOcst0NfLocFT6Kxkpujj4VCnss5yiwadg4iIiIiouWPi
1MLEp+XizR2nUVxeBbm5KVY+20NthAgATKSSRp0y19fLAa52FsgrKtf6nFPN81N9veqWYBERERERtTR8AW4LUV5ZjaXfpWHGl7+juLwKAe5t8OOcv2gkTYZgIpUgKswPwP+el6px//NTXBiCiIiIiForJk4twIX8Uoz5+Ci++OUSAOClJ7yxY0Yw3B2smqwNod1dETOpFxR26tPxFHYWiJnUi0uRExEREVGrxql6zZgQAjtOXMbb35/FncpqtLWW4YNxARjk66z/YAMI7e7aqM9PERERERG1FEycmqnSu1VY+l0adp28AgDo37Et1ozvCRdb4y7A0NjPTxERERERtQRMnJqhtCtFeOXrk8i+cRtSCTB3mA9eHtyJIztEREREREbCxMmIqpVCbdrbY572+OKXS1jx4x+oqFbC1c4C6/4WyNXqiIiIiIiMjImTkcSn5Wq8qNbcVIq7VUoAwLCuLlj1XA/YW8uM1UQiIiIiIvovJk5GEJ+Wi5lf/q7xTqSapGlcHzf8/dkekEg4NY+IiIiIqDngcuRNrFopEL0nXeuLZGsczrwBZW0FiIiIiIioSTFxamLHsgvUpudpk1tUjmPZBU3UIiIiIiIi0oeJUxPLL6k9aapvOSIiIiIiMjwmTk3MWV639zDVtRwRERERERkeE6cm1tfLAa52FtC17IMEgKudBZcgJyIiIiJqRpg4NTETqQRRYX4AoJE81XyOCvPjy26JiIiIiJoRJk5GENrdFTGTekFhpz4dT2FngZhJvRDa3dVILSMiIiIiIm34HicjCe3uiuF+ChzLLkB+STmc5fem53GkiYiIiIio+WHiZEQmUgmCO7Y1djOIiIiIiEgPTtUjIiIiIiLSg4kTERERERGRHkyciIiIiIiI9GDiREREREREpAcTJyIiIiIiIj2YOBEREREREenBxImIiIiIiEgPJk5ERERERER6MHEiIiIiIiLSg4kTERERERGRHkyciIiIiIiI9GDiREREREREpAcTJyIiIiIiIj1Mjd2ApiaEAAAUFxcb/FyVlZUoKytDcXExzMzMDH6+RxFjbHiMsWExvobHGBseY2x4jLHhMcaG1VzjW5MT1OQItXnkEqeSkhIAgLu7u5FbQkREREREzUFJSQns7OxqLSMRdUmvWhGlUomrV69CLpdDIpEY9FzFxcVwd3fHn3/+CVtbW4Oe61HFGBseY2xYjK/hMcaGxxgbHmNseIyxYTXX+AohUFJSgnbt2kEqrf0ppkduxEkqlcLNza1Jz2lra9usOkhrxBgbHmNsWIyv4THGhscYGx5jbHiMsWE1x/jqG2mqwcUhiIiIiIiI9GDiREREREREpAcTJwMyNzdHVFQUzM3Njd2UVosxNjzG2LAYX8NjjA2PMTY8xtjwGGPDag3xfeQWhyAiIiIiIqovjjgRERERERHpwcSJiIiIiIhIDyZOREREREREejBxIiIiIiIi0oOJkx4///wzwsLC0K5dO0gkEnz33Xdq+4UQePvtt+Hq6gpLS0sMGzYMmZmZamUKCgoQHh4OW1tbtGnTBtOmTUNpaalamdOnT+Mvf/kLLCws4O7ujn/84x+GvrRmo7YYV1ZWYsGCBfD394e1tTXatWuHKVOm4OrVq2p1eHp6QiKRqH2tXLlSrQxjrLsfP//88xrxCw0NVSvDfqybvvg+GNuar1WrVqnKsA/XbsWKFXjssccgl8vh7OyMMWPGICMjQ61MeXk5Zs2ahbZt28LGxgbPPvssrl27plYmJycHo0aNgpWVFZydnTF//nxUVVWplUlMTESvXr1gbm6OTp06YcuWLYa+PKPTF9+CggK88sor8PX1haWlJTp06IBXX30VRUVFavVo6+dxcXFqZR7F+AJ168ODBg3SiN+MGTPUyrAP66YvxhcvXtR5P96+fbuqHPuxbjExMejRo4fqJbbBwcHYt2+fan+rvw8LqtWPP/4oFi9eLHbu3CkAiF27dqntX7lypbCzsxPfffedOHXq
lHj66aeFl5eXuHPnjqpMaGioCAgIEL/88os4fPiw6NSpk5gwYYJqf1FRkXBxcRHh4eEiLS1NfP3118LS0lJs3LixqS7TqGqLcWFhoRg2bJjYtm2b+OOPP0RKSoro27ev6N27t1odHh4e4p133hG5ubmqr9LSUtV+xrj2fhwRESFCQ0PV4ldQUKBWhv1YN33xvT+uubm5YvPmzUIikYisrCxVGfbh2oWEhIjY2FiRlpYmUlNTxZNPPik6dOigFqMZM2YId3d3kZCQII4fPy769esn+vfvr9pfVVUlunfvLoYNGyZOnjwpfvzxR+Ho6CgWLVqkKvOf//xHWFlZiXnz5on09HTxz3/+U5iYmIj4+Pgmvd6mpi++Z86cEWPHjhW7d+8WFy5cEAkJCaJz587i2WefVasHgIiNjVXrx/f///BRja8QdevDAwcOFNOnT1eLX1FRkWo/+3Dt9MW4qqpK434cHR0tbGxsRElJiaoe9mPddu/eLfbu3SvOnz8vMjIyxFtvvSXMzMxEWlqaEKL134eZONXDg78QKZVKoVAoxKpVq1TbCgsLhbm5ufj666+FEEKkp6cLAOK3335Tldm3b5+QSCTiypUrQggh1q9fL+zt7cXdu3dVZRYsWCB8fX0NfEXNj7ZfOh907NgxAUBcunRJtc3Dw0OsWbNG5zGM8f/oSpxGjx6t8xj247qrSx8ePXq0GDJkiNo29uH6yc/PFwBEUlKSEOLevdfMzExs375dVebcuXMCgEhJSRFC3EtwpVKpyMvLU5WJiYkRtra2qri++eabolu3bmrnGj9+vAgJCTH0JTUrD8ZXm2+++UbIZDJRWVmp2qav/zO+/6MtxgMHDhRz5szReQz7cP3UpR/37NlTREZGqm1jP64fe3t78cknnzwS92FO1XsI2dnZyMvLw7Bhw1Tb7OzsEBQUhJSUFABASkoK2rRpgz59+qjKDBs2DFKpFL/++quqzBNPPAGZTKYqExISgoyMDNy6dauJrqblKCoqgkQiQZs2bdS2r1y5Em3btkVgYCBWrVqlNuzLGOuXmJgIZ2dn+Pr6YubMmbh586ZqH/tx47l27Rr27t2LadOmaexjH667miliDg4OAIATJ06gsrJS7X7cpUsXdOjQQe1+7O/vDxcXF1WZkJAQFBcX4+zZs6oy99dRU6amjkfFg/HVVcbW1hampqZq22fNmgVHR0f07dsXmzdvhrjvdZGM7//oivFXX30FR0dHdO/eHYsWLUJZWZlqH/tw/ejrxydOnEBqaqrW+zH7sX7V1dWIi4vD7du3ERwc/Ejch031FyFd8vLyAEDtm1/zuWZfXl4enJ2d1fabmprCwcFBrYyXl5dGHTX77O3tDdL+lqi8vBwLFizAhAkTYGtrq9r+6quvolevXnBwcEBycjIWLVqE3NxcrF69GgBjrE9oaCjGjh0LLy8vZGVl4a233sLIkSORkpICExMT9uNG9Nlnn0Eul2Ps2LFq29mH606pVOK1117DgAED0L17dwD3YiCTyTT+oPLg/Vjb/bpmX21liouLcefOHVhaWhrikpoVbfF90I0bN7B8+XK8+OKLatvfeecdDBkyBFZWVti/fz9efvlllJaW4tVXXwXA+NbQFeOJEyfCw8MD7dq1w+nTp7FgwQJkZGRg586dANiH66Mu/fjTTz9F165d0b9/f7Xt7Me1O3PmDIKDg1FeXg4bGxvs2rULfn5+SE1NbfX3YSZO1GJUVlZi3LhxEEIgJiZGbd+8efNU/+7RowdkMhleeuklrFixAubm5k3d1Bbnb3/7m+rf/v7+6NGjBzp27IjExEQMHTrUiC1rfTZv3ozw8HBYWFiobWcfrrtZs2YhLS0NR44cMXZTWiV98S0uLsaoUaPg5+eHZcuWqe1bunSp6t+BgYG4ffs2Vq1apfqFk+7RFeP7E1F/f3+4urpi6NChyMrKQseOHZu6mS2avn58584dbN26Va3P1mA/rp2vry9SU1NRVFSEHTt2
ICIiAklJScZuVpPgVL2HoFAoAEBjtZBr166p9ikUCuTn56vtr6qqQkFBgVoZbXXcf45HXU3SdOnSJRw4cEBttEmboKAgVFVV4eLFiwAY4/ry9vaGo6MjLly4AID9uLEcPnwYGRkZeOGFF/SWZR/Wbvbs2fjhhx9w6NAhuLm5qbYrFApUVFSgsLBQrfyD92N9MdRVxtbWttX/FRnQHd8aJSUlCA0NhVwux65du2BmZlZrfUFBQbh8+TLu3r0LgPEF9Mf4fkFBQQCgdi9mH9avLjHesWMHysrKMGXKFL31sR+rk8lk6NSpE3r37o0VK1YgICAA69ateyTuw0ycHoKXlxcUCgUSEhJU24qLi/Hrr78iODgYABAcHIzCwkKcOHFCVeann36CUqlU3RCDg4Px888/o7KyUlXmwIED8PX1faSm3+hSkzRlZmbi4MGDaNu2rd5jUlNTIZVKVdPLGOP6uXz5Mm7evAlXV1cA7MeN5dNPP0Xv3r0REBCgtyz7sDohBGbPno1du3bhp59+0pi22Lt3b5iZmandjzMyMpCTk6N2Pz5z5ozaHwFq/hDj5+enKnN/HTVlauporfTFF7j3/7cRI0ZAJpNh9+7dGqOm2qSmpsLe3l41avqoxheoW4wflJqaCgBq92L2Yd3qE+NPP/0UTz/9NJycnPTWy35cO6VSibt37z4a92EjLkzRIpSUlIiTJ0+KkydPCgBi9erV4uTJk6oV3VauXCnatGkjvv/+e3H69GkxevRorcuRBwYGil9//VUcOXJEdO7cWW0Z58LCQuHi4iImT54s0tLSRFxcnLCysnpklhmuLcYVFRXi6aefFm5ubiI1NVVtadCa1VeSk5PFmjVrRGpqqsjKyhJffvmlcHJyElOmTFGdgzHWHeOSkhLxxhtviJSUFJGdnS0OHjwoevXqJTp37izKy8tVdbAf66bvPiHEveXEraysRExMjMbx7MP6zZw5U9jZ2YnExES1+0BZWZmqzIwZM0SHDh3ETz/9JI4fPy6Cg4NFcHCwan/NMrgjRowQqampIj4+Xjg5OWldBnf+/Pni3Llz4uOPP242y+Aakr74FhUViaCgIOHv7y8uXLigVqaqqkoIcW+Z4k2bNokzZ86IzMxMsX79emFlZSXefvtt1Xke1fgKoT/GFy5cEO+88444fvy4yM7OFt9//73w9vYWTzzxhKoO9uHa1eU+IYQQmZmZQiKRiH379mnUwX5cu4ULF4qkpCSRnZ0tTp8+LRYuXCgkEonYv3+/EKL134eZOOlx6NAhAUDjKyIiQghxb0nypUuXChcXF2Fubi6GDh0qMjIy1Oq4efOmmDBhgrCxsRG2trZi6tSpau8LEEKIU6dOiccff1yYm5uL9u3bi5UrVzbVJRpdbTHOzs7Wug+AOHTokBBCiBMnToigoCBhZ2cnLCwsRNeuXcV7772n9ku/EIyxrhiXlZWJESNGCCcnJ2FmZiY8PDzE9OnT1ZYKFYL9uDb67hNCCLFx40ZhaWkpCgsLNY5nH9ZP130gNjZWVebOnTvi5ZdfFvb29sLKyko888wzIjc3V62eixcvipEjRwpLS0vh6OgoXn/9dbXltIW49/3s2bOnkMlkwtvbW+0crZW++Orq4wBEdna2EOLeKwp69uwpbGxshLW1tQgICBAbNmwQ1dXVaud6FOMrhP4Y5+TkiCeeeEI4ODgIc3Nz0alTJzF//ny19zgJwT5cm7rcJ4QQYtGiRcLd3V2jbwrBfqxPZGSk8PDwEDKZTDg5OYmhQ4eqkiYhWv99WCLEfesrEhERERERkQY+40RERERERKQHEyciIiIiIiI9mDgRERERERHpwcSJiIiIiIhIDyZOREREREREejBxIiIiIiIi0oOJExERERERkR5MnIiIiIiIiPRg4kRE9F8SiQTfffddncsvW7YMPXv2rLXM888/jzFjxjxUu+rq4sWLkEgkSE1NbZLzNdSWLVvQpk0bYzejyeKVkJCArl27orq6
+qHrGjRoEF577bWHbxQ1iKenJ9auXQsAqKiogKenJ44fP27cRhFRk2HiREQtRlhYGEJDQ7XuO3z4MCQSCU6fPt3g+nNzczFy5MgGH28ogwYNgkQigUQigbm5Odq3b4+wsDDs3LlTrZy7uztyc3PRvXt3I7W0bsaPH4/z588b/DzZ2dmYOHEi2rVrBwsLC7i5uWH06NH4448/ADRdvN58800sWbIEJiYmAO4ljhKJRKMvFxYWQiKRIDExUWddO3fuxPLlyx+qPdqS+R07dsDCwgIffPBBnep4mGt4/vnnIZFIMGPGDI19s2bNgkQiwfPPP1+ndgC6E+Bly5ZpbSMArFq1ChKJBIMGDarzeR4kk8nwxhtvYMGCBQ2ug4haFiZORNRiTJs2DQcOHMDly5c19sXGxqJPnz7o0aNHveutqKgAACgUCpibmz90Ow1h+vTpyM3NRVZWFr799lv4+fnhb3/7G1588UVVGRMTEygUCpiamhqxpfpZWlrC2dnZoOeorKzE8OHDUVRUhJ07dyIjIwPbtm2Dv78/CgsLATRNvI4cOYKsrCw8++yzattNTU1x8OBBHDp0qF71OTg4QC6XN2YT8cknnyA8PBwxMTF4/fXX63xcQ68BuJe0xsXF4c6dO6pt5eXl2Lp1Kzp06FDv+nRxdXXFoUOHNO4ZmzdvbpTzhIeH48iRIzh79uxD10VEzR8TJyJqMZ566ik4OTlhy5YtattLS0uxfft2TJs2DTdv3sSECRPQvn17WFlZwd/fH19//bVa+UGDBmH27Nl47bXX4OjoiJCQEACaU/UWLFgAHx8fWFlZwdvbG0uXLkVlZaVGuzZu3Ah3d3dYWVlh3LhxKCoq0nkNSqUSK1asgJeXFywtLREQEIAdO3bovXYrKysoFAq4ubmhX79++Pvf/46NGzdi06ZNOHjwIADNv7wnJiZCIpHg3//+NwIDA2FpaYkhQ4YgPz8f+/btQ9euXWFra4uJEyeirKyszm2sqTchIQF9+vSBlZUV+vfvj4yMDFWZU6dOYfDgwZDL5bC1tUXv3r1VU5q0TdWLiYlBx44dIZPJ4Ovriy+++EJtv0QiwSeffIJnnnkGVlZW6Ny5M3bv3q0zXmfPnkVWVhbWr1+Pfv36wcPDAwMGDMC7776Lfv36aY1XzUjIg181oyd3797FG2+8gfbt28Pa2hpBQUG1jqwAQFxcHIYPHw4LCwu17dbW1oiMjMTChQtrPf5BD07V8/T0xHvvvYfIyEjI5XJ06NAB//rXv+pc3z/+8Q+88soriIuLw9SpUwEA169fh0KhwHvvvacql5ycDJlMhoSEhIe+BgDo1asX3N3d1UZNd+7ciQ4dOiAwMFCtbHx8PB5//HG0adMGbdu2xVNPPYWsrCzVfi8vLwBAYGCgxiiSs7MzRowYgc8++0ztWm7cuIFRo0apnUfbNMgxY8bUOvplb2+PAQMGIC4urq6XTkQtGBMnImoxTE1NMWXKFGzZsgVCCNX27du3o7q6GhMmTEB5eTl69+6NvXv3Ii0tDS+++CImT56MY8eOqdX12WefQSaT4ejRo9iwYYPW88nlcmzZsgXp6elYt24dNm3ahDVr1qiVuXDhAr755hvs2bMH8fHxOHnyJF5++WWd17BixQp8/vnn2LBhA86ePYu5c+di0qRJSEpKqnc8IiIiYG9vrzFl70HLli3DRx99hOTkZPz5558YN24c1q5di61bt2Lv3r3Yv38//vnPf9a7jYsXL8YHH3yA48ePw9TUFJGRkap94eHhcHNzw2+//YYTJ05g4cKFMDMz09q+Xbt2Yc6cOXj99deRlpaGl156CVOnTtUYyYiOjsa4ceNw+vRpPPnkkwgPD0dBQYHWOp2cnCCVSrFjx446P1u0bt065Obmqr7mzJkDZ2dndOnSBQAwe/ZspKSkIC4uDqdPn8Zf//pXhIaGIjMzU2edhw8fRp8+fbTuW7ZsGc6cOVOnxLk2H3zwAfr06aPqezNnzlRL
YnVZsGABli9fjh9++AHPPPOMaruTkxM2b96MZcuW4fjx4ygpKcHkyZMxe/ZsDB06tNGuITIyErGxsarPmzdvViVv97t9+zbmzZuH48ePIyEhAVKpFM888wyUSiUAqH62Dx48iNzcXI2fh8jISLU/tmzevBnh4eGQyWT1brM2ffv2xeHDhxulLiJq5gQRUQty7tw5AUAcOnRIte0vf/mLmDRpks5jRo0aJV5//XXV54EDB4rAwECNcgDErl27dNazatUq0bt3b9XnqKgoYWJiIi5fvqzatm/fPiGVSkVubq4QQoiIiAgxevRoIYQQ5eXlwsrKSiQnJ6vVO23aNDFhwgSd5x04cKCYM2eO1n1BQUFi5MiRQgghsrOzBQBx8uRJIYQQhw4dEgDEwYMHVeVXrFghAIisrCzVtpdeekmEhITUuY3a6t27d68AIO7cuSOEEEIul4stW7ZobXNsbKyws7NTfe7fv7+YPn26Wpm//vWv4sknn1R9BiCWLFmi+lxaWioAiH379mk9hxBCfPTRR8LKykrI5XIxePBg8c4776hd94Pxut+3334rLCwsxJEjR4QQQly6dEmYmJiIK1euqJUbOnSoWLRokc422NnZic8//1zn9S9cuFD4+PiIyspKcevWLY2+/aAH+4KHh4da31cqlcLZ2VnExMTorCMiIkLIZDIBQCQkJOgs9/LLLwsfHx8xceJE4e/vL8rLyxvlGmp+JvLz84W5ubm4ePGiuHjxorCwsBDXr18Xo0ePFhERETqPv379ugAgzpw5I4TQ/X2MiooSAQEBoqKiQjg7O4ukpCRRWloq5HK5OHXqlJgzZ44YOHCgqry2n7MH2+Lh4SHWrFmjVmbdunXC09NTZ3uJqPXgiBMRtShdunRB//79sXnzZgD3RnwOHz6MadOmAQCqq6uxfPly+Pv7w8HBATY2Nvj3v/+NnJwctXp69+6t91zbtm3DgAEDoFAoYGNjgyVLlmjU06FDB7Rv3171OTg4GEqlUutf/C9cuICysjIMHz4cNjY2qq/PP/9cbepRfQghIJFIai1z/3NfLi4uqqmH92/Lz8+vdxvvr9fV1RUAVPXMmzcPL7zwAoYNG4aVK1fWen3nzp3DgAED1LYNGDAA586d03k+a2tr2Nraqs6nzaxZs5CXl4evvvoKwcHB2L59O7p164YDBw7oPAYATp48icmTJ+Ojjz5StevMmTOorq6Gj4+PWlySkpJqvbY7d+5oTNO734IFC3D9+nVVf26I++MikUigUChqjUvNMZ6enoiKikJpaanWMu+//z6qqqqwfft2fPXVVzqf/2voNTg5OWHUqFHYsmULYmNjMWrUKDg6OmqUy8zMxIQJE+Dt7Q1bW1t4enoCgMbPoi5mZmaYNGkSYmNjsX37dvj4+DToWUhdLC0t1aa6ElHr1byfICYi0mLatGl45ZVX8PHHHyM2NhYdO3bEwIEDAdxbLWvdunVYu3Yt/P39YW1tjddee021AEQNa2vrWs+RkpKC8PBwREdHIyQkBHZ2doiLi6vzqmPa1PyCunfvXrVkC0CDFqWorq5GZmYmHnvssVrL3T9FTiKRaEyZk0gkqmlP9Wnjg/UCUNWzbNkyTJw4EXv37sW+ffsQFRWFuLg4tSlh9VVbu3WRy+UICwtDWFgY3n33XYSEhODdd9/F8OHDtZbPy8vD008/jRdeeEGVjAP34mJiYoITJ06oVserYWNjo/P8jo6OuHXrls79bdq0waJFixAdHY2nnnqq1mvRpSFxad++PXbs2IHBgwcjNDQU+/bt01h0IisrC1evXoVSqcTFixfh7+/f6NcQGRmJ2bNnAwA+/vhjrWXCwsLg4eGBTZs2oV27dlAqlejevbvGz7S+8wQFBSEtLU1tSun9pFKp2hRgAFqfaXxQQUEBnJyc6twWImq5OOJERC3OuHHjIJVKsXXrVnz++eeIjIxU/eJ+9OhRjB49GpMmTUJAQAC8vb0btPR1cnIyPDw8sHjx
YvTp0wedO3fGpUuXNMrl5OTg6tWrqs+//PILpFIpfH19Ncr6+fnB3NwcOTk56NSpk9qXu7t7vdv42Wef4datWxortj2Mxmyjj48P5s6di/3792Ps2LFqz7Pcr2vXrjh69KjatqNHj8LPz6/B16GNRCJBly5dcPv2ba37y8vLMXr0aHTp0gWrV69W2xcYGIjq6mrk5+drxEWhUOg8Z2BgINLT02tt1yuvvAKpVIp169bV/6IegoeHB5KSkpCXl4fQ0FCUlJSo9lVUVGDSpEkYP348li9fjhdeeKHWUayGXkNoaCgqKipQWVmpWqTlfjdv3kRGRgaWLFmCoUOHomvXrhqJaM2zSrU9y9atWzd069YNaWlpmDhxotYyTk5OyM3NVX2urq5GWlqa3mtIS0vTWNCCiFonjjgRUYtjY2OD8ePHY9GiRSguLlZb9apz587YsWMHkpOTYW9vj9WrV+PatWv1/iW8c+fOyMnJQVxcHB577DHs3bsXu3bt0ihnYWGBiIgIvP/++yguLsarr76KcePGaf1lWi6X44033sDcuXOhVCrx+OOPo6ioCEePHoWtrS0iIiJ0tqesrAx5eXmoqqrC5cuXsWvXLqxZswYzZ87E4MGD63VttXmYNta4c+cO5s+fj+eeew5eXl64fPkyfvvtN50J3vz58zFu3DgEBgZi2LBh2LNnD3bu3KlaLbAhUlNTERUVhcmTJ8PPzw8ymQxJSUnYvHmzzvfuvPTSS/jzzz+RkJCA69evq7Y7ODjAx8cH4eHhmDJlCj744AMEBgbi+vXrSEhIQI8ePTRWaKsREhKitqKbNhYWFoiOjsasWbMafL0N5e7ujsTERAwePBghISGIj4+Hra0tFi9ejKKiInz44YewsbHBjz/+iMjISPzwww9a62noNZiYmKimZD44kgfcW7Wubdu2+Ne//gVXV1fk5ORorOLn7OwMS0tLxMfHw83NDRYWFrCzs9Oo66effkJlZaXOly8PGTIE8+bNw969e9GxY0esXr1atXR9bQ4fPvzQ79YiopaBI05E1CJNmzYNt27dQkhICNq1a6favmTJEvTq1QshISEYNGgQFAqFxss+6+Lpp5/G3LlzMXv2bPTs2RPJyclYunSpRrlOnTph7NixePLJJzFixAj06NED69ev11nv8uXLsXTpUqxYsQJdu3ZFaGgo9u7dq1pSWZdNmzbB1dUVHTt2xNixY5Geno5t27bVeq6Gamgba5iYmODmzZuYMmUKfHx8MG7cOIwcORLR0dFay48ZMwbr1q3D+++/j27dumHjxo2IjY19qJeTurm5wdPTE9HR0QgKCkKvXr2wbt06REdHY/HixVqPSUpKQm5uLvz8/ODq6qr6Sk5OBnDvXWFTpkzB66+/Dl9fX4wZMwa//fZbre8DCg8Px9mzZ/WuchcREaH23FlTcnNzQ2JiIm7cuIGQkBDs3r0ba9euxRdffAFbW1tIpVJ88cUXOHz4MGJiYnTW09BrsLW1ha2trdZ9UqkUcXFxOHHiBLp37465c+di1apVamVMTU3x4YcfYuPGjWjXrh1Gjx6ttS5ra2udSRNwbzpfREQEpkyZgoEDB8Lb21vvHyVSUlJQVFSE5557rvaLJKJWQSIenNBLREREjWb+/PkoLi7Gxo0bjd0UamTjx49HQEAA3nrrLWM3hYiaAEeciIiIDGjx4sXw8PDQu2ADtSwVFRXw9/fH3Llzjd0UImoiHHEiIiIiIiLSgyNOREREREREejBxIiIiIiIi0oOJExERERERkR5MnIiIiIiIiPRg4kRERERERKQHEyciIiIiIiI9mDgRERERERHpwcSJiIiIiIhIDyZOREREREREevw/vL7uPZKRo+wAAAAASUVORK5CYII=\n"},"metadata":{}},{"output_type":"stream","name":"stdout","text":["\n","Note: Performance variations can be 
subtle and depend heavily on\n","the specific GPU, CUDA version, and operation.\n","The 'steps' or non-smoothness relate to Tile/Wave Quantization effects.\n","cudnn.benchmark=True is most effective for *fixed* input sizes.\n","Benchmarking MatMul with torch.utils.benchmark:\n","\n","torch.matmul(a, b)\n"," 1.25 ms\n"," 1 measurement, 100 runs , 1 thread\n","\n","Benchmarking with setup:\n","\n","torch.matmul(x, y)\n","setup: x = torch.randn(s, s, device=dev); y = torch.randn(s, s, device=dev)\n"," 1.36 ms\n"," 1 measurement, 50 runs , 1 thread\n","\n","Comparing implementations:\n","[------------------------ MatMul ------------------------]\n"," | Standard | Hypothetical Opt\n","1 threads: -----------------------------------------------\n"," torch.matmul(a, b) | 1.4 | 1.4 \n","\n","Times are in milliseconds (ms).\n","\n","End of tutorial.\n"]}]},{"cell_type":"code","source":["import torch\n","\n","torch.cuda.is_available()"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"Cdu35X-xrZvK","executionInfo":{"status":"ok","timestamp":1745899326222,"user_tz":-420,"elapsed":12,"user":{"displayName":"Laam Pham","userId":"04566654796696849937"}},"outputId":"52bb0f29-a4a6-4c42-be49-48dc3973995e"},"execution_count":8,"outputs":[{"output_type":"execute_result","data":{"text/plain":["True"]},"metadata":{},"execution_count":8}]},{"cell_type":"code","source":["torch.cuda.get_device_properties(0)\n"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"oDctqTgdtYlJ","executionInfo":{"status":"ok","timestamp":1745899336494,"user_tz":-420,"elapsed":16,"user":{"displayName":"Laam Pham","userId":"04566654796696849937"}},"outputId":"53c06086-ea7d-4035-fe8a-c0d218878143"},"execution_count":9,"outputs":[{"output_type":"execute_result","data":{"text/plain":["_CudaDeviceProperties(name='NVIDIA L4', major=8, minor=9, total_memory=22692MB, multi_processor_count=58, uuid=0363b84d-093e-e8eb-498e-07022b1f6e63, 
L2_cache_size=48MB)"]},"metadata":{},"execution_count":9}]},{"cell_type":"code","source":["def allocate_empty_tensor(dim_size):\n"," a = torch.empty(4096, dim_size, dtype=torch.float32, device=\"cuda\")\n",""],"metadata":{"id":"IUVmAJ4ttbLG","executionInfo":{"status":"ok","timestamp":1745899546351,"user_tz":-420,"elapsed":41,"user":{"displayName":"Laam Pham","userId":"04566654796696849937"}}},"execution_count":10,"outputs":[]},{"cell_type":"code","source":["allocate_empty_tensor(2048)\n"],"metadata":{"id":"X2XZCGMnuOZh","executionInfo":{"status":"ok","timestamp":1745899553389,"user_tz":-420,"elapsed":1,"user":{"displayName":"Laam Pham","userId":"04566654796696849937"}}},"execution_count":11,"outputs":[]},{"cell_type":"code","source":["torch.cuda.memory_allocated(0)\n"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"2hffbm7LuQIN","executionInfo":{"status":"ok","timestamp":1745899567450,"user_tz":-420,"elapsed":43,"user":{"displayName":"Laam Pham","userId":"04566654796696849937"}},"outputId":"3bf5a471-98b0-4c25-9d3e-f43266ec06ad"},"execution_count":12,"outputs":[{"output_type":"execute_result","data":{"text/plain":["54657536"]},"metadata":{},"execution_count":12}]},{"cell_type":"code","source":["torch.cuda.memory_reserved()\n"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"nowmiSyKuTjP","executionInfo":{"status":"ok","timestamp":1745899597420,"user_tz":-420,"elapsed":42,"user":{"displayName":"Laam Pham","userId":"04566654796696849937"}},"outputId":"bd6ae554-d179-4d6b-dfa6-ecf73ff0928a"},"execution_count":13,"outputs":[{"output_type":"execute_result","data":{"text/plain":["165675008"]},"metadata":{},"execution_count":13}]},{"cell_type":"code","source":["!nvidia-smi\n"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"eO8UZHhCua3h","executionInfo":{"status":"ok","timestamp":1745899609867,"user_tz":-420,"elapsed":214,"user":{"displayName":"Laam 
Pham","userId":"04566654796696849937"}},"outputId":"c3cbdf69-584b-4453-ce8e-759d0b62d73f"},"execution_count":14,"outputs":[{"output_type":"stream","name":"stdout","text":["Tue Apr 29 04:06:49 2025 \n","+-----------------------------------------------------------------------------------------+\n","| NVIDIA-SMI 550.54.15 Driver Version: 550.54.15 CUDA Version: 12.4 |\n","|-----------------------------------------+------------------------+----------------------+\n","| GPU Name Persistence-M | Bus-Id Disp.A | Volatile Uncorr. ECC |\n","| Fan Temp Perf Pwr:Usage/Cap | Memory-Usage | GPU-Util Compute M. |\n","| | | MIG M. |\n","|=========================================+========================+======================|\n","| 0 NVIDIA L4 Off | 00000000:00:03.0 Off | 0 |\n","| N/A 59C P0 29W / 72W | 387MiB / 23034MiB | 0% Default |\n","| | | N/A |\n","+-----------------------------------------+------------------------+----------------------+\n"," \n","+-----------------------------------------------------------------------------------------+\n","| Processes: |\n","| GPU GI CI PID Type Process name GPU Memory |\n","| ID ID Usage |\n","|=========================================================================================|\n","+-----------------------------------------------------------------------------------------+\n"]}]},{"cell_type":"code","source":["torch.cuda.empty_cache()\n","torch.cuda.memory_reserved()"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"Ona5fnPWud3V","executionInfo":{"status":"ok","timestamp":1745899630394,"user_tz":-420,"elapsed":40,"user":{"displayName":"Laam 
Pham","userId":"04566654796696849937"}},"outputId":"e9fd28f7-8c0b-4bf6-d9ae-eae0e2cbbebf"},"execution_count":15,"outputs":[{"output_type":"execute_result","data":{"text/plain":["81788928"]},"metadata":{},"execution_count":15}]},{"cell_type":"code","source":["!nvidia-smi"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"IwIzk9oPui6z","executionInfo":{"status":"ok","timestamp":1745899640946,"user_tz":-420,"elapsed":206,"user":{"displayName":"Laam Pham","userId":"04566654796696849937"}},"outputId":"42d0b38c-c194-4704-b2be-27761c5311ad"},"execution_count":16,"outputs":[{"output_type":"stream","name":"stdout","text":["Tue Apr 29 04:07:20 2025 \n","+-----------------------------------------------------------------------------------------+\n","| NVIDIA-SMI 550.54.15 Driver Version: 550.54.15 CUDA Version: 12.4 |\n","|-----------------------------------------+------------------------+----------------------+\n","| GPU Name Persistence-M | Bus-Id Disp.A | Volatile Uncorr. ECC |\n","| Fan Temp Perf Pwr:Usage/Cap | Memory-Usage | GPU-Util Compute M. |\n","| | | MIG M. 
|\n","|=========================================+========================+======================|\n","| 0 NVIDIA L4 Off | 00000000:00:03.0 Off | 0 |\n","| N/A 60C P0 30W / 72W | 307MiB / 23034MiB | 0% Default |\n","| | | N/A |\n","+-----------------------------------------+------------------------+----------------------+\n"," \n","+-----------------------------------------------------------------------------------------+\n","| Processes: |\n","| GPU GI CI PID Type Process name GPU Memory |\n","| ID ID Usage |\n","|=========================================================================================|\n","+-----------------------------------------------------------------------------------------+\n"]}]},{"cell_type":"code","source":["allocate_empty_tensor(2048)\n","torch.cuda.memory_reserved()"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"_H-AF34TuldA","executionInfo":{"status":"ok","timestamp":1745899660225,"user_tz":-420,"elapsed":44,"user":{"displayName":"Laam Pham","userId":"04566654796696849937"}},"outputId":"a4dc55cc-0f26-41b0-f018-5d31c2f6734e"},"execution_count":17,"outputs":[{"output_type":"execute_result","data":{"text/plain":["115343360"]},"metadata":{},"execution_count":17}]},{"cell_type":"code","source":["allocate_empty_tensor(1024)\n","torch.cuda.memory_reserved()"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"Xe6GNbzTuqM1","executionInfo":{"status":"ok","timestamp":1745899671295,"user_tz":-420,"elapsed":12,"user":{"displayName":"Laam 
Pham","userId":"04566654796696849937"}},"outputId":"3f6520f2-8099-4a69-f7c4-8a32509430df"},"execution_count":18,"outputs":[{"output_type":"execute_result","data":{"text/plain":["115343360"]},"metadata":{},"execution_count":18}]},{"cell_type":"code","source":["allocate_empty_tensor(3072)\n","torch.cuda.memory_reserved()"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"Fz0aLKJIus6M","executionInfo":{"status":"ok","timestamp":1745899692665,"user_tz":-420,"elapsed":41,"user":{"displayName":"Laam Pham","userId":"04566654796696849937"}},"outputId":"7df4f4f6-41af-4e0a-9343-a0c7f4c9c3b4"},"execution_count":19,"outputs":[{"output_type":"execute_result","data":{"text/plain":["165675008"]},"metadata":{},"execution_count":19}]},{"cell_type":"code","source":["memory_stats = torch.cuda.memory_stats()\n","print(memory_stats[\"active.all.allocated\"])\n","print(memory_stats[\"active.all.current\"])\n","print(memory_stats[\"active.all.peak\"])\n","print(memory_stats[\"reserved_bytes.all.current\"])"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"K6yKS9GjuyHq","executionInfo":{"status":"ok","timestamp":1745899734063,"user_tz":-420,"elapsed":11,"user":{"displayName":"Laam Pham","userId":"04566654796696849937"}},"outputId":"6ad7fc76-8d89-4ea8-dcc4-4a02fe13d305"},"execution_count":20,"outputs":[{"output_type":"stream","name":"stdout","text":["1321\n","7\n","17\n","165675008\n"]}]},{"cell_type":"code","source":["torch.cuda.empty_cache()\n","print(torch.cuda.memory_stats()[\"reserved_bytes.all.current\"])\n"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"-kWMLPjCu8O8","executionInfo":{"status":"ok","timestamp":1745899751380,"user_tz":-420,"elapsed":46,"user":{"displayName":"Laam Pham","userId":"04566654796696849937"}},"outputId":"66fdb905-7e0c-4d34-a4d6-0b08078fa4e7"},"execution_count":21,"outputs":[{"output_type":"stream","name":"stdout","text":["81788928\n"]}]},{"cell_type":"code","source":["def batched_dot_mul_sum(a, b):\n"," 
\"\"\"Computes batched dot by multiplying and summing\"\"\"\n"," return a.mul(b).sum(-1)\n","\n","\n","def batched_dot_bmm(a, b):\n"," \"\"\"Computes batched dot by reducing to bmm\"\"\"\n"," a = a.reshape(-1, 1, a.shape[-1])\n"," b = b.reshape(-1, b.shape[-1], 1)\n"," return torch.bmm(a, b).flatten(-3)\n","\n","\n","# Input for benchmarking\n","x = torch.randn(10000, 64)\n","\n","# Ensure that both functions compute the same output\n","assert batched_dot_mul_sum(x, x).allclose(batched_dot_bmm(x, x))"],"metadata":{"id":"z3GO7THjvAc-","executionInfo":{"status":"ok","timestamp":1745899767244,"user_tz":-420,"elapsed":23,"user":{"displayName":"Laam Pham","userId":"04566654796696849937"}}},"execution_count":22,"outputs":[]},{"cell_type":"code","source":["import timeit\n","\n","t0 = timeit.Timer(\n"," stmt=\"batched_dot_mul_sum(x, x)\",\n"," setup=\"from __main__ import batched_dot_mul_sum\",\n"," globals={\"x\": x},\n",")\n","\n","t1 = timeit.Timer(\n"," stmt=\"batched_dot_bmm(x, x)\",\n"," setup=\"from __main__ import batched_dot_bmm\",\n"," globals={\"x\": x},\n",")\n","\n","print(f\"mul_sum(x, x): {t0.timeit(100) / 100 * 1e6:>5.1f} us\")\n","print(f\"bmm(x, x): {t1.timeit(100) / 100 * 1e6:>5.1f} us\")"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"EJ4DVJLXvEVP","executionInfo":{"status":"ok","timestamp":1745899802030,"user_tz":-420,"elapsed":43,"user":{"displayName":"Laam Pham","userId":"04566654796696849937"}},"outputId":"63f907ab-a840-4955-8e25-2a9a48af889a"},"execution_count":23,"outputs":[{"output_type":"stream","name":"stdout","text":["mul_sum(x, x): 136.8 us\n","bmm(x, x): 191.7 us\n"]}]},{"cell_type":"code","source":["%timeit batched_dot_mul_sum(x, x)\n"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"e8iQeITEvM0g","executionInfo":{"status":"ok","timestamp":1745899825900,"user_tz":-420,"elapsed":6414,"user":{"displayName":"Laam 
Pham","userId":"04566654796696849937"}},"outputId":"f7cbd1eb-d894-4f0a-fdb6-3c07574faf70"},"execution_count":24,"outputs":[{"output_type":"stream","name":"stdout","text":["77.7 µs ± 969 ns per loop (mean ± std. dev. of 7 runs, 10000 loops each)\n"]}]},{"cell_type":"code","source":["%timeit batched_dot_bmm(x, x)\n"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"qQaw9pBvvRF6","executionInfo":{"status":"ok","timestamp":1745899841249,"user_tz":-420,"elapsed":13410,"user":{"displayName":"Laam Pham","userId":"04566654796696849937"}},"outputId":"f9ae087e-6aff-4c15-8216-bce09b290202"},"execution_count":25,"outputs":[{"output_type":"stream","name":"stdout","text":["164 µs ± 10.2 µs per loop (mean ± std. dev. of 7 runs, 10000 loops each)\n"]}]},{"cell_type":"code","source":["import torch.utils.benchmark as benchmark\n","\n","t0 = benchmark.Timer(\n"," stmt=\"batched_dot_mul_sum(x, x)\",\n"," setup=\"from __main__ import batched_dot_mul_sum\",\n"," globals={\"x\": x},\n",")\n","\n","t1 = benchmark.Timer(\n"," stmt=\"batched_dot_bmm(x, x)\",\n"," setup=\"from __main__ import batched_dot_bmm\",\n"," globals={\"x\": x},\n",")\n","\n","print(t0.timeit(100))\n","print(t1.timeit(100))"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"i2oT8JOrvTIf","executionInfo":{"status":"ok","timestamp":1745899858165,"user_tz":-420,"elapsed":135,"user":{"displayName":"Laam Pham","userId":"04566654796696849937"}},"outputId":"a1180041-e9a3-4476-f34d-0cc6ada58d50"},"execution_count":26,"outputs":[{"output_type":"stream","name":"stdout","text":["\n","batched_dot_mul_sum(x, x)\n","setup: from __main__ import batched_dot_mul_sum\n"," 390.79 us\n"," 1 measurement, 100 runs , 1 thread\n","\n","batched_dot_bmm(x, x)\n","setup: from __main__ import batched_dot_bmm\n"," 876.46 us\n"," 1 measurement, 100 runs , 1 thread\n"]}]},{"cell_type":"code","source":["# in addition, we can set the number of threads for CPU computations\n","num_threads = 
torch.get_num_threads()\n","print(f\"Benchmarking on {num_threads} threads\")\n","\n","t0 = benchmark.Timer(\n"," stmt=\"batched_dot_mul_sum(x, x)\",\n"," setup=\"from __main__ import batched_dot_mul_sum\",\n"," globals={\"x\": x},\n"," num_threads=num_threads,\n"," label=\"Multithreaded batch dot\",\n"," sub_label=\"Implemented using mul and sum\",\n",")\n","\n","t1 = benchmark.Timer(\n"," stmt=\"batched_dot_bmm(x, x)\",\n"," setup=\"from __main__ import batched_dot_bmm\",\n"," globals={\"x\": x},\n"," num_threads=num_threads,\n"," label=\"Multithreaded batch dot\",\n"," sub_label=\"Implemented using bmm\",\n",")\n","\n","print(t0.timeit(100))\n","print(t1.timeit(100))"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"HAcbJQrUvagK","executionInfo":{"status":"ok","timestamp":1745899894949,"user_tz":-420,"elapsed":46,"user":{"displayName":"Laam Pham","userId":"04566654796696849937"}},"outputId":"59c38bb4-31db-436d-ad4c-753fe16dd805"},"execution_count":27,"outputs":[{"output_type":"stream","name":"stdout","text":["Benchmarking on 6 threads\n","\n","Multithreaded batch dot: Implemented using mul and sum\n","setup: from __main__ import batched_dot_mul_sum\n"," 81.66 us\n"," 1 measurement, 100 runs , 6 threads\n","\n","Multithreaded batch dot: Implemented using bmm\n","setup: from __main__ import batched_dot_bmm\n"," 158.29 us\n"," 1 measurement, 100 runs , 6 threads\n"]}]},{"cell_type":"code","source":["# we can change it globally for PyTorch and measure the impact\n","prev_num_threads = num_threads\n","torch.set_num_threads(2)\n","\n","num_threads = torch.get_num_threads()\n","print(f\"Benchmarking on {num_threads} threads\")\n","\n","t0 = benchmark.Timer(\n"," stmt=\"batched_dot_mul_sum(x, x)\",\n"," setup=\"from __main__ import batched_dot_mul_sum\",\n"," globals={\"x\": x},\n"," num_threads=num_threads,\n"," label=\"Multithreaded batch dot\",\n"," sub_label=\"Implemented using mul and sum\",\n",")\n","\n","t1 = benchmark.Timer(\n"," 
stmt=\"batched_dot_bmm(x, x)\",\n"," setup=\"from __main__ import batched_dot_bmm\",\n"," globals={\"x\": x},\n"," num_threads=num_threads,\n"," label=\"Multithreaded batch dot\",\n"," sub_label=\"Implemented using bmm\",\n",")\n","\n","print(t0.timeit(100))\n","print(t1.timeit(100))\n","# in this case, we don't get any speedup, likely due to the overhead\n","\n","torch.set_num_threads(prev_num_threads)\n",""],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"QY1JvrpnvjgM","executionInfo":{"status":"ok","timestamp":1745899933895,"user_tz":-420,"elapsed":75,"user":{"displayName":"Laam Pham","userId":"04566654796696849937"}},"outputId":"6c24ba27-68de-448e-e41c-148aab7bbdfc"},"execution_count":28,"outputs":[{"output_type":"stream","name":"stdout","text":["Benchmarking on 2 threads\n","\n","Multithreaded batch dot: Implemented using mul and sum\n","setup: from __main__ import batched_dot_mul_sum\n"," 206.36 us\n"," 1 measurement, 100 runs , 2 threads\n","\n","Multithreaded batch dot: Implemented using bmm\n","setup: from __main__ import batched_dot_bmm\n"," 450.58 us\n"," 1 measurement, 100 runs , 2 threads\n"]}]},{"cell_type":"code","source":["# by the way, what CPU do we have?\n","!lscpu"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"rftX9o8TvtAS","executionInfo":{"status":"ok","timestamp":1745899949819,"user_tz":-420,"elapsed":105,"user":{"displayName":"Laam Pham","userId":"04566654796696849937"}},"outputId":"2ba39bb7-f1ff-49a5-af83-4fc04a9ea19c"},"execution_count":29,"outputs":[{"output_type":"stream","name":"stdout","text":["Architecture: x86_64\n"," CPU op-mode(s): 32-bit, 64-bit\n"," Address sizes: 46 bits physical, 48 bits virtual\n"," Byte Order: Little Endian\n","CPU(s): 12\n"," On-line CPU(s) list: 0-11\n","Vendor ID: GenuineIntel\n"," Model name: Intel(R) Xeon(R) CPU @ 2.20GHz\n"," CPU family: 6\n"," Model: 85\n"," Thread(s) per core: 2\n"," Core(s) per socket: 6\n"," Socket(s): 1\n"," Stepping: 7\n"," BogoMIPS: 
4400.45\n"," Flags: fpu vme de pse tsc msr pae mce cx8 apic sep mtrr pge m\n"," ca cmov pat pse36 clflush mmx fxsr sse sse2 ss ht sysc\n"," all nx pdpe1gb rdtscp lm constant_tsc rep_good nopl xt\n"," opology nonstop_tsc cpuid tsc_known_freq pni pclmulqdq\n"," ssse3 fma cx16 pcid sse4_1 sse4_2 x2apic movbe popcnt\n"," aes xsave avx f16c rdrand hypervisor lahf_lm abm 3dno\n"," wprefetch invpcid_single ssbd ibrs ibpb stibp ibrs_enh\n"," anced fsgsbase tsc_adjust bmi1 hle avx2 smep bmi2 erms\n"," invpcid rtm mpx avx512f avx512dq rdseed adx smap clfl\n"," ushopt clwb avx512cd avx512bw avx512vl xsaveopt xsavec\n"," xgetbv1 xsaves arat avx512_vnni md_clear arch_capabil\n"," ities\n","Virtualization features: \n"," Hypervisor vendor: KVM\n"," Virtualization type: full\n","Caches (sum of all): \n"," L1d: 192 KiB (6 instances)\n"," L1i: 192 KiB (6 instances)\n"," L2: 6 MiB (6 instances)\n"," L3: 38.5 MiB (1 instance)\n","NUMA: \n"," NUMA node(s): 1\n"," NUMA node0 CPU(s): 0-11\n","Vulnerabilities: \n"," Gather data sampling: Not affected\n"," Itlb multihit: Not affected\n"," L1tf: Not affected\n"," Mds: Not affected\n"," Meltdown: Not affected\n"," Mmio stale data: Vulnerable\n"," Reg file data sampling: Not affected\n"," Retbleed: Vulnerable\n"," Spec rstack overflow: Not affected\n"," Spec store bypass: Vulnerable\n"," Spectre v1: Vulnerable: __user pointer sanitization and usercopy b\n"," arriers only; no swapgs barriers\n"," Spectre v2: Vulnerable; IBPB: disabled; STIBP: disabled; PBRSB-eIB\n"," RS: Vulnerable; BHI: Vulnerable\n"," Srbds: Not affected\n"," Tsx async abort: Vulnerable\n"]}]},{"cell_type":"code","source":["import timeit\n","\n","x = torch.randn(10000, 1024, device=\"cuda\")\n","\n","t0 = timeit.Timer(\n"," stmt=\"batched_dot_mul_sum(x, x)\",\n"," setup=\"from __main__ import batched_dot_mul_sum\",\n"," globals={\"x\": x},\n",")\n","\n","t1 = timeit.Timer(\n"," stmt=\"batched_dot_bmm(x, x)\",\n"," setup=\"from __main__ import batched_dot_bmm\",\n"," 
globals={\"x\": x},\n",")\n","\n","# Ran each twice to show difference before/after warmup\n","print(f\"mul_sum(x, x): {t0.timeit(100) / 100 * 1e6:>5.1f} us\")\n","print(f\"mul_sum(x, x): {t0.timeit(100) / 100 * 1e6:>5.1f} us\")\n","print(f\"bmm(x, x): {t1.timeit(100) / 100 * 1e6:>5.1f} us\")\n","print(f\"bmm(x, x): {t1.timeit(100) / 100 * 1e6:>5.1f} us\")"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"0zIF-8sEvw4w","executionInfo":{"status":"ok","timestamp":1745899974688,"user_tz":-420,"elapsed":109,"user":{"displayName":"Laam Pham","userId":"04566654796696849937"}},"outputId":"f4e47cd7-c612-4fd4-e47b-72fc0207521c"},"execution_count":30,"outputs":[{"output_type":"stream","name":"stdout","text":["mul_sum(x, x): 292.4 us\n","mul_sum(x, x): 23.5 us\n","bmm(x, x): 703.0 us\n","bmm(x, x): 24.6 us\n"]}]},{"cell_type":"code","source":["t0 = benchmark.Timer(\n"," stmt=\"batched_dot_mul_sum(x, x)\",\n"," setup=\"from __main__ import batched_dot_mul_sum\",\n"," globals={\"x\": x},\n",")\n","\n","t1 = benchmark.Timer(\n"," stmt=\"batched_dot_bmm(x, x)\",\n"," setup=\"from __main__ import batched_dot_bmm\",\n"," globals={\"x\": x},\n",")\n","\n","# Run only once since benchmark module does warmup for us\n","print(t0.timeit(100))\n","print(t1.timeit(100))"],"metadata":{"colab":{"base_uri":"https://localhost:8080/"},"id":"nslgr1dDv29T","executionInfo":{"status":"ok","timestamp":1745900020680,"user_tz":-420,"elapsed":48,"user":{"displayName":"Laam Pham","userId":"04566654796696849937"}},"outputId":"799b4fbb-e446-457f-97fa-94324f7457a5"},"execution_count":31,"outputs":[{"output_type":"stream","name":"stdout","text":["\n","batched_dot_mul_sum(x, x)\n","setup: from __main__ import batched_dot_mul_sum\n"," 363.77 us\n"," 1 measurement, 100 runs , 1 thread\n","\n","batched_dot_bmm(x, x)\n","setup: from __main__ import batched_dot_bmm\n"," 46.30 us\n"," 1 measurement, 100 runs , 1 thread\n"]}]},{"cell_type":"code","source":["# -*- coding: utf-8 
-*-\n","\"\"\"\n","Concise Tutorial: PyTorch GPU Essentials & Benchmarking\n","\n","Based on seminar_01.ipynb, focusing on core concepts for efficient GPU usage.\n","Covers asynchronous execution, memory management, benchmarking, streams,\n","graphs, and debugging tips.\n","\"\"\"\n","\n","# %% [markdown]\n","# # PyTorch GPU Essentials & Benchmarking: A Concise Tutorial\n","#\n","# This tutorial covers fundamental concepts and best practices for working efficiently with PyTorch on CUDA-enabled GPUs. We'll explore:\n","#\n","# 1. **Asynchronous Execution:** Understanding how PyTorch interacts with the GPU and why synchronization is crucial for timing.\n","# 2. **Memory Management:** Basics of GPU memory, host-device transfers, and the caching allocator.\n","# 3. **Benchmarking GPU Code:** Reliable methods using `torch.utils.benchmark`, including warmup.\n","# 4. **Performance Features:** Using CUDA Streams for overlap and CUDA Graphs for reducing launch overhead.\n","# 5. **Debugging Tips:** Locating errors in asynchronous GPU code.\n","#\n","# **Goal:** Equip you with practical knowledge to write faster and more memory-efficient PyTorch code on GPUs.\n","\n","# %% [markdown]\n","# ## Setup: Imports and Device Check\n","\n","# %%\n","import torch\n","import numpy as np\n","import time\n","from time import perf_counter\n","import torch.utils.benchmark as benchmark # Recommended tool\n","\n","# Check for GPU availability and set device\n","if torch.cuda.is_available():\n"," device = torch.device(\"cuda\")\n"," print(f\"CUDA device found: {torch.cuda.get_device_name(0)}\")\n"," props = torch.cuda.get_device_properties(0)\n"," print(f\" Compute Capability: {props.major}.{props.minor}\")\n"," print(f\" Total Memory: {props.total_memory / (1024**3):.2f} GB\")\n"," print(f\" Multiprocessor Count: {props.multi_processor_count}\")\n","else:\n"," device = torch.device(\"cpu\")\n"," print(\"CUDA device not found. 
Using CPU (GPU examples will be skipped or run on CPU).\")\n"," # Set a flag for conditional execution of GPU-specific cells\n"," use_gpu = False\n"," props = None # No properties available\n","\n","if device.type == 'cuda':\n"," use_gpu = True\n","\n","# %% [markdown]\n","# ## 1. Asynchronous Execution & Synchronization\n","#\n","# CUDA operations launched from Python (like kernel execution or memory copies) are typically **asynchronous**. The CPU queues the task on the GPU and immediately returns, *before* the task is finished. This allows CPU and GPU parallelism but complicates timing.\n","#\n","# **Problem:** Naively timing Python calls doesn't measure the actual GPU execution time.\n","#\n","# **Solution:** Use `torch.cuda.synchronize()` to make the CPU wait until all previously queued tasks on the GPU have completed.\n","\n","# %%\n","if use_gpu:\n"," size = 2048\n"," a = torch.randn(size, size, device=device)\n"," b = torch.randn(size, size, device=device)\n","\n"," # --- Incorrect Timing (No Synchronization) ---\n"," start_time = perf_counter()\n"," c = torch.matmul(a, b)\n"," end_time = perf_counter()\n"," print(f\"Incorrect MM time (measures launch overhead): {(end_time - start_time) * 1000:.4f} ms\")\n","\n"," # --- Correct Timing (With Synchronization) ---\n"," # Ensure GPU is idle before starting timer\n"," torch.cuda.synchronize()\n"," start_time = perf_counter()\n"," c = torch.matmul(a, b)\n"," # Wait for matmul kernel to finish *before* stopping timer\n"," torch.cuda.synchronize()\n"," end_time = perf_counter()\n"," correct_time_ms = (end_time - start_time) * 1000\n"," print(f\"Correct MM time (with sync): {correct_time_ms:.4f} ms\")\n","\n"," # Note: Copying data back to CPU (e.g., .cpu(), .item()) also implicitly synchronizes.\n"," start_time = perf_counter()\n"," loss = c.sum().item() # .item() forces sync\n"," end_time = perf_counter()\n"," print(f\"Timing with implicit sync via .item(): {(end_time - start_time) * 1000:.4f} ms\")\n"," 
print(f\"Result (sum): {loss}\")\n","\n","else:\n"," print(\"Skipping async execution demo (requires GPU).\")\n","\n","# %% [markdown]\n","# **Key Takeaway:** Always use `torch.cuda.synchronize()` when manually timing GPU operations with tools like `time.perf_counter`. Better yet, use benchmarking utilities that handle this automatically.\n","\n","# %% [markdown]\n","# ## 2. Memory Management\n","#\n","# GPUs have their own memory (device memory), separate from CPU RAM (host memory). Data must be explicitly transferred.\n","#\n","# - **H2D (Host-to-Device):** `tensor.to(device)` or `tensor.cuda()`\n","# - **D2H (Device-to-Host):** `tensor.cpu()`\n","#\n","# These transfers occur over the PCIe bus and can be bottlenecks.\n","#\n","# ### Caching Allocator\n","# PyTorch uses a **caching allocator** for GPU memory to speed up allocations. When a tensor goes out of scope, its memory isn't immediately freed back to the OS but is kept **reserved** by PyTorch for potential reuse.\n","#\n","# - `torch.cuda.memory_allocated()`: Memory currently used by active tensors.\n","# - `torch.cuda.memory_reserved()`: Total memory held by the caching allocator (allocated + cached/free).\n","\n","# %%\n","if use_gpu:\n"," print(f\"Initial - Allocated: {torch.cuda.memory_allocated() / (1024**2):.2f} MB, Reserved: {torch.cuda.memory_reserved() / (1024**2):.2f} MB\")\n","\n"," # Allocate tensor inside a function scope\n"," def allocate_temp_tensor(rows, cols):\n"," t = torch.empty(rows, cols, device=device)\n"," print(f\"Inside func - Allocated: {torch.cuda.memory_allocated() / (1024**2):.2f} MB, Reserved: {torch.cuda.memory_reserved() / (1024**2):.2f} MB\")\n"," # Tensor 't' goes out of scope here\n","\n"," allocate_temp_tensor(2048, 2048) # Approx 16 MB tensor\n"," print(f\"After func - Allocated: {torch.cuda.memory_allocated() / (1024**2):.2f} MB, Reserved: {torch.cuda.memory_reserved() / (1024**2):.2f} MB\")\n","\n"," # Allocate another tensor (smaller) - should reuse the reserved 
block\n"," allocate_temp_tensor(1024, 1024) # Approx 4 MB tensor\n"," print(f\"After smaller - Allocated: {torch.cuda.memory_allocated() / (1024**2):.2f} MB, Reserved: {torch.cuda.memory_reserved() / (1024**2):.2f} MB\")\n","\n"," # Allocate a larger tensor - may need a new block, increasing reserved memory\n"," allocate_temp_tensor(4096, 4096) # Approx 64 MB tensor\n"," print(f\"After larger - Allocated: {torch.cuda.memory_allocated() / (1024**2):.2f} MB, Reserved: {torch.cuda.memory_reserved() / (1024**2):.2f} MB\")\n","\n"," # Manually clear the cache (releases reserved memory back to OS)\n"," # WARNING: This causes CPU-GPU synchronization and can hurt performance. Avoid frequent use.\n"," torch.cuda.empty_cache()\n"," print(f\"After empty_cache - Allocated: {torch.cuda.memory_allocated() / (1024**2):.2f} MB, Reserved: {torch.cuda.memory_reserved() / (1024**2):.2f} MB\")\n","\n","else:\n"," print(\"Skipping memory management demo (requires GPU).\")\n","\n","# %% [markdown]\n","# **Memory Best Practices:**\n","# - Minimize H2D/D2H transfers.\n","# - Be mindful that reserved memory can grow if tensor sizes vary significantly. Pre-allocating for the largest expected size can sometimes help.\n","# - Avoid calling `torch.cuda.empty_cache()` frequently in performance-critical code. Better memory management is preferred.\n","# - Consider `pin_memory=True` for CPU tensors involved in frequent H2D transfers (e.g., in `DataLoader`) to potentially speed them up by avoiding an extra internal copy.\n","\n","# %% [markdown]\n","# ## 3. Benchmarking GPU Code Reliably\n","#\n","# Accurate benchmarking requires:\n","# 1. **Warmup:** Initial GPU operations can have extra overhead (context setup, kernel loading). Run the code a few times *before* measuring.\n","# 2. **Synchronization:** Ensure measurements capture the full GPU execution time.\n","# 3. 
**Averaging:** Run the operation multiple times and average the results for stability.\n","#\n","# `torch.utils.benchmark` is the recommended tool as it handles these aspects automatically.\n","\n","# %%\n","# Define functions to benchmark (from original notebook)\n","def batched_dot_mul_sum(a, b):\n"," \"\"\"Computes batched dot by multiplying and summing\"\"\"\n"," return a.mul(b).sum(-1)\n","\n","def batched_dot_bmm(a, b):\n"," \"\"\"Computes batched dot by reducing to bmm\"\"\"\n"," a = a.reshape(-1, 1, a.shape[-1])\n"," b = b.reshape(-1, b.shape[-1], 1)\n"," # Use .squeeze() instead of flatten for robustness if batch dim is 1\n"," return torch.bmm(a, b).squeeze(-1).squeeze(-1)\n","\n","if use_gpu:\n"," # Input data on GPU\n"," x_gpu = torch.randn(10000, 1024, device=device)\n","\n"," # Ensure correctness\n"," res1 = batched_dot_mul_sum(x_gpu, x_gpu)\n"," res2 = batched_dot_bmm(x_gpu, x_gpu)\n"," assert torch.allclose(res1, res2, atol=1e-5), \"Functions produce different results!\"\n","\n"," print(\"Benchmarking with torch.utils.benchmark (handles warmup & sync):\")\n","\n"," t0 = benchmark.Timer(\n"," stmt=\"batched_dot_mul_sum(x, x)\",\n"," globals={\"x\": x_gpu, \"batched_dot_mul_sum\": batched_dot_mul_sum}\n"," )\n","\n"," t1 = benchmark.Timer(\n"," stmt=\"batched_dot_bmm(x, x)\",\n"," globals={\"x\": x_gpu, \"batched_dot_bmm\": batched_dot_bmm}\n"," )\n","\n"," # timeit runs the statement multiple times and returns measurement object\n"," m0 = t0.timeit(100)\n"," m1 = t1.timeit(100)\n","\n"," print(f\"Method 1 (mul_sum):\\n{m0}\")\n"," print(f\"Method 2 (bmm):\\n{m1}\")\n","\n"," # Comparing results\n"," compare = benchmark.Compare([m0, m1])\n"," print(\"\\nComparison:\")\n"," compare.print()\n","\n"," # Quick check with IPython magic (also useful, handles basic timing)\n"," print(\"\\nBenchmarking with %timeit magic:\")\n"," %timeit batched_dot_mul_sum(x_gpu, x_gpu); torch.cuda.synchronize()\n"," %timeit batched_dot_bmm(x_gpu, x_gpu); 
torch.cuda.synchronize()\n","\n","else:\n"," print(\"Skipping benchmarking demo (requires GPU).\")\n","\n","# %% [markdown]\n","# **Benchmarking Takeaway:** Use `torch.utils.benchmark` for reliable microbenchmarks. It handles warmup, synchronization, and provides statistical summaries. `%timeit` is convenient for quick checks in notebooks, but remember to add `torch.cuda.synchronize()` manually.\n","\n","# %% [markdown]\n","# ## 4. Performance Features: Streams & Graphs\n","#\n","# ### CUDA Streams\n","# Streams allow **concurrent execution** of independent CUDA operations on the GPU, potentially overlapping computation with data transfers.\n","#\n","# **Use Case:** Overlap data loading/transfers (H2D) for the *next* batch with the computation of the *current* batch.\n","\n","# %%\n","if use_gpu and props and props.major >= 3: # Streams generally effective on Kepler+\n"," # Setup: Heavy compute task (MatMul) and data to transfer\n"," compute_stream = torch.cuda.Stream()\n"," h2d_stream = torch.cuda.Stream()\n","\n"," # Large matrix for compute-bound task\n"," A_gpu = torch.randn(8192, 8192, device=device)\n","\n"," # List of CPU tensors to simulate data loading pipeline\n"," # Use pinned memory for potentially faster H2D transfers\n"," cpu_data = [torch.randn(2048, 2048, device='cpu').pin_memory() for _ in range(10)]\n"," gpu_targets = [torch.empty_like(d, device=device) for d in cpu_data] # Pre-allocate GPU targets\n","\n"," # Ensure setup is complete\n"," torch.cuda.synchronize()\n","\n"," # --- Sequential Execution (Compute then Transfer) ---\n"," start_seq = perf_counter()\n"," comp_result = torch.matmul(A_gpu, A_gpu)\n"," # Ensure compute is done before starting transfers (implicit if not using streams)\n"," torch.cuda.synchronize()\n"," transfer_start = perf_counter()\n"," for i in range(len(cpu_data)):\n"," gpu_targets[i].copy_(cpu_data[i], non_blocking=False) # Blocking copy\n"," # Ensure transfers are done\n"," torch.cuda.synchronize()\n"," end_seq = 
perf_counter()\n"," print(f\"Sequential: Compute took ~{transfer_start - start_seq:.4f}s, Transfer took ~{end_seq - transfer_start:.4f}s, Total: {end_seq - start_seq:.4f}s\")\n","\n","\n"," # --- Streamed Execution (Overlap Compute and Transfer) ---\n"," start_stream = perf_counter()\n"," # Queue computation on the compute stream\n"," with torch.cuda.stream(compute_stream):\n"," comp_result_stream = torch.matmul(A_gpu, A_gpu)\n","\n"," # Queue transfers on the H2D stream\n"," with torch.cuda.stream(h2d_stream):\n"," for i in range(len(cpu_data)):\n"," # non_blocking=True is essential for overlap with other streams\n"," gpu_targets[i].copy_(cpu_data[i], non_blocking=True)\n","\n"," # Wait for *both* streams to complete\n"," compute_stream.synchronize()\n"," h2d_stream.synchronize()\n"," # OR simply torch.cuda.synchronize() waits for all streams on the device\n"," # torch.cuda.synchronize()\n"," end_stream = perf_counter()\n"," print(f\"Streamed (Overlap): Total time: {end_stream - start_stream:.4f}s\")\n","\n"," # Cleanup large tensor\n"," del A_gpu, comp_result, comp_result_stream\n"," torch.cuda.empty_cache()\n","\n","else:\n"," print(\"Skipping CUDA Streams demo (requires GPU, preferably Compute Capability >= 3.0).\")\n","\n","# %% [markdown]\n","# **Streams Takeaway:** Streams can hide data transfer latency behind computation *if* the tasks are independent and the GPU isn't already fully saturated by one task alone. 
Use `non_blocking=True` for copies intended to overlap.\n","\n","# %% [markdown]\n","# ### CUDA Graphs\n","# CUDA Graphs capture a sequence of GPU operations (kernel launches) and allow replaying them with very low CPU overhead.\n","#\n","# **Use Case:** Speeding up workloads dominated by **launching many small, fast kernels**, where the CPU overhead of launching each kernel becomes significant.\n","\n","# %%\n","if use_gpu:\n"," # Function with many small operations\n"," def func_many_small_ops(x):\n"," for _ in range(200): # Reduced iterations for faster demo\n"," x = torch.sigmoid(x * 1.01 + 0.01) + torch.relu(x - 0.5)\n"," return x\n","\n"," # Input for capture and replay\n"," static_input = torch.randn(1024, 1024, device=device)\n","\n"," # --- Standard Eager Execution ---\n"," print(\"Benchmarking Eager Execution:\")\n"," t_eager = benchmark.Timer(\n"," stmt=\"func_many_small_ops(x)\",\n"," globals={\"x\": static_input, \"func_many_small_ops\": func_many_small_ops}\n"," )\n"," m_eager = t_eager.timeit(50)\n"," print(m_eager)\n","\n","\n"," # --- CUDA Graph Execution ---\n"," # 1. Warmup (Important before graph capture)\n"," # Run the function once to ensure kernels are loaded, etc.\n"," _ = func_many_small_ops(static_input)\n"," torch.cuda.synchronize()\n","\n"," # 2. Capture\n"," graph = torch.cuda.CUDAGraph()\n"," # Create static tensors for capture inputs/outputs\n"," static_input_capture = static_input.clone()\n"," static_output_capture = torch.empty_like(static_input_capture)\n","\n"," with torch.cuda.graph(graph):\n"," # Operations inside this block are captured\n"," static_output_capture = func_many_small_ops(static_input_capture)\n","\n"," # 3. 
Replay\n"," # To run on new data, copy it into the original input tensor *before* replay\n"," new_data = torch.randn_like(static_input)\n"," static_input_capture.copy_(new_data)\n","\n"," print(\"\\nBenchmarking Graph Replay:\")\n"," t_graph = benchmark.Timer(\n"," stmt=\"graph.replay()\",\n"," # Setup includes copying new data before each replay measurement\n"," setup=\"static_input_capture.copy_(torch.randn_like(static_input))\",\n"," globals={\"graph\": graph, \"static_input_capture\": static_input_capture, \"torch\": torch, \"static_input\": static_input}\n"," )\n"," m_graph = t_graph.timeit(50)\n"," print(m_graph)\n","\n"," # Verify result (optional)\n"," # graph.replay() # Replay on the last copied data\n"," # eager_output = func_many_small_ops(static_input_capture.clone()) # Rerun eager on same data\n"," # print(f\"\\nGraph and Eager outputs close: {torch.allclose(static_output_capture, eager_output)}\")\n","\n","else:\n"," print(\"Skipping CUDA Graphs demo (requires GPU).\")\n","\n","# %% [markdown]\n","# **Graphs Takeaway:** CUDA Graphs significantly reduce CPU launch overhead for sequences of operations. Ideal for models or sections of code involving many small GPU tasks. Requires fixed operation sequence and careful handling of input/output tensors.\n","\n","# %% [markdown]\n","# ## 5. Debugging Asynchronous Errors\n","#\n","# Because GPU execution is asynchronous, errors (like out-of-bounds access) might not be reported until a later synchronizing operation (`.item()`, `.cpu()`, `torch.cuda.synchronize()`). The Python traceback might point to the wrong line.\n","#\n","# **Solution:** Set the environment variable `CUDA_LAUNCH_BLOCKING=1` when running your script. 
This forces every CUDA operation to run synchronously, making the traceback point to the exact line causing the GPU error.\n","#\n","# ```bash\n","# # Example command line execution\n","# CUDA_LAUNCH_BLOCKING=1 python your_script.py\n","# ```\n","#\n","# Let's simulate this (requires saving and running a separate file).\n","\n","# %%\n","# %%writefile gpu_error_example.py\n","import torch\n","import os\n","\n","# Use GPU if available, otherwise skip gracefully\n","if torch.cuda.is_available():\n"," device = torch.device(\"cuda\")\n"," print(f\"Running on {device}\")\n","\n"," try:\n"," embedding = torch.nn.Embedding(10, 4).to(device)\n"," # Error: Index 10 is out of bounds for embedding size 10 (valid indices 0-9)\n"," bad_input = torch.tensor([[1, 2], [3, 10]], dtype=torch.long, device=device)\n","\n"," print(\"Launching embedding lookup (potentially bad)...\")\n"," embedded_vals = embedding(bad_input) # Error occurs here async\n","\n"," print(\"Launching subsequent operation...\")\n"," result = torch.sigmoid(embedded_vals) # Another async op\n","\n"," print(\"Triggering synchronization via .sum().item()...\")\n"," loss = result.sum().item() # Error likely reported here in default mode\n"," print(f\"Loss (if successful): {loss}\")\n","\n"," except RuntimeError as e:\n"," print(\"\\n--- Caught RuntimeError ---\")\n"," print(e)\n"," # Check if the error message indicates a device-side assert\n"," if \"device-side assert triggered\" in str(e):\n"," print(\"\\nSuggestion: Rerun with 'CUDA_LAUNCH_BLOCKING=1 python gpu_error_example.py' for a more precise traceback.\")\n"," print(\"--- End of Error ---\")\n","\n","else:\n"," print(\"Skipping GPU error example (CUDA not available).\")\n","\n","print(\"Script finished.\")\n","\n","# %% [markdown]\n","# **Running the script normally (simulated output):**\n","# ```\n","# Running on cuda\n","# Launching embedding lookup (potentially bad)...\n","# Launching subsequent operation...\n","# Triggering synchronization via 
.sum().item()...\n","#\n","# --- Caught RuntimeError ---\n","# CUDA error: device-side assert triggered\n","# CUDA kernel errors might be asynchronously reported at some other API call, [...]\n","# Compile with `TORCH_USE_CUDA_DSA` to enable device-side assertions.\n","#\n","#\n","# Suggestion: Rerun with 'CUDA_LAUNCH_BLOCKING=1 python gpu_error_example.py' for a more precise traceback.\n","# --- End of Error ---\n","# Script finished.\n","# ```\n","# > Notice the error is caught near `.item()`, not the `embedding()` call.\n","#\n","# **Running with `CUDA_LAUNCH_BLOCKING=1` (simulated output):**\n","# ```\n","# Running on cuda\n","# Launching embedding lookup (potentially bad)...\n","#\n","# --- Caught RuntimeError ---\n","# CUDA error: device-side assert triggered\n","# Compile with `TORCH_USE_CUDA_DSA` to enable device-side assertions.\n","# --- End of Error ---\n","# Script finished.\n","# ```\n","# > With `CUDA_LAUNCH_BLOCKING=1`, the script would likely crash *during* the `embedding(bad_input)` call, giving a traceback pointing directly to that line. (Actual traceback varies).\n","\n","# %% [markdown]\n","# **Debugging Takeaway:** If you get generic CUDA errors, rerun with `CUDA_LAUNCH_BLOCKING=1` to pinpoint the faulty operation. Remember to turn it off for regular training/inference as it hurts performance.\n","\n","# %% [markdown]\n","# ## 6. Floating Point Precision\n","#\n","# Be aware that floating-point arithmetic is not always associative, and results can differ slightly between:\n","# - CPU vs. GPU execution\n","# - Different GPUs or CUDA versions\n","# - Different libraries (e.g., PyTorch vs. NumPy)\n","# - Even run-to-run on some hardware due to non-deterministic algorithms (though PyTorch tries to mitigate this).\n","#\n","# This is usually only problematic if your code relies on exact floating-point equality. For most DL tasks, small numerical differences are acceptable. 
# FP-precision demo: the same computation on CPU and GPU gives slightly
# different results because floating-point addition is not associative and
# different devices/kernels accumulate in different orders.
torch.manual_seed(1337)
x_cpu = torch.randn(1000, 1000, dtype=torch.float32)
x_gpu = x_cpu.to(device)  # `device` comes from the setup cell above

def matrix_power_sum(x: torch.Tensor) -> float:
    """Return the scalar sum of x @ x @ x @ x @ x @ x (x to the 6th power).

    Uses float64 for the intermediate steps to reduce the accumulation-error
    magnitude for the demo while still exposing CPU/GPU ordering differences.
    """
    # Fix: upcast once instead of six redundant .double() conversions.
    # The left-to-right multiplication order is preserved, so the numeric
    # result is identical to the original chained form.
    xd = x.double()
    y = xd @ xd @ xd @ xd @ xd @ xd
    return y.sum().item()

if use_gpu:
    # Temporarily disable deterministic algorithms so the demo reflects the
    # default kernel selection; the caller's setting is restored afterwards.
    deterministic_setting = torch.are_deterministic_algorithms_enabled()
    torch.use_deterministic_algorithms(False)

    res_cpu = matrix_power_sum(x_cpu)
    res_gpu = matrix_power_sum(x_gpu)

    print(f"Result CPU: {res_cpu}")
    print(f"Result GPU: {res_gpu}")
    print(f"Difference: {abs(res_cpu - res_gpu)}")
    print(f"Relative Difference: {abs(res_cpu - res_gpu) / abs(res_cpu):.2e}")

    # Restore deterministic setting
    torch.use_deterministic_algorithms(deterministic_setting)
else:
    print("Skipping FP precision demo (requires GPU).")
Use CUDA Graphs to reduce launch overhead for small kernels.\n","# - **Debug Smart:** Use `CUDA_LAUNCH_BLOCKING=1` to locate the source of asynchronous GPU errors.\n","# - **Be Aware of Precision:** Expect small floating-point differences across devices/runs.\n","#\n","# Applying these principles will help you build faster and more efficient deep learning systems.\n","\n","# %%\n","print(\"End of tutorial.\")"],"metadata":{"colab":{"base_uri":"https://localhost:8080/","height":883},"id":"whCpKigYwCMy","executionInfo":{"status":"error","timestamp":1745903402396,"user_tz":-420,"elapsed":126,"user":{"displayName":"Laam Pham","userId":"04566654796696849937"}},"outputId":"494be443-fae5-4564-c9d8-2d2835328ab9"},"execution_count":3,"outputs":[{"output_type":"stream","name":"stdout","text":["CUDA device found: NVIDIA L4\n"," Compute Capability: 8.9\n"," Total Memory: 22.16 GB\n"," Multiprocessor Count: 58\n","Incorrect MM time (measures launch overhead): 0.1014 ms\n","Correct MM time (with sync): 1.3654 ms\n","Timing with implicit sync via .item(): 0.1072 ms\n","Result (sum): -71633.71875\n","Initial - Allocated: 140.20 MB, Reserved: 178.00 MB\n","Inside func - Allocated: 156.20 MB, Reserved: 178.00 MB\n","After func - Allocated: 140.20 MB, Reserved: 178.00 MB\n","Inside func - Allocated: 144.20 MB, Reserved: 178.00 MB\n","After smaller - Allocated: 140.20 MB, Reserved: 178.00 MB\n","Inside func - Allocated: 204.20 MB, Reserved: 242.00 MB\n","After larger - Allocated: 140.20 MB, Reserved: 242.00 MB\n","After empty_cache - Allocated: 140.20 MB, Reserved: 156.00 MB\n","Benchmarking with torch.utils.benchmark (handles warmup & sync):\n","Method 1 (mul_sum):\n","\n","batched_dot_mul_sum(x, x)\n"," 364.62 us\n"," 1 measurement, 100 runs , 1 thread\n","Method 2 (bmm):\n","\n","batched_dot_bmm(x, x)\n"," 46.25 us\n"," 1 measurement, 100 runs , 1 thread\n","\n","Comparison:\n"]},{"output_type":"error","ename":"TypeError","evalue":"object of type 'NoneType' has no 
len()","traceback":["\u001b[0;31m---------------------------------------------------------------------------\u001b[0m","\u001b[0;31mTypeError\u001b[0m Traceback (most recent call last)","\u001b[0;32m\u001b[0m in \u001b[0;36m\u001b[0;34m()\u001b[0m\n\u001b[1;32m 200\u001b[0m \u001b[0mcompare\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mbenchmark\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mCompare\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0mm0\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mm1\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 201\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m\"\\nComparison:\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 202\u001b[0;31m \u001b[0mcompare\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 203\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 204\u001b[0m \u001b[0;31m# Quick check with IPython magic (also useful, handles basic timing)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.11/dist-packages/torch/utils/benchmark/utils/compare.py\u001b[0m in \u001b[0;36mprint\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 322\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 323\u001b[0m \u001b[0;34m\"\"\"Print formatted table\"\"\"\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 324\u001b[0;31m \u001b[0mprint\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mstr\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 325\u001b[0m 
\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 326\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m_render\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.11/dist-packages/torch/utils/benchmark/utils/compare.py\u001b[0m in \u001b[0;36m__str__\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 291\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 292\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0m__str__\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 293\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0;34m\"\\n\"\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mjoin\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_render\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 294\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 295\u001b[0m \u001b[0;32mdef\u001b[0m \u001b[0mextend_results\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mresults\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m:\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.11/dist-packages/torch/utils/benchmark/utils/compare.py\u001b[0m in \u001b[0;36m_render\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 327\u001b[0m \u001b[0mresults\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcommon\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mMeasurement\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmerge\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_results\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 328\u001b[0m \u001b[0mgrouped_results\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_group_by_label\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mresults\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 329\u001b[0;31m \u001b[0moutput\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_layout\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mgroup\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mgroup\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mgrouped_results\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalues\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 330\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0moutput\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 331\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.11/dist-packages/torch/utils/benchmark/utils/compare.py\u001b[0m in \u001b[0;36m\u001b[0;34m(.0)\u001b[0m\n\u001b[1;32m 327\u001b[0m \u001b[0mresults\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mcommon\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mMeasurement\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mmerge\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_results\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 328\u001b[0m \u001b[0mgrouped_results\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_group_by_label\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mresults\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 329\u001b[0;31m \u001b[0moutput\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_layout\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mgroup\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mgroup\u001b[0m 
\u001b[0;32min\u001b[0m \u001b[0mgrouped_results\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mvalues\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 330\u001b[0m \u001b[0;32mreturn\u001b[0m \u001b[0moutput\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 331\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.11/dist-packages/torch/utils/benchmark/utils/compare.py\u001b[0m in \u001b[0;36m_layout\u001b[0;34m(self, results)\u001b[0m\n\u001b[1;32m 343\u001b[0m \u001b[0mself\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0m_highlight_warnings\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 344\u001b[0m )\n\u001b[0;32m--> 345\u001b[0;31m \u001b[0;32mreturn\u001b[0m \u001b[0mtable\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mrender\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m","\u001b[0;32m/usr/local/lib/python3.11/dist-packages/torch/utils/benchmark/utils/compare.py\u001b[0m in \u001b[0;36mrender\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m 250\u001b[0m \u001b[0msr\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mextend\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m\"\"\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0m_\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mnum_cols\u001b[0m \u001b[0;34m-\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 251\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 252\u001b[0;31m \u001b[0mcol_widths\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mmax\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mj\u001b[0m\u001b[0;34m)\u001b[0m 
\u001b[0;32mfor\u001b[0m \u001b[0mj\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mi\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mzip\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0mstring_rows\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 253\u001b[0m \u001b[0mfinalized_columns\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m\" | \"\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mjoin\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcenter\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mw\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mw\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mzip\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mstring_rows\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcol_widths\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 254\u001b[0m \u001b[0moverall_width\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfinalized_columns\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.11/dist-packages/torch/utils/benchmark/utils/compare.py\u001b[0m in \u001b[0;36m\u001b[0;34m(.0)\u001b[0m\n\u001b[1;32m 250\u001b[0m \u001b[0msr\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mextend\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m\"\"\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0m_\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mnum_cols\u001b[0m \u001b[0;34m-\u001b[0m 
\u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 251\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 252\u001b[0;31m \u001b[0mcol_widths\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mmax\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mj\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mj\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mi\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mzip\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0mstring_rows\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 253\u001b[0m \u001b[0mfinalized_columns\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m\" | \"\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mjoin\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcenter\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mw\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mw\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mzip\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mstring_rows\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcol_widths\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 254\u001b[0m \u001b[0moverall_width\u001b[0m \u001b[0;34m=\u001b[0m 
\u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfinalized_columns\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;32m/usr/local/lib/python3.11/dist-packages/torch/utils/benchmark/utils/compare.py\u001b[0m in \u001b[0;36m\u001b[0;34m(.0)\u001b[0m\n\u001b[1;32m 250\u001b[0m \u001b[0msr\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mextend\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;34m\"\"\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0m_\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mrange\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mnum_cols\u001b[0m \u001b[0;34m-\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0msr\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 251\u001b[0m \u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m--> 252\u001b[0;31m \u001b[0mcol_widths\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0mmax\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mj\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mj\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mi\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m \u001b[0;32min\u001b[0m \u001b[0mzip\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0;34m*\u001b[0m\u001b[0mstring_rows\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 253\u001b[0m \u001b[0mfinalized_columns\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m\" | \"\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mjoin\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mi\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mcenter\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mw\u001b[0m\u001b[0;34m)\u001b[0m \u001b[0;32mfor\u001b[0m \u001b[0mi\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mw\u001b[0m 
\u001b[0;32min\u001b[0m \u001b[0mzip\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mstring_rows\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0mcol_widths\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 254\u001b[0m \u001b[0moverall_width\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mlen\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfinalized_columns\u001b[0m\u001b[0;34m[\u001b[0m\u001b[0;36m0\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n","\u001b[0;31mTypeError\u001b[0m: object of type 'NoneType' has no len()"]}]},{"cell_type":"code","source":[],"metadata":{"id":"Fglbei4M8qD9"},"execution_count":null,"outputs":[]}]}