From 9cc2bd2782c93db4480924375860e3b1fde86933 Mon Sep 17 00:00:00 2001 From: Bradlee Speice Date: Sun, 10 Nov 2024 16:36:22 -0500 Subject: [PATCH] Revert --- .devcontainer/Dockerfile | 8 +- .devcontainer/devcontainer.json | 40 +- .gitignore | 32 +- .husky/pre-commit | 4 - .vale.ini | 7 + 404.html | 24 + CNAME | 1 + Gemfile | 29 + Gemfile.lock | 78 + _config.yml | 44 + _includes/footer.html | 23 + _includes/head.html | 7 + _includes/nav.html | 7 + _includes/page_header.html | 15 + _pages/about.md | 13 + _posts/2018-05-28-hello.md | 38 + _posts/2018-06-25-dateutil-parser-to-rust.md | 177 + ...2018-09-01-primitives-in-rust-are-weird.md | 323 ++ _posts/2018-09-15-isomorphic-apps.md | 294 ++ _posts/2018-10-08-case-study-optimization.md | 168 + ...-12-04-what-small-business-really-means.md | 34 + _posts/2018-12-15-allocation-safety.md | 218 + ...02-04-understanding-allocations-in-rust.md | 113 + .../2019-02-05-the-whole-world.md | 11 +- _posts/2019-02-06-stacking-up.md | 601 +++ _posts/2019-02-07-a-heaping-helping.md | 254 ++ _posts/2019-02-08-compiler-optimizations.md | 148 + _posts/2019-02-09-summary.md | 35 + _posts/2019-05-03-making-bread.md | 52 + _posts/2019-06-31-high-performance-systems.md | 296 ++ _posts/2019-09-28-binary-format-shootout.md | 263 ++ _posts/2019-12-14-release-the-gil.md | 370 ++ .../2022-11-20-webpack-industrial-complex.md | 60 + assets/css/fonts.css | 15 + assets/css/style.scss | 119 + assets/font/JetBrainsMono-Regular.woff | Bin 0 -> 59368 bytes assets/font/JetBrainsMono-Regular.woff2 | Bin 0 -> 44688 bytes assets/font/lato-regular-webfont.woff | Bin 0 -> 38260 bytes assets/font/lato-regular-webfont.woff2 | Bin 0 -> 30308 bytes assets/images/2018-05-28-bread.jpg | Bin 0 -> 860195 bytes assets/images/2018-05-28-rocks.jpg | Bin 0 -> 947828 bytes assets/images/2018-06-25-gravel-mound.jpg | Bin 0 -> 168900 bytes .../images/2018-09-15-electron-percy-wasm.png | Bin 0 -> 51642 bytes .../images/2018-09-15-incorrect-MIME-type.png | Bin 0 -> 49655 bytes .../2018-10-heaptrack/heaptrack-after.png | Bin 0 -> 72414 bytes .../2018-10-heaptrack/heaptrack-before.png | Bin 0 -> 69842 bytes .../heaptrack-closeup-after.png | Bin 0 -> 23782 bytes .../2018-10-heaptrack/heaptrack-closeup.png | Bin 0 -> 24166 bytes .../heaptrack-dtparse-colorized.png | Bin 0 -> 126839 bytes .../heaptrack-flamegraph-after.png | Bin 0 -> 148377 bytes .../heaptrack-flamegraph-default.png | Bin 0 -> 138608 bytes .../heaptrack-flamegraph.png | Bin 0 -> 141686 bytes .../heaptrack-flamegraph.xcf | Bin 0 -> 585864 bytes .../heaptrack-main-colorized.png | Bin 0 -> 100144 bytes .../heaptrack-parseinfo-colorized.png | Bin 0 -> 137074 bytes assets/images/2019-02-04-container-size.svg | 1 + assets/images/2019-04-24-kung-fu.webp | Bin 0 -> 310854 bytes .../2019-05-03-making-bread/final-product.jpg | Bin 0 -> 352383 bytes .../2019-05-03-making-bread/shaped-loaves.jpg | Bin 0 -> 303504 bytes .../shattered-glass.jpg | Bin 0 -> 385640 bytes .../white-dough-rising-after-fold.jpg | Bin 0 -> 173498 bytes .../white-dough-rising-before-fold.jpg | Bin 0 -> 143739 bytes .../whole-wheat-not-rising.jpg | Bin 0 -> 198406 bytes assets/images/2022-11-20-video_mp2t.png | Bin 0 -> 48651 bytes index.md | 6 + package-lock.json | 3711 ----------------- package.json | 43 - pages/LayoutBase.tsx | 16 - pages/LayoutPage.tsx | 12 - pages/Navbar.tsx | 18 - pages/about.mdx | 23 - pages/index.tsx | 18 - pages/style.css | 130 - posts/LayoutBlog.tsx | 49 - tsconfig.json | 19 - vite.config.ts | 30 - 76 files changed, 3860 insertions(+), 4137 deletions(-) 
delete mode 100755 .husky/pre-commit create mode 100644 .vale.ini create mode 100644 404.html create mode 100644 CNAME create mode 100644 Gemfile create mode 100644 Gemfile.lock create mode 100644 _config.yml create mode 100644 _includes/footer.html create mode 100644 _includes/head.html create mode 100644 _includes/nav.html create mode 100644 _includes/page_header.html create mode 100644 _pages/about.md create mode 100644 _posts/2018-05-28-hello.md create mode 100644 _posts/2018-06-25-dateutil-parser-to-rust.md create mode 100644 _posts/2018-09-01-primitives-in-rust-are-weird.md create mode 100644 _posts/2018-09-15-isomorphic-apps.md create mode 100644 _posts/2018-10-08-case-study-optimization.md create mode 100644 _posts/2018-12-04-what-small-business-really-means.md create mode 100644 _posts/2018-12-15-allocation-safety.md create mode 100644 _posts/2019-02-04-understanding-allocations-in-rust.md rename posts/2019/02/the-whole-world.mdx => _posts/2019-02-05-the-whole-world.md (98%) create mode 100644 _posts/2019-02-06-stacking-up.md create mode 100644 _posts/2019-02-07-a-heaping-helping.md create mode 100644 _posts/2019-02-08-compiler-optimizations.md create mode 100644 _posts/2019-02-09-summary.md create mode 100644 _posts/2019-05-03-making-bread.md create mode 100644 _posts/2019-06-31-high-performance-systems.md create mode 100644 _posts/2019-09-28-binary-format-shootout.md create mode 100644 _posts/2019-12-14-release-the-gil.md create mode 100644 _posts/2022-11-20-webpack-industrial-complex.md create mode 100644 assets/css/fonts.css create mode 100644 assets/css/style.scss create mode 100644 assets/font/JetBrainsMono-Regular.woff create mode 100644 assets/font/JetBrainsMono-Regular.woff2 create mode 100644 assets/font/lato-regular-webfont.woff create mode 100644 assets/font/lato-regular-webfont.woff2 create mode 100644 assets/images/2018-05-28-bread.jpg create mode 100644 assets/images/2018-05-28-rocks.jpg create mode 100644 assets/images/2018-06-25-gravel-mound.jpg create mode 100644 assets/images/2018-09-15-electron-percy-wasm.png create mode 100644 assets/images/2018-09-15-incorrect-MIME-type.png create mode 100644 assets/images/2018-10-heaptrack/heaptrack-after.png create mode 100644 assets/images/2018-10-heaptrack/heaptrack-before.png create mode 100644 assets/images/2018-10-heaptrack/heaptrack-closeup-after.png create mode 100644 assets/images/2018-10-heaptrack/heaptrack-closeup.png create mode 100644 assets/images/2018-10-heaptrack/heaptrack-dtparse-colorized.png create mode 100644 assets/images/2018-10-heaptrack/heaptrack-flamegraph-after.png create mode 100644 assets/images/2018-10-heaptrack/heaptrack-flamegraph-default.png create mode 100644 assets/images/2018-10-heaptrack/heaptrack-flamegraph.png create mode 100644 assets/images/2018-10-heaptrack/heaptrack-flamegraph.xcf create mode 100644 assets/images/2018-10-heaptrack/heaptrack-main-colorized.png create mode 100644 assets/images/2018-10-heaptrack/heaptrack-parseinfo-colorized.png create mode 100644 assets/images/2019-02-04-container-size.svg create mode 100644 assets/images/2019-04-24-kung-fu.webp create mode 100644 assets/images/2019-05-03-making-bread/final-product.jpg create mode 100644 assets/images/2019-05-03-making-bread/shaped-loaves.jpg create mode 100644 assets/images/2019-05-03-making-bread/shattered-glass.jpg create mode 100644 assets/images/2019-05-03-making-bread/white-dough-rising-after-fold.jpg create mode 100644 assets/images/2019-05-03-making-bread/white-dough-rising-before-fold.jpg create mode 100644 
assets/images/2019-05-03-making-bread/whole-wheat-not-rising.jpg create mode 100644 assets/images/2022-11-20-video_mp2t.png create mode 100644 index.md delete mode 100644 package-lock.json delete mode 100644 package.json delete mode 100644 pages/LayoutBase.tsx delete mode 100644 pages/LayoutPage.tsx delete mode 100644 pages/Navbar.tsx delete mode 100644 pages/about.mdx delete mode 100644 pages/index.tsx delete mode 100644 pages/style.css delete mode 100644 posts/LayoutBlog.tsx delete mode 100644 tsconfig.json delete mode 100644 vite.config.ts diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile index b59f725..7159a0b 100644 --- a/.devcontainer/Dockerfile +++ b/.devcontainer/Dockerfile @@ -1,2 +1,6 @@ -ARG VARIANT=16-bullseye -FROM mcr.microsoft.com/vscode/devcontainers/typescript-node:0-${VARIANT} +FROM mcr.microsoft.com/vscode/devcontainers/ruby:0-2.7-bullseye + +RUN wget https://github.com/errata-ai/vale/releases/download/v2.21.0/vale_2.21.0_Linux_64-bit.tar.gz -O /tmp/vale.tar.gz \ + && cd /usr/local/bin \ + && tar xf /tmp/vale.tar.gz \ + && rm /tmp/vale.tar.gz \ No newline at end of file diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json index a82cd7e..1d206de 100644 --- a/.devcontainer/devcontainer.json +++ b/.devcontainer/devcontainer.json @@ -1,35 +1,13 @@ // For format details, see https://aka.ms/devcontainer.json. For config options, see the README at: -// https://github.com/microsoft/vscode-dev-containers/tree/v0.245.0/containers/typescript-node +// https://github.com/microsoft/vscode-dev-containers/tree/v0.245.0/containers/ruby { - "name": "Node.js & TypeScript", - "build": { - "dockerfile": "Dockerfile", - // Update 'VARIANT' to pick a Node version: 18, 16, 14. - // Append -bullseye or -buster to pin to an OS version. - // Use -bullseye variants on local on arm64/Apple Silicon. - "args": { - "VARIANT": "18-bullseye" - } - }, - "runArgs": ["--userns=keep-id"], + "name": "Ruby", + "build": { + "dockerfile": "Dockerfile" + }, + "runArgs": ["--userns=keep-id"], - // Configure tool-specific properties. - "customizations": { - // Configure properties specific to VS Code. - "vscode": { - // Add the IDs of extensions you want installed when the container is created. - "extensions": ["dbaeumer.vscode-eslint"] - } - }, - - // Use 'forwardPorts' to make a list of ports inside the container available locally. - // "forwardPorts": [], - - // Use 'postCreateCommand' to run commands after the container is created. - // "postCreateCommand": "yarn install", - - // Comment out to connect as root instead. More info: https://aka.ms/vscode-remote/containers/non-root. - "remoteUser": "node", - "containerUser": "node", - "workspaceMount": "source=${localWorkspaceFolder},target=/workspaces/${localWorkspaceFolderBasename},type=bind,Z" + "remoteUser": "vscode", + "containerUser": "vscode", + "workspaceMount": "source=${localWorkspaceFolder},target=/workspaces/${localWorkspaceFolderBasename},type=bind,Z" } diff --git a/.gitignore b/.gitignore index a547bf3..508b6b2 100644 --- a/.gitignore +++ b/.gitignore @@ -1,24 +1,8 @@ -# Logs -logs -*.log -npm-debug.log* -yarn-debug.log* -yarn-error.log* -pnpm-debug.log* -lerna-debug.log* - -node_modules -dist -dist-ssr -*.local - -# Editor directories and files -.vscode/* -!.vscode/extensions.json -.idea -.DS_Store -*.suo -*.ntvs* -*.njsproj -*.sln -*.sw? 
+_site/ +.swp +.sass-cache/ +.jekyll-metadata +.bundle/ +vendor/ +.styles/ +.vscode/ diff --git a/.husky/pre-commit b/.husky/pre-commit deleted file mode 100755 index 0da96d6..0000000 --- a/.husky/pre-commit +++ /dev/null @@ -1,4 +0,0 @@ -#!/usr/bin/env sh -. "$(dirname -- "$0")/_/husky.sh" - -npx pretty-quick --staged diff --git a/.vale.ini b/.vale.ini new file mode 100644 index 0000000..777f626 --- /dev/null +++ b/.vale.ini @@ -0,0 +1,7 @@ +StylesPath = .styles +MinAlertLevel = suggestion +Packages = Microsoft, write-good + +[*] +BasedOnStyles = Vale, Microsoft, write-good +write-good.E-Prime = NO \ No newline at end of file diff --git a/404.html b/404.html new file mode 100644 index 0000000..3969180 --- /dev/null +++ b/404.html @@ -0,0 +1,24 @@ +--- +layout: page +--- + + + +
+<div class="container">
+  <h1>404</h1>
+
+  <p><strong>Page not found :(</strong></p>
+  <p>The requested page could not be found.</p>
+</div>
diff --git a/CNAME b/CNAME new file mode 100644 index 0000000..8875e7a --- /dev/null +++ b/CNAME @@ -0,0 +1 @@ +speice.io diff --git a/Gemfile b/Gemfile new file mode 100644 index 0000000..3031ef3 --- /dev/null +++ b/Gemfile @@ -0,0 +1,29 @@ +source "https://rubygems.org" + +# Hello! This is where you manage which Jekyll version is used to run. +# When you want to use a different version, change it below, save the +# file and run `bundle install`. Run Jekyll with `bundle exec`, like so: +# +# bundle exec jekyll serve +# +# This will help ensure the proper Jekyll version is running. +# Happy Jekylling! +gem "jekyll", "~> 3.8.3" + +gem "texture" + +# If you want to use GitHub Pages, remove the "gem "jekyll"" above and +# uncomment the line below. To upgrade, run `bundle update github-pages`. +# gem "github-pages", group: :jekyll_plugins + +# If you have any plugins, put them here! +group :jekyll_plugins do + gem "jekyll-feed", "~> 0.6" + gem "jekyll-remote-theme" +end + +# Windows does not include zoneinfo files, so bundle the tzinfo-data gem +gem "tzinfo-data", platforms: [:mingw, :mswin, :x64_mingw, :jruby] + +# Performance-booster for watching directories on Windows +gem "wdm", "~> 0.1.0" if Gem.win_platform? diff --git a/Gemfile.lock b/Gemfile.lock new file mode 100644 index 0000000..310c738 --- /dev/null +++ b/Gemfile.lock @@ -0,0 +1,78 @@ +GEM + remote: https://rubygems.org/ + specs: + addressable (2.7.0) + public_suffix (>= 2.0.2, < 5.0) + colorator (1.1.0) + concurrent-ruby (1.1.6) + em-websocket (0.5.1) + eventmachine (>= 0.12.9) + http_parser.rb (~> 0.6.0) + eventmachine (1.2.7) + ffi (1.12.2) + forwardable-extended (2.6.0) + http_parser.rb (0.6.0) + i18n (0.9.5) + concurrent-ruby (~> 1.0) + jekyll (3.8.6) + addressable (~> 2.4) + colorator (~> 1.0) + em-websocket (~> 0.5) + i18n (~> 0.7) + jekyll-sass-converter (~> 1.0) + jekyll-watch (~> 2.0) + kramdown (~> 1.14) + liquid (~> 4.0) + mercenary (~> 0.3.3) + pathutil (~> 0.9) + rouge (>= 1.7, < 4) + safe_yaml (~> 1.0) + jekyll-feed (0.13.0) + jekyll (>= 3.7, < 5.0) + jekyll-remote-theme (0.4.2) + addressable (~> 2.0) + jekyll (>= 3.5, < 5.0) + jekyll-sass-converter (>= 1.0, <= 3.0.0, != 2.0.0) + rubyzip (>= 1.3.0, < 3.0) + jekyll-sass-converter (1.5.2) + sass (~> 3.4) + jekyll-seo-tag (2.6.1) + jekyll (>= 3.3, < 5.0) + jekyll-watch (2.2.1) + listen (~> 3.0) + kramdown (1.17.0) + liquid (4.0.3) + listen (3.2.1) + rb-fsevent (~> 0.10, >= 0.10.3) + rb-inotify (~> 0.9, >= 0.9.10) + mercenary (0.3.6) + pathutil (0.16.2) + forwardable-extended (~> 2.6) + public_suffix (4.0.4) + rb-fsevent (0.10.3) + rb-inotify (0.10.1) + ffi (~> 1.0) + rouge (3.17.0) + rubyzip (2.3.0) + safe_yaml (1.0.5) + sass (3.7.4) + sass-listen (~> 4.0.0) + sass-listen (4.0.0) + rb-fsevent (~> 0.9, >= 0.9.4) + rb-inotify (~> 0.9, >= 0.9.7) + texture (0.3) + jekyll (~> 3.7) + jekyll-seo-tag (~> 2.1) + +PLATFORMS + ruby + +DEPENDENCIES + jekyll (~> 3.8.3) + jekyll-feed (~> 0.6) + jekyll-remote-theme + texture + tzinfo-data + +BUNDLED WITH + 2.1.4 diff --git a/_config.yml b/_config.yml new file mode 100644 index 0000000..9f9c8cd --- /dev/null +++ b/_config.yml @@ -0,0 +1,44 @@ +# Welcome to Jekyll! +# +# This config file is meant for settings that affect your whole blog, values +# which you are expected to set up once and rarely edit after that. If you find +# yourself editing this file very often, consider using Jekyll's data files +# feature for the data you need to update frequently. 
+# +# For technical reasons, this file is *NOT* reloaded automatically when you use +# 'bundle exec jekyll serve'. If you change this file, please restart the server process. + +# Site settings +# These are used to personalize your new site. If you look in the HTML files, +# you will see them accessed via {{ site.title }}, {{ site.email }}, and so on. +# You can create any custom variable you would like, and they will be accessible +# in the templates via {{ site.myvariable }}. +title: speice.io +description: The Old Speice Guy +email: bradlee@speice.io +baseurl: "" # the subpath of your site, e.g. /blog +url: "https://speice.io/" # the base hostname & protocol for your site, e.g. http://example.com +github_username: bspeice + +# Build settings +markdown: kramdown +# theme: texture +remote_theme: thelehhman/texture +plugins: + - jekyll-feed + - jekyll-remote-theme + +include: [_pages] +permalink: /:year/:month/:title.html + +# Exclude from processing. +# The following items will not be processed, by default. Create a custom list +# to override the default setting. +# exclude: +# - Gemfile +# - Gemfile.lock +# - node_modules +# - vendor/bundle/ +# - vendor/cache/ +# - vendor/gems/ +# - vendor/ruby/ diff --git a/_includes/footer.html b/_includes/footer.html new file mode 100644 index 0000000..4d3c143 --- /dev/null +++ b/_includes/footer.html @@ -0,0 +1,23 @@ +{% if page.layout == 'post' %} +{% comment %}Thanks to https://www.bytedude.com/jekyll-previous-and-next-posts/{% endcomment %} +
+<div>
+  <div>
+    {% if page.previous.url %}
+    <a href="{{page.previous.url}}">« {{page.previous.title}}</a>
+    {% endif %}
+  </div>
+  <div>
+    {% if page.next.url %}
+    <a href="{{page.next.url}}">{{page.next.title}} »</a>
+    {% endif %}
+  </div>
+</div>
+ + + +{% endif %} \ No newline at end of file diff --git a/_includes/head.html b/_includes/head.html new file mode 100644 index 0000000..ec0f85d --- /dev/null +++ b/_includes/head.html @@ -0,0 +1,7 @@ + + + + + +{{ page.title | default: site.title }} +{% seo %} \ No newline at end of file diff --git a/_includes/nav.html b/_includes/nav.html new file mode 100644 index 0000000..14ca138 --- /dev/null +++ b/_includes/nav.html @@ -0,0 +1,7 @@ + \ No newline at end of file diff --git a/_includes/page_header.html b/_includes/page_header.html new file mode 100644 index 0000000..e486090 --- /dev/null +++ b/_includes/page_header.html @@ -0,0 +1,15 @@ +
+<h1>{{ site.title }}</h1>
+<p>{{ site.description }}</p>
\ No newline at end of file diff --git a/_pages/about.md b/_pages/about.md new file mode 100644 index 0000000..3f45159 --- /dev/null +++ b/_pages/about.md @@ -0,0 +1,13 @@ +--- +layout: page +title: About +permalink: /about/ +--- + +Developer currently living in New York City. + +Best ways to get in contact: + +- Email: [bradlee@speice.io](mailto:bradlee@speice.io) +- Github: [bspeice](https://github.com/bspeice) +- LinkedIn: [bradleespeice](https://www.linkedin.com/in/bradleespeice/) diff --git a/_posts/2018-05-28-hello.md b/_posts/2018-05-28-hello.md new file mode 100644 index 0000000..f7c76c7 --- /dev/null +++ b/_posts/2018-05-28-hello.md @@ -0,0 +1,38 @@ +--- +layout: post +title: "Hello!" +description: "" +category: +tags: [] +--- + +I'll do what I can to keep this short, there's plenty of other things we both should be doing right +now. + +If you're here for the bread pics, and to marvel in some other culinary side projects, I've got you +covered: + +![Saturday Bread]({{ "/assets/images/2018-05-28-bread.jpg" | absolute_url }}) + +And no, I'm not posting pictures of earlier attempts that ended up turning into rocks in the oven. + +Okay, just one: + +![Bread as rock]({{ "/assets/images/2018-05-28-rocks.jpg" | absolute_url }}) + +If you're here for keeping up with the man Bradlee Speice, got plenty of that too. Plus some +up-coming super-nerdy posts about how I'm changing the world. + +And if you're not here for those things: don't have a lot for you, sorry. But you're welcome to let +me know what needs to change. + +I'm looking forward to making this a place to talk about what's going on in life, I hope you'll +stick it out with me. The best way to follow what's going on is on my [About](/about/) page, but if +you want the joy of clicking links, here's a few good ones: + +- Email (people still use this?): [bradlee@speice.io](mailto:bradlee@speice.io) +- Mastodon (nerd Twitter): [@bradlee](https://mastodon.social/@bradlee) +- Chat (RiotIM): [@bspeice:matrix.com](https://matrix.to/#/@bspeice:matrix.com) +- The comments section (not for people with sanity intact): ↓↓↓ + +Thanks, and keep it amazing. diff --git a/_posts/2018-06-25-dateutil-parser-to-rust.md b/_posts/2018-06-25-dateutil-parser-to-rust.md new file mode 100644 index 0000000..7646f28 --- /dev/null +++ b/_posts/2018-06-25-dateutil-parser-to-rust.md @@ -0,0 +1,177 @@ +--- +layout: post +title: "What I Learned: Porting Dateutil Parser to Rust" +description: "" +category: +tags: [dtparse, rust] +--- + +Hi. I'm Bradlee. + +I've mostly been a lurker in Rust for a while, making a couple small contributions here and there. +So launching [dtparse](https://github.com/bspeice/dtparse) feels like nice step towards becoming a +functioning member of society. But not too much, because then you know people start asking you to +pay bills, and ain't nobody got time for that. + +But I built dtparse, and you can read about my thoughts on the process. Or don't. I won't tell you +what to do with your life (but you should totally keep reading). + +# Slow down, what? + +OK, fine, I guess I should start with _why_ someone would do this. + +[Dateutil](https://github.com/dateutil/dateutil) is a Python library for handling dates. The +standard library support for time in Python is kinda dope, but there are a lot of extras that go +into making it useful beyond just the [datetime](https://docs.python.org/3.6/library/datetime.html) +module. 
`dateutil.parser` specifically is code to take all the super-weird time formats people come +up with and turn them into something actually useful. + +Date/time parsing, it turns out, is just like everything else involving +[computers](https://infiniteundo.com/post/25326999628/falsehoods-programmers-believe-about-time) and +[time](https://infiniteundo.com/post/25509354022/more-falsehoods-programmers-believe-about-time): it +feels like it shouldn't be that difficult to do, until you try to do it, and you realize that people +suck and this is why +[we can't we have nice things](https://zachholman.com/talk/utc-is-enough-for-everyone-right). But +alas, we'll try and make contemporary art out of the rubble and give it a pretentious name like +_Time_. + +![A gravel mound](/assets/images/2018-06-25-gravel-mound.jpg) + +> [Time](https://www.goodfreephotos.com/united-states/montana/elkhorn/remains-of-the-mining-operation-elkhorn.jpg.php) + +What makes `dateutil.parser` great is that there's single function with a single argument that +drives what programmers interact with: +[`parse(timestr)`](https://github.com/dateutil/dateutil/blob/6dde5d6298cfb81a4c594a38439462799ed2aef2/dateutil/parser/_parser.py#L1258). +It takes in the time as a string, and gives you back a reasonable "look, this is the best anyone can +possibly do to make sense of your input" value. It doesn't expect much of you. + +[And now it's in Rust.](https://github.com/bspeice/dtparse/blob/7d565d3a78876dbebd9711c9720364fe9eba7915/src/lib.rs#L1332) + +# Lost in Translation + +Having worked at a bulge-bracket bank watching Java programmers try to be Python programmers, I'm +admittedly hesitant to publish Python code that's trying to be Rust. Interestingly, Rust code can +actually do a great job of mimicking Python. It's certainly not idiomatic Rust, but I've had better +experiences than +[this guy](https://webcache.googleusercontent.com/search?q=cache:wkYMpktJtnUJ:https://jackstouffer.com/blog/porting_dateutil.html+&cd=3&hl=en&ct=clnk&gl=us) +who attempted the same thing for D. These are the actual take-aways: + +When transcribing code, **stay as close to the original library as possible**. I'm talking about +using the same variable names, same access patterns, the whole shebang. It's way too easy to make a +couple of typos, and all of a sudden your code blows up in new and exciting ways. Having a reference +manual for verbatim what your code should be means that you don't spend that long debugging +complicated logic, you're more looking for typos. + +Also, **don't use nice Rust things like enums**. While +[one time it worked out OK for me](https://github.com/bspeice/dtparse/blob/7d565d3a78876dbebd9711c9720364fe9eba7915/src/lib.rs#L88-L94), +I also managed to shoot myself in the foot a couple times because `dateutil` stores AM/PM as a +boolean and I mixed up which was true, and which was false (side note: AM is false, PM is true). In +general, writing nice code _should not be a first-pass priority_ when you're just trying to recreate +the same functionality. + +**Exceptions are a pain.** Make peace with it. Python code is just allowed to skip stack frames. So +when a co-worker told me "Rust is getting try-catch syntax" I properly freaked out. Turns out +[he's not quite right](https://github.com/rust-lang/rfcs/pull/243), and I'm OK with that. 
And while +`dateutil` is pretty well-behaved about not skipping multiple stack frames, +[130-line try-catch blocks](https://github.com/dateutil/dateutil/blob/16561fc99361979e88cccbd135393b06b1af7e90/dateutil/parser/_parser.py#L730-L865) +take a while to verify. + +As another Python quirk, **be very careful about +[long nested if-elif-else blocks](https://github.com/dateutil/dateutil/blob/16561fc99361979e88cccbd135393b06b1af7e90/dateutil/parser/_parser.py#L494-L568)**. +I used to think that Python's whitespace was just there to get you to format your code correctly. I +think that no longer. It's way too easy to close a block too early and have incredibly weird issues +in the logic. Make sure you use an editor that displays indentation levels so you can keep things +straight. + +**Rust macros are not free.** I originally had the +[main test body](https://github.com/bspeice/dtparse/blob/b0e737f088eca8e83ab4244c6621a2797d247697/tests/compat.rs#L63-L217) +wrapped up in a macro using [pyo3](https://github.com/PyO3/PyO3). It took two minutes to compile. +After +[moving things to a function](https://github.com/bspeice/dtparse/blob/e017018295c670e4b6c6ee1cfff00dbb233db47d/tests/compat.rs#L76-L205) +compile times dropped down to ~5 seconds. Turns out 150 lines \* 100 tests = a lot of redundant code +to be compiled. My new rule of thumb is that any macros longer than 10-15 lines are actually +functions that need to be liberated, man. + +Finally, **I really miss list comprehensions and dictionary comprehensions.** As a quick comparison, +see +[this dateutil code](https://github.com/dateutil/dateutil/blob/16561fc99361979e88cccbd135393b06b1af7e90/dateutil/parser/_parser.py#L476) +and +[the implementation in Rust](https://github.com/bspeice/dtparse/blob/7d565d3a78876dbebd9711c9720364fe9eba7915/src/lib.rs#L619-L629). +I probably wrote it wrong, and I'm sorry. Ultimately though, I hope that these comprehensions can be +added through macros or syntax extensions. Either way, they're expressive, save typing, and are +super-readable. Let's get more of that. + +# Using a young language + +Now, Rust is exciting and new, which means that there's opportunity to make a substantive impact. On +more than one occasion though, I've had issues navigating the Rust ecosystem. + +What I'll call the "canonical library" is still being built. In Python, if you need datetime +parsing, you use `dateutil`. If you want `decimal` types, it's already in the +[standard library](https://docs.python.org/3.6/library/decimal.html). While I might've gotten away +with `f64`, `dateutil` uses decimals, and I wanted to follow the principle of **staying as close to +the original library as possible**. Thus began my quest to find a decimal library in Rust. What I +quickly found was summarized in a comment: + +> Writing a BigDecimal is easy. Writing a _good_ BigDecimal is hard. +> +> [-cmr](https://github.com/rust-lang/rust/issues/8937#issuecomment-34582794) + +In practice, this means that there are at least [4](https://crates.io/crates/bigdecimal) +[different](https://crates.io/crates/rust_decimal) +[implementations](https://crates.io/crates/decimal) [available](https://crates.io/crates/decimate). 
+And that's a lot of decisions to worry about when all I'm thinking is "why can't +[calendar reform](https://en.wikipedia.org/wiki/Calendar_reform) be a thing" and I'm forced to dig +through a [couple](https://github.com/rust-lang/rust/issues/8937#issuecomment-31661916) +[different](https://github.com/rust-lang/rfcs/issues/334) +[threads](https://github.com/rust-num/num/issues/8) to figure out if the library I'm look at is dead +or just stable. + +And even when the "canonical library" exists, there's no guarantees that it will be well-maintained. +[Chrono](https://github.com/chronotope/chrono) is the _de facto_ date/time library in Rust, and just +released version 0.4.4 like two days ago. Meanwhile, +[chrono-tz](https://github.com/chronotope/chrono-tz) appears to be dead in the water even though +[there are people happy to help maintain it](https://github.com/chronotope/chrono-tz/issues/19). I +know relatively little about it, but it appears that most of the release process is automated; +keeping that up to date should be a no-brainer. + +## Trial Maintenance Policy + +Specifically given "maintenance" being an +[oft-discussed](https://www.reddit.com/r/rust/comments/48540g/thoughts_on_initiators_vs_maintainers/) +issue, I'm going to try out the following policy to keep things moving on `dtparse`: + +1. Issues/PRs needing _maintainer_ feedback will be updated at least weekly. I want to make sure + nobody's blocking on me. + +2. To keep issues/PRs needing _contributor_ feedback moving, I'm going to (kindly) ask the + contributor to check in after two weeks, and close the issue without resolution if I hear nothing + back after a month. + +The second point I think has the potential to be a bit controversial, so I'm happy to receive +feedback on that. And if a contributor responds with "hey, still working on it, had a kid and I'm +running on 30 seconds of sleep a night," then first: congratulations on sustaining human life. And +second: I don't mind keeping those requests going indefinitely. I just want to try and balance +keeping things moving with giving people the necessary time they need. + +I should also note that I'm still getting some best practices in place - CONTRIBUTING and +CONTRIBUTORS files need to be added, as well as issue/PR templates. In progress. None of us are +perfect. + +# Roadmap and Conclusion + +So if I've now built a `dateutil`-compatible parser, we're done, right? Of course not! That's not +nearly ambitious enough. + +Ultimately, I'd love to have a library that's capable of parsing everything the Linux `date` command +can do (and not `date` on OSX, because seriously, BSD coreutils are the worst). I know Rust has a +coreutils rewrite going on, and `dtparse` would potentially be an interesting candidate since it +doesn't bring in a lot of extra dependencies. [`humantime`](https://crates.io/crates/humantime) +could help pick up some of the (current) slack in dtparse, so maybe we can share and care with each +other? + +All in all, I'm mostly hoping that nobody's already done this and I haven't spent a bit over a month +on redundant code. So if it exists, tell me. I need to know, but be nice about it, because I'm going +to take it hard. + +And in the mean time, I'm looking forward to building more. Onwards. 
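
For anyone who just wants to see the result in action, here's a minimal sketch of calling the ported parser. It leans on the crate's single `parse` entry point described above (which hands back a chrono `NaiveDateTime` plus an optional offset, wrapped in a `Result`); the example string and the error handling are purely illustrative.

```rust
use dtparse::parse;

fn main() {
    // One function, one string in, a best-effort datetime out - the same
    // spirit as dateutil.parser.parse(timestr) on the Python side.
    let (datetime, offset) = parse("January 1, 2019 12:30 PM")
        .expect("dtparse couldn't make sense of the input");

    println!("parsed: {} (offset: {:?})", datetime, offset);
}
```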
diff --git a/_posts/2018-09-01-primitives-in-rust-are-weird.md b/_posts/2018-09-01-primitives-in-rust-are-weird.md new file mode 100644 index 0000000..bcc8ae8 --- /dev/null +++ b/_posts/2018-09-01-primitives-in-rust-are-weird.md @@ -0,0 +1,323 @@ +--- +layout: post +title: "Primitives in Rust are Weird (and Cool)" +description: "but mostly weird." +category: +tags: [rust, c, java, python, x86] +--- + +I wrote a really small Rust program a while back because I was curious. I was 100% convinced it +couldn't possibly run: + +```rust +fn main() { + println!("{}", 8.to_string()) +} +``` + +And to my complete befuddlement, it compiled, ran, and produced a completely sensible output. The +reason I was so surprised has to do with how Rust treats a special category of things I'm going to +call _primitives_. In the current version of the Rust book, you'll see them referred to as +[scalars][rust_scalar], and in older versions they'll be called [primitives][rust_primitive], but +we're going to stick with the name _primitive_ for the time being. Explaining why this program is so +cool requires talking about a number of other programming languages, and keeping a consistent +terminology makes things easier. + +**You've been warned:** this is going to be a tedious post about a relatively minor issue that +involves Java, Python, C, and x86 Assembly. And also me pretending like I know what I'm talking +about with assembly. + +# Defining primitives (Java) + +The reason I'm using the name _primitive_ comes from how much of my life is Java right now. Spoiler +alert: a lot of it. And for the most part I like Java, but I digress. In Java, there's a special +name for some specific types of values: + +> ``` +> bool char byte +> short int long +> float double +> ``` + +```` + +They are referred to as [primitives][java_primitive]. And relative to the other bits of Java, +they have two unique features. First, they don't have to worry about the +[billion-dollar mistake](https://en.wikipedia.org/wiki/Tony_Hoare#Apologies_and_retractions); +primitives in Java can never be `null`. Second: *they can't have instance methods*. +Remember that Rust program from earlier? Java has no idea what to do with it: + +```java +class Main { + public static void main(String[] args) { + int x = 8; + System.out.println(x.toString()); // Triggers a compiler error + } +} +```` + +The error is: + +``` +Main.java:5: error: int cannot be dereferenced + System.out.println(x.toString()); + ^ +1 error +``` + +Specifically, Java's [`Object`](https://docs.oracle.com/javase/10/docs/api/java/lang/Object.html) +and things that inherit from it are pointers under the hood, and we have to dereference them before +the fields and methods they define can be used. In contrast, _primitive types are just values_ - +there's nothing to be dereferenced. In memory, they're just a sequence of bits. + +If we really want, we can turn the `int` into an +[`Integer`](https://docs.oracle.com/javase/10/docs/api/java/lang/Integer.html) and then dereference +it, but it's a bit wasteful: + +```java +class Main { + public static void main(String[] args) { + int x = 8; + Integer y = Integer.valueOf(x); + System.out.println(y.toString()); + } +} +``` + +This creates the variable `y` of type `Integer` (which inherits `Object`), and at run time we +dereference `y` to locate the `toString()` function and call it. Rust obviously handles things a bit +differently, but we have to dig into the low-level details to see it in action. 
+ +# Low Level Handling of Primitives (C) + +We first need to build a foundation for reading and understanding the assembly code the final answer +requires. Let's begin with showing how the `C` language (and your computer) thinks about "primitive" +values in memory: + +```c +void my_function(int num) {} + +int main() { + int x = 8; + my_function(x); +} +``` + +The [compiler explorer](https://godbolt.org/z/lgNYcc) gives us an easy way of showing off the +assembly-level code that's generated: whose output has been lightly +edited + +```nasm +main: + push rbp + mov rbp, rsp + sub rsp, 16 + + ; We assign the value `8` to `x` here + mov DWORD PTR [rbp-4], 8 + + ; And copy the bits making up `x` to a location + ; `my_function` can access (`edi`) + mov eax, DWORD PTR [rbp-4] + mov edi, eax + + ; Call `my_function` and give it control + call my_function + + mov eax, 0 + leave + ret + +my_function: + push rbp + mov rbp, rsp + + ; Copy the bits out of the pre-determined location (`edi`) + ; to somewhere we can use + mov DWORD PTR [rbp-4], edi + nop + + pop rbp + ret +``` + +At a really low level of memory, we're copying bits around using the [`mov`][x86_guide] instruction; +nothing crazy. But to show how similar Rust is, let's take a look at our program translated from C +to Rust: + +```rust +fn my_function(x: i32) {} + +fn main() { + let x = 8; + my_function(x) +} +``` + +And the assembly generated when we stick it in the +[compiler explorer](https://godbolt.org/z/cAlmk0): again, lightly +edited + +```nasm +example::main: + push rax + + ; Look familiar? We're copying bits to a location for `my_function` + ; The compiler just optimizes out holding `x` in memory + mov edi, 8 + + ; Call `my_function` and give it control + call example::my_function + + pop rax + ret + +example::my_function: + sub rsp, 4 + + ; And copying those bits again, just like in C + mov dword ptr [rsp], edi + + add rsp, 4 + ret +``` + +The generated Rust assembly is functionally pretty close to the C assembly: _When working with +primitives, we're just dealing with bits in memory_. + +In Java we have to dereference a pointer to call its functions; in Rust, there's no pointer to +dereference. So what exactly is going on with this `.to_string()` function call? + +# impl primitive (and Python) + +Now it's time to reveal my trap card show the revelation that tied all this +together: _Rust has implementations for its primitive types._ That's right, `impl` blocks aren't +only for `structs` and `traits`, primitives get them too. Don't believe me? Check out +[u32](https://doc.rust-lang.org/std/primitive.u32.html), +[f64](https://doc.rust-lang.org/std/primitive.f64.html) and +[char](https://doc.rust-lang.org/std/primitive.char.html) as examples. + +But the really interesting bit is how Rust turns those `impl` blocks into assembly. Let's break out +the [compiler explorer](https://godbolt.org/z/6LBEwq) once again: + +```rust +pub fn main() { + 8.to_string() +} +``` + +And the interesting bits in the assembly: heavily trimmed down + +```nasm +example::main: + sub rsp, 24 + mov rdi, rsp + lea rax, [rip + .Lbyte_str.u] + mov rsi, rax + + ; Cool stuff right here + call ::to_string@PLT + + mov rdi, rsp + call core::ptr::drop_in_place + add rsp, 24 + ret +``` + +Now, this assembly is a bit more complicated, but here's the big revelation: **we're calling +`to_string()` as a function that exists all on its own, and giving it the instance of `8`**. 
Instead +of thinking of the value 8 as an instance of `u32` and then peeking in to find the location of the +function we want to call (like Java), we have a function that exists outside of the instance and +just give that function the value `8`. + +This is an incredibly technical detail, but the interesting idea I had was this: _if `to_string()` +is a static function, can I refer to the unbound function and give it an instance?_ + +Better explained in code (and a [compiler explorer](https://godbolt.org/z/fJY-gA) link because I +seriously love this thing): + +```rust +struct MyVal { + x: u32 +} + +impl MyVal { + fn to_string(&self) -> String { + self.x.to_string() + } +} + +pub fn main() { + let my_val = MyVal { x: 8 }; + + // THESE ARE THE SAME + my_val.to_string(); + MyVal::to_string(&my_val); +} +``` + +Rust is totally fine "binding" the function call to the instance, and also as a static. + +MIND == BLOWN. + +Python does the same thing where I can both call functions bound to their instances and also call as +an unbound function where I give it the instance: + +```python +class MyClass(): + x = 24 + + def my_function(self): + print(self.x) + +m = MyClass() + +m.my_function() +MyClass.my_function(m) +``` + +And Python tries to make you _think_ that primitives can have instance methods... + +```python +>>> dir(8) +['__abs__', '__add__', '__and__', '__class__', '__cmp__', '__coerce__', +'__delattr__', '__div__', '__divmod__', '__doc__', '__float__', '__floordiv__', +... +'__setattr__', '__sizeof__', '__str__', '__sub__', '__subclasshook__', '__truediv__', +...] + +>>> # Theoretically `8.__str__()` should exist, but: + +>>> 8.__str__() + File "", line 1 + 8.__str__() + ^ +SyntaxError: invalid syntax + +>>> # It will run if we assign it first though: +>>> x = 8 +>>> x.__str__() +'8' +``` + +...but in practice it's a bit complicated. + +So while Python handles binding instance methods in a way similar to Rust, it's still not able to +run the example we started with. + +# Conclusion + +This was a super-roundabout way of demonstrating it, but the way Rust handles incredibly minor +details like primitives leads to really cool effects. Primitives are optimized like C in how they +have a space-efficient memory layout, yet the language still has a lot of features I enjoy in Python +(like both instance and late binding). + +And when you put it together, there are areas where Rust does cool things nobody else can; as a +quirky feature of Rust's type system, `8.to_string()` is actually valid code. + +Now go forth and fool your friends into thinking you know assembly. This is all I've got. + +[x86_guide]: http://www.cs.virginia.edu/~evans/cs216/guides/x86.html +[java_primitive]: https://docs.oracle.com/javase/tutorial/java/nutsandbolts/datatypes.html +[rust_scalar]: https://doc.rust-lang.org/book/second-edition/ch03-02-data-types.html#scalar-types +[rust_primitive]: https://doc.rust-lang.org/book/first-edition/primitive-types.html diff --git a/_posts/2018-09-15-isomorphic-apps.md b/_posts/2018-09-15-isomorphic-apps.md new file mode 100644 index 0000000..abc0dcb --- /dev/null +++ b/_posts/2018-09-15-isomorphic-apps.md @@ -0,0 +1,294 @@ +--- +layout: post +title: "Isomorphic Desktop Apps with Rust" +description: "Electron + WASM = ☣" +category: +tags: [rust, javascript, webassembly] +--- + +Forgive me, but this is going to be a bit of a schizophrenic post. I both despise Javascript and the +modern ECMAScript ecosystem, and I'm stunned by its success doing some really cool things. 
It's +[this duality](https://www.destroyallsoftware.com/talks/the-birth-and-death-of-javascript) that's +led me to a couple of (very) late nights over the past weeks trying to reconcile myself as I +bootstrap a simple desktop application. + +See, as much as +[Webassembly isn't trying to replace Javascript](https://webassembly.org/docs/faq/#is-webassembly-trying-to-replace-javascript), +**I want Javascript gone**. There are plenty of people who don't share my views, and they are +probably nicer and more fun at parties. But I cringe every time "Webpack" is mentioned, and I think +it's hilarious that the +[language specification](https://ecma-international.org/publications/standards/Ecma-402.htm) +dramatically outpaces anyone's +[actual implementation](https://kangax.github.io/compat-table/es2016plus/). The answer to this +conundrum is of course to recompile code from newer versions of the language to older versions _of +the same language_ before running. At least [Babel] is a nice tongue-in-cheek reference. + +Yet for as much hate as [Electron] receives, it does a stunningly good job at solving a really hard +problem: _how the hell do I put a button on the screen and react when the user clicks it_? GUI +programming is hard, straight up. But if browsers are already able to run everywhere, why don't we +take advantage of someone else solving the hard problems for us? I don't like that I have to use +Javascript for it, but I really don't feel inclined to whip out good ol' [wxWidgets]. + +Now there are other native solutions ([libui-rs], [conrod], [oh hey wxWdidgets again!][wxrust]), but +those also have their own issues with distribution, styling, etc. With Electron, I can +`yarn create electron-app my-app` and just get going, knowing that packaging/upgrades/etc. are built +in. + +My question is: given recent innovations with WASM, _are we Electron yet_? + +No, not really. + +Instead, **what would it take to get to a point where we can skip Javascript in Electron apps?** + +# Setting the Stage + +Truth is, WASM/Webassembly is a pretty new technology and I'm a total beginner in this area. There +may already be solutions to the issues I discuss, but I'm totally unaware of them, so I'm going to +try and organize what I did manage to discover. + +I should also mention that the content and things I'm talking about here are not intended to be +prescriptive, but more "if someone else is interested, what do we already know doesn't work?" _I +expect everything in this post to be obsolete within two months._ Even over the course of writing +this, [a separate blog post](https://mnt.io/2018/08/28/from-rust-to-beyond-the-asm-js-galaxy/) had +to be modified because [upstream changes](https://github.com/WebAssembly/binaryen/pull/1642) broke a +[Rust tool](https://github.com/rustwasm/wasm-bindgen/pull/787) the post tried to use. The post +ultimately +[got updated](https://mnt.io/2018/08/28/from-rust-to-beyond-the-asm-js-galaxy/#comment-477), **but +all this happened within the span of a week.** Things are moving quickly. + +I'll also note that we're going to skip [asm.js] and [emscripten]. Truth be told, I couldn't get +either of these to output anything, and so I'm just going to say +[here be dragons.](https://en.wikipedia.org/wiki/Here_be_dragons) Everything I'm discussing here +uses the `wasm32-unknown-unknown` target. + +The code that I _did_ get running is available +[over here](https://github.com/speice-io/isomorphic-rust). 
Feel free to use it as a starting point, +but I'm mostly including the link as a reference for the things that were attempted. + +# An Example Running Application + +So, I did _technically_ get a running application: + +![Electron app using WASM](/assets/images/2018-09-15-electron-percy-wasm.png) + +...which you can also try out if you want: + +```sh +git clone https://github.com/speice-io/isomorphic-rust.git +cd isomorphic_rust/percy +yarn install && yarn start +``` + +...but I wouldn't really call it a "high quality" starting point to base future work on. It's mostly +there to prove this is possible in the first place. And that's something to be proud of! There's a +huge amount of engineering that went into showing a window with the text "It's alive!". + +There's also a lot of usability issues that prevent me from recommending anyone try Electron and +WASM apps at the moment, and I think that's the more important thing to discuss. + +# Issue the First: Complicated Toolchains + +I quickly established that [wasm-bindgen] was necessary to "link" my Rust code to Javascript. At +that point you've got an Electron app that starts an HTML page which ultimately fetches your WASM +blob. To keep things simple, the goal was to package everything using [webpack] so that I could just +load a `bundle.js` file on the page. That decision was to be the last thing that kinda worked in +this process. + +The first issue +[I ran into](https://www.reddit.com/r/rust/comments/98lpun/unable_to_load_wasm_for_electron_application/) +while attempting to bundle everything via `webpack` is a detail in the WASM spec: + +> This function accepts a Response object, or a promise for one, and ... **[if > it] does not match +> the `application/wasm` MIME type**, the returned promise will be rejected with a TypeError; +> +> [WebAssembly - Additional Web Embedding API](https://webassembly.org/docs/web/#additional-web-embedding-api) + +Specifically, if you try and load a WASM blob without the MIME type set, you'll get an error. On the +web this isn't a huge issue, as the server can set MIME types when delivering the blob. With +Electron, you're resolving things with a `file://` URL and thus can't control the MIME type: + +![TypeError: Incorrect response MIME type. Expected 'application/wasm'.](/assets/images/2018-09-15-incorrect-MIME-type.png) + +There are a couple of solutions depending on how far into the deep end you care to venture: + +- Embed a static file server in your Electron application +- Use a [custom protocol](https://electronjs.org/docs/api/protocol) and custom protocol handler +- Host your WASM blob on a website that you resolve at runtime + +But all these are pretty bad solutions and defeat the purpose of using WASM in the first place. 
+Instead, my workaround was to +[open a PR with `webpack`](https://github.com/webpack/webpack/issues/7918) and use regex to remove +calls to `instantiateStreaming` in the +[build script](https://github.com/speice-io/isomorphic-rust/blob/master/percy/build.sh#L21-L25): + +```sh +cargo +nightly build --target=wasm32-unknown-unknown && \ + wasm-bindgen "$WASM_DIR/debug/$WASM_NAME.wasm" --out-dir "$APP_DIR" --no-typescript && \ + # Have to use --mode=development so we can patch out the call to instantiateStreaming + "$DIR/node_modules/webpack-cli/bin/cli.js" --mode=development "$APP_DIR/app_loader.js" -o "$APP_DIR/bundle.js" && \ + sed -i 's/.*instantiateStreaming.*//g' "$APP_DIR/bundle.js" +``` + +Once that lands, the +[build process](https://github.com/speice-io/isomorphic-rust/blob/master/percy_patched_webpack/build.sh#L24-L27) +becomes much simpler: + +```sh + +cargo +nightly build --target=wasm32-unknown-unknown && \ + wasm-bindgen "$WASM_DIR/debug/$WASM_NAME.wasm" --out-dir "$APP_DIR" --no-typescript && \ + "$DIR/node_modules/webpack-cli/bin/cli.js" --mode=production "$APP_DIR/app_loader.js" -o "$APP_DIR/bundle.js" +``` + +But we're not done yet! After we compile Rust into WASM and link WASM to Javascript (via +`wasm-bindgen` and `webpack`), we still have to make an Electron app. For this purpose I used a +starter app from [Electron Forge], and then a +[`prestart` script](https://github.com/speice-io/isomorphic-rust/blob/master/percy/package.json#L8) +to actually handle starting the application. + +The +[final toolchain](https://github.com/speice-io/isomorphic-rust/blob/master/percy/package.json#L8) +looks something like this: + +- `yarn start` triggers the `prestart` script +- `prestart` checks for missing tools (`wasm-bindgen-cli`, etc.) and then: + - Uses `cargo` to compile the Rust code into WASM + - Uses `wasm-bindgen` to link the WASM blob into a Javascript file with exported symbols + - Uses `webpack` to bundle the page start script with the Javascript we just generated + - Uses `babel` under the hood to compile the `wasm-bindgen` code down from ES6 into something + browser-compatible +- The `start` script runs an Electron Forge handler to do some sanity checks +- Electron actually starts + +...which is complicated. I think more work needs to be done to either build a high-quality starter +app that can manage these steps, or another tool that "just handles" the complexity of linking a +compiled WASM file into something the Electron browser can run. + +# Issue the Second: WASM tools in Rust + +For as much as I didn't enjoy the Javascript tooling needed to interface with Rust, the Rust-only +bits aren't any better at the moment. I get it, a lot of projects are just starting off, and that +leads to a fragmented ecosystem. Here's what I can recommend as a starting point: + +Don't check in your `Cargo.lock` files to version control. If there's a disagreement between the +version of `wasm-bindgen-cli` you have installed and the `wasm-bindgen` you're compiling with in +`Cargo.lock`, you get a nasty error: + +``` +it looks like the Rust project used to create this wasm file was linked against +a different version of wasm-bindgen than this binary: + +rust wasm file: 0.2.21 + this binary: 0.2.17 + +Currently the bindgen format is unstable enough that these two version must +exactly match, so it's required that these two version are kept in sync by +either updating the wasm-bindgen dependency or this binary. +``` + +Not that I ever managed to run into this myself (_coughs nervously_). 
+ +There are two projects attempting to be "application frameworks": [percy] and [yew]. Between those, +I managed to get [two](https://github.com/speice-io/isomorphic-rust/tree/master/percy) +[examples](https://github.com/speice-io/isomorphic-rust/tree/master/percy_patched_webpack) running +using `percy`, but was unable to get an +[example](https://github.com/speice-io/isomorphic-rust/tree/master/yew) running with `yew` because +of issues with "missing modules" during the `webpack` step: + +```sh +ERROR in ./dist/electron_yew_wasm_bg.wasm +Module not found: Error: Can't resolve 'env' in '/home/bspeice/Development/isomorphic_rust/yew/dist' + @ ./dist/electron_yew_wasm_bg.wasm + @ ./dist/electron_yew_wasm.js + @ ./dist/app.js + @ ./dist/app_loader.js +``` + +If you want to work with the browser APIs directly, your choices are [percy-webapis] or [stdweb] (or +eventually [web-sys]). See above for my `percy` examples, but when I tried +[an example with `stdweb`](https://github.com/speice-io/isomorphic-rust/tree/master/stdweb), I was +unable to get it running: + +```sh +ERROR in ./dist/stdweb_electron_bg.wasm +Module not found: Error: Can't resolve 'env' in '/home/bspeice/Development/isomorphic_rust/stdweb/dist' + @ ./dist/stdweb_electron_bg.wasm + @ ./dist/stdweb_electron.js + @ ./dist/app_loader.js +``` + +At this point I'm pretty convinced that `stdweb` is causing issues for `yew` as well, but can't +prove it. + +I did also get a [minimal example](https://github.com/speice-io/isomorphic-rust/tree/master/minimal) +running that doesn't depend on any tools besides `wasm-bindgen`. However, it requires manually +writing "`extern C`" blocks for everything you need from the browser. Es no bueno. + +Finally, from a tools and platform view, there are two up-and-coming packages that should be +mentioned: [js-sys] and [web-sys]. Their purpose is to be fundamental building blocks that exposes +the browser's APIs to Rust. If you're interested in building an app framework from scratch, these +should give you the most flexibility. I didn't touch either in my research, though I expect them to +be essential long-term. + +So there's a lot in play from the Rust side of things, and it's just going to take some time to +figure out what works and what doesn't. + +# Issue the Third: Known Unknowns + +Alright, so after I managed to get an application started, I stopped there. It was a good deal of +effort to chain together even a proof of concept, and at this point I'd rather learn [Typescript] +than keep trying to maintain an incredibly brittle pipeline. Blasphemy, I know... + +The important point I want to make is that there's a lot unknown about how any of this holds up +outside proofs of concept. Things I didn't attempt: + +- Testing +- Packaging +- Updates +- Literally anything related to why I wanted to use Electron in the first place + +# What it Would Take + +Much as I don't like Javascript, the tools are too shaky for me to recommend mixing Electron and +WASM at the moment. There's a lot of innovation happening, so who knows? Someone might have an +application in production a couple months from now. But at the moment, I'm personally going to stay +away. + +Let's finish with a wishlist then - here are the things that I think need to happen before +Electron/WASM/Rust can become a thing: + +- Webpack still needs some updates. 
The necessary work is in progress, but hasn't landed yet + ([#7983](https://github.com/webpack/webpack/pull/7983)) +- Browser API libraries (`web-sys` and `stdweb`) need to make sure they can support running in + Electron (see module error above) +- Projects need to stabilize. There's talk of `stdweb` being turned into a Rust API + [on top of web-sys](https://github.com/rustwasm/team/issues/226#issuecomment-418475778), and percy + [moving to web-sys](https://github.com/chinedufn/percy/issues/24), both of which are big changes +- `wasm-bindgen` is great, but still in the "move fast and break things" phase +- A good "boilerplate" app would dramatically simplify the start-up costs; + [electron-react-boilerplate](https://github.com/chentsulin/electron-react-boilerplate) comes to + mind as a good project to imitate +- More blog posts/contributors! I think Electron + Rust could be cool, but I have no idea what I'm + doing + +[wxwidgets]: https://wxwidgets.org/ +[libui-rs]: https://github.com/LeoTindall/libui-rs/ +[electron]: https://electronjs.org/ +[babel]: https://babeljs.io/ +[wxrust]: https://github.com/kenz-gelsoft/wxRust +[wasm-bindgen]: https://github.com/rustwasm/wasm-bindgen +[js-sys]: https://crates.io/crates/js-sys +[percy-webapis]: https://crates.io/crates/percy-webapis +[stdweb]: https://crates.io/crates/stdweb +[web-sys]: https://crates.io/crates/web-sys +[percy]: https://chinedufn.github.io/percy/ +[virtual-dom-rs]: https://crates.io/crates/virtual-dom-rs +[yew]: https://github.com/DenisKolodin/yew +[react]: https://reactjs.org/ +[elm]: http://elm-lang.org/ +[asm.js]: http://asmjs.org/ +[emscripten]: https://kripken.github.io/emscripten-site/ +[typescript]: https://www.typescriptlang.org/ +[electron forge]: https://electronforge.io/ +[conrod]: https://github.com/PistonDevelopers/conrod +[webpack]: https://webpack.js.org/ diff --git a/_posts/2018-10-08-case-study-optimization.md b/_posts/2018-10-08-case-study-optimization.md new file mode 100644 index 0000000..00f24ae --- /dev/null +++ b/_posts/2018-10-08-case-study-optimization.md @@ -0,0 +1,168 @@ +--- +layout: post +title: "A Case Study in Heaptrack" +description: "...because you don't need no garbage collection" +category: +tags: [] +--- + +One of my earliest conversations about programming went like this: + +> Programmers have it too easy these days. They should learn to develop in low memory environments +> and be more efficient. +> +> -- My Father (paraphrased) + +...though it's not like the first code I wrote was for a +[graphing calculator](https://education.ti.com/en/products/calculators/graphing-calculators/ti-84-plus-se) +packing a whole 24KB of RAM. By the way, _what are you doing on my lawn?_ + +The principle remains though: be efficient with the resources you have, because +[what Intel giveth, Microsoft taketh away](http://exo-blog.blogspot.com/2007/09/what-intel-giveth-microsoft-taketh-away.html). +My professional work is focused on this kind of efficiency; low-latency financial markets demand +that you understand at a deep level _exactly_ what your code is doing. As I continue experimenting +with Rust for personal projects, it's exciting to bring a utilitarian mindset with me: there's +flexibility for the times I pretend to have a garbage collector, and flexibility for the times that +I really care about how memory is used. + +This post is a (small) case study in how I went from the former to the latter. And ultimately, it's +intended to be a starting toolkit to empower analysis of your own code. 
+
+# Curiosity
+
+When I first started building the [dtparse] crate, my intention was to mirror as closely as possible
+the equivalent [Python library][dateutil]. Python, as you may know, is garbage collected. Very
+rarely is memory usage considered in Python, and I likewise wasn't paying too much attention when
+`dtparse` was first being built.
+
+This lackadaisical approach to memory works well enough, and I'm not planning on making `dtparse`
+hyper-efficient. But every so often, I've wondered: "what exactly is going on in memory?" With the
+advent of Rust 1.28 and the
+[Global Allocator trait](https://doc.rust-lang.org/std/alloc/trait.GlobalAlloc.html), I had a really
+great idea: _build a custom allocator that allows you to track your own allocations._ That way, you
+can do things like writing tests for both correct results and correct memory usage. I gave it a
+[shot][qadapt], but learned very quickly: **never write your own allocator**. It went from "fun
+weekend project" to "I have literally no idea what my computer is doing" at breakneck speed.
+
+Instead, I'll highlight a separate path I took to make sense of my memory usage: [heaptrack].
+
+# Turning on the System Allocator
+
+This is the hardest part of the post. Because Rust uses
+[its own allocator](https://github.com/rust-lang/rust/pull/27400#issue-41256384) by default,
+`heaptrack` is unable to properly record unmodified Rust code. To remedy this, we'll make use of the
+`#[global_allocator]` attribute.
+
+Specifically, in `lib.rs` or `main.rs`, add this:
+
+```rust
+use std::alloc::System;
+
+#[global_allocator]
+static GLOBAL: System = System;
+```
+
+...and that's it. Everything else comes essentially for free.
+
+# Running heaptrack
+
+Assuming you've installed heaptrack (Homebrew on Mac, your package manager
+on Linux, ??? on Windows), all that's left is to fire up your application:
+
+```sh
+heaptrack my_application
+```
+
+It's that easy. After the program finishes, you'll see a file in your local directory with a name
+like `heaptrack.my_application.XXXX.gz`. If you load that up in `heaptrack_gui`, you'll see
+something like this:
+
+![heaptrack](/assets/images/2018-10-heaptrack/heaptrack-before.png)
+
+---
+
+And even these pretty colors:
+
+![pretty colors](/assets/images/2018-10-heaptrack/heaptrack-flamegraph.png)
+
+# Reading Flamegraphs
+
+To make sense of our memory usage, we're going to focus on that last picture - it's called a
+["flamegraph"](http://www.brendangregg.com/flamegraphs.html). These charts are typically used to
+show how much time your program spends executing each function, but they're used here to show how
+much memory was allocated during those functions instead.
+
+For example, we can see that all executions happened during the `main` function:
+
+![allocations in main](/assets/images/2018-10-heaptrack/heaptrack-main-colorized.png)
+
+...and within that, all allocations happened during `dtparse::parse`:
+
+![allocations in dtparse](/assets/images/2018-10-heaptrack/heaptrack-dtparse-colorized.png)
+
+...and within _that_, allocations happened in two different places:
+
+![allocations in parseinfo](/assets/images/2018-10-heaptrack/heaptrack-parseinfo-colorized.png)
+
+Now I apologize that it's hard to see, but there's one area specifically that stuck out as an issue:
+**what the heck is the `Default` thing doing?**
+
+![pretty colors](/assets/images/2018-10-heaptrack/heaptrack-flamegraph-default.png)
+
+# Optimizing dtparse
+
+See, I knew that there were some allocations during calls to `dtparse::parse`, but I was totally
+wrong about where the bulk of allocations occurred in my program. Let me post the code and see if
+you can spot the mistake:
+
+```rust
+/// Main entry point for using `dtparse`.
+pub fn parse(timestr: &str) -> ParseResult<(NaiveDateTime, Option<FixedOffset>)> {
+    let res = Parser::default().parse(
+        timestr, None, None, false, false,
+        None, false,
+        &HashMap::new(),
+    )?;
+
+    Ok((res.0, res.1))
+}
+```
+
+> [dtparse](https://github.com/bspeice/dtparse/blob/4d7c5dd99572823fa4a390b483c38ab020a2172f/src/lib.rs#L1286)
+
+---
+
+Because `Parser::parse` requires a mutable reference to itself, I have to create a new
+`Parser::default` every time it receives a string. This is excessive! We'd rather have an immutable
+parser that can be re-used, and avoid allocating memory in the first place.
+
+Armed with that information, I put some time in to
+[make the parser immutable](https://github.com/bspeice/dtparse/commit/741afa34517d6bc1155713bbc5d66905fea13fad#diff-b4aea3e418ccdb71239b96952d9cddb6).
+Now that I can re-use the same parser over and over, the allocations disappear:
+
+![allocations cleaned up](/assets/images/2018-10-heaptrack/heaptrack-flamegraph-after.png)
+
+In total, we went from requiring 2 MB of memory in
+[version 1.0.2](https://crates.io/crates/dtparse/1.0.2):
+
+![memory before](/assets/images/2018-10-heaptrack/heaptrack-closeup.png)
+
+All the way down to 300KB in [version 1.0.3](https://crates.io/crates/dtparse/1.0.3):
+
+![memory after](/assets/images/2018-10-heaptrack/heaptrack-closeup-after.png)
+
+# Conclusion
+
+In the end, you don't need to write a custom allocator to be efficient with memory; great tools
+already exist to help you understand what your program is doing.
+
+**Use them.**
+
+Given that [Moore's Law](https://en.wikipedia.org/wiki/Moore%27s_law) is
+[dead](https://www.technologyreview.com/s/601441/moores-law-is-dead-now-what/), we've all got to do
+our part to take back what Microsoft stole.
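+
+As a postscript, here's a minimal, self-contained sketch of the "build once, re-use everywhere"
+pattern behind the fix above. To be clear, the `Parser` below is a toy stand-in rather than
+dtparse's actual API; the real change lives in the commit linked earlier:
+
+```rust
+use std::collections::HashMap;
+
+// Illustrative only: a toy parser that owns a lookup table, roughly the
+// kind of state that made `Parser::default()` show up in the flamegraph.
+#[derive(Default)]
+struct Parser {
+    month_names: HashMap<String, u32>,
+}
+
+impl Parser {
+    // Taking `&self` means callers can share one parser instead of
+    // rebuilding it (and its tables) for every input.
+    fn parse(&self, timestr: &str) -> Option<u32> {
+        self.month_names.get(timestr).copied()
+    }
+}
+
+fn main() {
+    // Before: a fresh `Parser` (and its `HashMap`) for every call.
+    let _ = Parser::default().parse("january");
+
+    // After: one `Parser`, borrowed for every call - the per-call
+    // allocations disappear.
+    let parser = Parser::default();
+    for input in ["january", "february", "march"] {
+        let _ = parser.parse(input);
+    }
+}
+```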
+ +[dtparse]: https://crates.io/crates/dtparse +[dateutil]: https://github.com/dateutil/dateutil +[heaptrack]: https://github.com/KDE/heaptrack +[qadapt]: https://crates.io/crates/qadapt diff --git a/_posts/2018-12-04-what-small-business-really-means.md b/_posts/2018-12-04-what-small-business-really-means.md new file mode 100644 index 0000000..dce374d --- /dev/null +++ b/_posts/2018-12-04-what-small-business-really-means.md @@ -0,0 +1,34 @@ +--- +layout: post +title: 'More "What Companies Really Mean"' +description: 'when they ask "Why should we hire you?"' +category: +tags: [] +--- + +I recently stumbled across a phenomenal small article entitled +[What Startups Really Mean By "Why Should We Hire You?"](https://angel.co/blog/what-startups-really-mean-by-why-should-we-hire-you). +Having been interviewed by smaller companies (though not exactly startups), the questions and +subtexts are the same. There's often a question behind the question that you're actually trying to +answer, and I wish I spotted the nuance earlier in my career. + +Let me also make note of one more question/euphemism I've come across: + +# How do you feel about Production Support? + +**Translation**: _We're a fairly small team, and when things break on an evening/weekend/Christmas +Day, can we call on you to be there?_ + +I've met decidedly few people in my life who truly enjoy the "ops" side of "devops". They're +incredibly good at taking an impossible problem, pre-existing knowledge of arcane arts, and turning +that into a functioning system at the end. And if they all left for lunch, we probably wouldn't make +it out the door before the zombie apocalypse. + +Larger organizations (in my experience, 500+ person organizations) have the luxury of hiring people +who either enjoy that, or play along nicely enough that our systems keep working. + +Small teams have no such luck. If you're interviewing at a small company, especially as a "data +scientist" or other somesuch position, be aware that systems can and do spontaneously combust at the +most inopportune moments. + +**Terrible-but-popular answers include**: _It's a part of the job, and I'm happy to contribute._ diff --git a/_posts/2018-12-15-allocation-safety.md b/_posts/2018-12-15-allocation-safety.md new file mode 100644 index 0000000..7892856 --- /dev/null +++ b/_posts/2018-12-15-allocation-safety.md @@ -0,0 +1,218 @@ +--- +layout: post +title: "QADAPT - debug_assert! for your memory usage" +description: "...and why you want an allocator that goes 💥." +category: +tags: [] +--- + +I think it's part of the human condition to ignore perfectly good advice when it comes our way. A +bit over a month ago, I was dispensing sage wisdom for the ages: + +> I had a really great idea: build a custom allocator that allows you to track your own allocations. +> I gave it a shot, but learned very quickly: **never write your own allocator.** +> +> -- [me](/2018/10/case-study-optimization.html) + +I proceeded to ignore it, because we never really learn from our mistakes. + +There's another part of the human condition that derives joy from seeing things explode. + + + +And _that's_ the part I'm going to focus on. + +# Why an Allocator? + +So why, after complaining about allocators, would I still want to write one? There are three reasons +for that: + +1. Allocation/dropping is slow +2. It's difficult to know exactly when Rust will allocate or drop, especially when using code that + you did not write +3. 
I want automated tools to verify behavior, instead of inspecting by hand
+
+When I say "slow," it's important to define the terms. If you're writing web applications, you'll
+spend orders of magnitude more time waiting for the database than you will the allocator. However,
+there's still plenty of code where micro- or nano-seconds matter; think
+[finance](https://www.youtube.com/watch?v=NH1Tta7purM),
+[real-time audio](https://www.reddit.com/r/rust/comments/9hg7yj/synthesizer_progress_update/e6c291f),
+[self-driving cars](https://polysync.io/blog/session-types-for-hearty-codecs/), and
+[networking](https://carllerche.github.io/bytes/bytes/index.html). In these situations it's simply
+unacceptable for you to spend time doing things that are not your program, and waiting on the
+allocator is not cool.
+
+As I continue to learn Rust, it's difficult for me to predict where exactly allocations will happen.
+So, I propose we play a quick trivia game: **Does this code invoke the allocator?**
+
+## Example 1
+
+```rust
+fn my_function() {
+    let v: Vec<u8> = Vec::new();
+}
+```
+
+**No**: Rust [knows how big](https://doc.rust-lang.org/std/mem/fn.size_of.html) the `Vec` type is,
+and reserves a fixed amount of memory on the stack for the `v` vector. However, if we wanted to
+reserve extra space (using `Vec::with_capacity`) the allocator would get invoked.
+
+## Example 2
+
+```rust
+fn my_function() {
+    let v: Box<Vec<u8>> = Box::new(Vec::new());
+}
+```
+
+**Yes**: Because Boxes allow us to work with things that are of unknown size, it has to allocate on
+the heap. While the `Box` is unnecessary in this snippet (release builds will optimize out the
+allocation), reserving heap space more generally is needed to pass a dynamically sized type to
+another function.
+
+## Example 3
+
+```rust
+fn my_function(mut v: Vec<u8>) {
+    v.push(5);
+}
+```
+
+**Maybe**: Depending on whether the Vector we were given has space available, we may or may not
+allocate. Especially when dealing with code that you did not author, it's difficult to verify that
+things behave as you expect them to.
+
+# Blowing Things Up
+
+So, how exactly does QADAPT solve these problems? **Whenever an allocation or drop occurs in code
+marked allocation-safe, QADAPT triggers a thread panic.** We don't want to let the program continue
+as if nothing strange happened, _we want things to explode_.
+
+However, you don't want code to panic in production because of circumstances you didn't predict.
+Just like [`debug_assert!`](https://doc.rust-lang.org/std/macro.debug_assert.html), **QADAPT will
+strip out its own code when building in release mode to guarantee no panics and no performance
+impact.**
+
+Finally, there are three ways to have QADAPT check that your code will not invoke the allocator:
+
+## Using a procedural macro
+
+The easiest method, watch an entire function for allocator invocation:
+
+```rust
+use qadapt::no_alloc;
+use qadapt::QADAPT;
+
+#[global_allocator]
+static Q: QADAPT = QADAPT;
+
+#[no_alloc]
+fn push_vec(v: &mut Vec<u8>) {
+    // This triggers a panic if v.len() == v.capacity()
+    v.push(5);
+}
+
+fn main() {
+    let mut v = Vec::with_capacity(1);
+
+    // This will *not* trigger a panic
+    push_vec(&mut v);
+
+    // This *will* trigger a panic
+    push_vec(&mut v);
+}
+```
+
+## Using a regular macro
+
+For times when you need more precision:
+
+```rust
+use qadapt::assert_no_alloc;
+use qadapt::QADAPT;
+
+#[global_allocator]
+static Q: QADAPT = QADAPT;
+
+fn main() {
+    let mut v = Vec::with_capacity(1);
+
+    // No allocations here, we already have space reserved
+    assert_no_alloc!(v.push(5));
+
+    // Even though we remove an item, it doesn't trigger a drop
+    // because it's a scalar. If it were a `Box<_>` type,
+    // a drop would trigger.
+    assert_no_alloc!({
+        v.pop().unwrap();
+    });
+}
+```
+
+## Using function calls
+
+Both the most precise and most tedious:
+
+```rust
+use qadapt::enter_protected;
+use qadapt::exit_protected;
+use qadapt::QADAPT;
+
+#[global_allocator]
+static Q: QADAPT = QADAPT;
+
+fn main() {
+    // This triggers an allocation (on non-release builds)
+    let mut v = Vec::with_capacity(1);
+
+    enter_protected();
+    // This does not trigger an allocation because we've reserved size
+    v.push(0);
+    exit_protected();
+
+    // This triggers an allocation because we ran out of size,
+    // but doesn't panic because we're no longer protected.
+    v.push(1);
+}
+```
+
+## Caveats
+
+It's important to point out that QADAPT code is synchronous, so please be careful when mixing in
+asynchronous functions:
+
+```rust
+use futures::future::Future;
+use futures::future::ok;
+use qadapt::assert_no_alloc;
+use qadapt::no_alloc;
+
+#[no_alloc]
+fn async_capacity() -> impl Future<Item=Vec<u8>, Error=()> {
+    ok(12).and_then(|e| Ok(Vec::with_capacity(e)))
+}
+
+fn main() {
+    // This doesn't trigger a panic because the `and_then` closure
+    // wasn't run during the function call.
+    async_capacity();
+
+    // Still no panic
+    assert_no_alloc!(async_capacity());
+
+    // This will panic because the allocation happens during `unwrap`
+    // in the `assert_no_alloc!` macro
+    assert_no_alloc!(async_capacity().poll().unwrap());
+}
+```
+
+# Conclusion
+
+While there's a lot more to writing high-performance code than managing your usage of the allocator,
+it's critical that you do use the allocator correctly. QADAPT will verify that your code is doing
+what you expect. It's usable even on stable Rust from version 1.31 onward, which isn't the case for
+most allocators. Version 1.0 was released today, and you can check it out over at
+[crates.io](https://crates.io/crates/qadapt) or on [github](https://github.com/bspeice/qadapt).
+
+I'm hoping to write more about high-performance Rust in the future, and I expect that QADAPT will
+help guide that. If there are topics you're interested in, let me know in the comments below!
+ +[qadapt]: https://crates.io/crates/qadapt diff --git a/_posts/2019-02-04-understanding-allocations-in-rust.md b/_posts/2019-02-04-understanding-allocations-in-rust.md new file mode 100644 index 0000000..48b9df6 --- /dev/null +++ b/_posts/2019-02-04-understanding-allocations-in-rust.md @@ -0,0 +1,113 @@ +--- +layout: post +title: "Allocations in Rust" +description: "An introduction to the memory model." +category: +tags: [rust, understanding-allocations] +--- + +There's an alchemy of distilling complex technical topics into articles and videos that change the +way programmers see the tools they interact with on a regular basis. I knew what a linker was, but +there's a staggering amount of complexity in between +[the OS and `main()`](https://www.youtube.com/watch?v=dOfucXtyEsU). Rust programmers use the +[`Box`](https://doc.rust-lang.org/stable/std/boxed/struct.Box.html) type all the time, but there's a +rich history of the Rust language itself wrapped up in +[how special it is](https://manishearth.github.io/blog/2017/01/10/rust-tidbits-box-is-special/). + +In a similar vein, this series attempts to look at code and understand how memory is used; the +complex choreography of operating system, compiler, and program that frees you to focus on +functionality far-flung from frivolous book-keeping. The Rust compiler relieves a great deal of the +cognitive burden associated with memory management, but we're going to step into its world for a +while. + +Let's learn a bit about memory in Rust. + +# Table of Contents + +This series is intended as both learning and reference material; we'll work through the different +memory types Rust uses, and explain the implications of each. Ultimately, a summary will be provided +as a cheat sheet for easy future reference. To that end, a table of contents is in order: + +- Foreword +- [Global Memory Usage: The Whole World](/2019/02/the-whole-world.html) +- [Fixed Memory: Stacking Up](/2019/02/stacking-up.html) +- [Dynamic Memory: A Heaping Helping](/2019/02/a-heaping-helping.html) +- [Compiler Optimizations: What It's Done For You Lately](/2019/02/compiler-optimizations.html) +- [Summary: What Are the Rules?](/2019/02/summary.html) + +# Foreword + +Rust's three defining features of +[Performance, Reliability, and Productivity](https://www.rust-lang.org/) are all driven to a great +degree by the how the Rust compiler understands memory usage. Unlike managed memory languages (Java, +Python), Rust +[doesn't really](https://words.steveklabnik.com/borrow-checking-escape-analysis-and-the-generational-hypothesis) +garbage collect; instead, it uses an +[ownership](https://doc.rust-lang.org/book/ch04-01-what-is-ownership.html) system to reason about +how long objects will last in your program. In some cases, if the life of an object is fairly +transient, Rust can make use of a very fast region called the "stack." When that's not possible, +Rust uses +[dynamic (heap) memory](https://en.wikipedia.org/wiki/Memory_management#Dynamic_memory_allocation) +and the ownership system to ensure you can't accidentally corrupt memory. It's not as fast, but it +is important to have available. + +That said, there are specific situations in Rust where you'd never need to worry about the +stack/heap distinction! If you: + +1. Never use `unsafe` +2. Never use `#![feature(alloc)]` or the [`alloc` crate](https://doc.rust-lang.org/alloc/index.html) + +...then it's not possible for you to use dynamic memory! + +For some uses of Rust, typically embedded devices, these constraints are OK. 
They have very limited +memory, and the program binary size itself may significantly affect what's available! There's no +operating system able to manage this +["virtual memory"](https://en.wikipedia.org/wiki/Virtual_memory) thing, but that's not an issue +because there's only one running application. The +[embedonomicon](https://docs.rust-embedded.org/embedonomicon/preface.html) is ever in mind, and +interacting with the "real world" through extra peripherals is accomplished by reading and writing +to [specific memory addresses](https://bob.cs.sonoma.edu/IntroCompOrg-RPi/sec-gpio-mem.html). + +Most Rust programs find these requirements overly burdensome though. C++ developers would struggle +without access to [`std::vector`](https://en.cppreference.com/w/cpp/container/vector) (except those +hardcore no-STL people), and Rust developers would struggle without +[`std::vec`](https://doc.rust-lang.org/std/vec/struct.Vec.html). But with the constraints above, +`std::vec` is actually a part of the +[`alloc` crate](https://doc.rust-lang.org/alloc/vec/struct.Vec.html), and thus off-limits. `Box`, +`Rc`, etc., are also unusable for the same reason. + +Whether writing code for embedded devices or not, the important thing in both situations is how much +you know _before your application starts_ about what its memory usage will look like. In embedded +devices, there's a small, fixed amount of memory to use. In a browser, you have no idea how large +[google.com](https://www.google.com)'s home page is until you start trying to download it. The +compiler uses this knowledge (or lack thereof) to optimize how memory is used; put simply, your code +runs faster when the compiler can guarantee exactly how much memory your program needs while it's +running. This series is all about understanding how the compiler reasons about your program, with an +emphasis on the implications for performance. + +Now let's address some conditions and caveats before going much further: + +- We'll focus on "safe" Rust only; `unsafe` lets you use platform-specific allocation API's + ([`malloc`](https://www.tutorialspoint.com/c_standard_library/c_function_malloc.htm)) that we'll + ignore. +- We'll assume a "debug" build of Rust code (what you get with `cargo run` and `cargo test`) and + address (pun intended) release mode at the end (`cargo run --release` and `cargo test --release`). +- All content will be run using Rust 1.32, as that's the highest currently supported in the + [Compiler Exporer](https://godbolt.org/). As such, we'll avoid upcoming innovations like + [compile-time evaluation of `static`](https://github.com/rust-lang/rfcs/blob/master/text/0911-const-fn.md) + that are available in nightly. +- Because of the nature of the content, being able to read assembly is helpful. We'll keep it + simple, but I [found](https://stackoverflow.com/a/4584131/1454178) a + [refresher](https://stackoverflow.com/a/26026278/1454178) on the `push` and `pop` + [instructions](http://www.cs.virginia.edu/~evans/cs216/guides/x86.html) was helpful while writing + this. +- I've tried to be precise in saying only what I can prove using the tools (ASM, docs) that are + available, but if there's something said in error it will be corrected expeditiously. Please let + me know at [bradlee@speice.io](mailto:bradlee@speice.io) + +Finally, I'll do what I can to flag potential future changes but the Rust docs have a notice worth +repeating: + +> Rust does not currently have a rigorously and formally defined memory model. 
+> +> -- [the docs](https://doc.rust-lang.org/std/ptr/fn.read_volatile.html) diff --git a/posts/2019/02/the-whole-world.mdx b/_posts/2019-02-05-the-whole-world.md similarity index 98% rename from posts/2019/02/the-whole-world.mdx rename to _posts/2019-02-05-the-whole-world.md index 28e4590..ef3bc47 100644 --- a/posts/2019/02/the-whole-world.mdx +++ b/_posts/2019-02-05-the-whole-world.md @@ -1,14 +1,11 @@ --- +layout: post title: "Global Memory Usage: The Whole World" -description: Static considered slightly less harmful. -published: 2019-02-05 +description: "Static considered slightly less harmful." +category: +tags: [rust, understanding-allocations] --- -import Blog from "../../LayoutBlog"; -export default Blog(frontmatter); - -$m_at^h$ - The first memory type we'll look at is pretty special: when Rust can prove that a _value_ is fixed for the life of a program (`const`), and when a _reference_ is unique for the life of a program (`static` as a declaration, not diff --git a/_posts/2019-02-06-stacking-up.md b/_posts/2019-02-06-stacking-up.md new file mode 100644 index 0000000..b060ea1 --- /dev/null +++ b/_posts/2019-02-06-stacking-up.md @@ -0,0 +1,601 @@ +--- +layout: post +title: "Fixed Memory: Stacking Up" +description: "We don't need no allocator." +category: +tags: [rust, understanding-allocations] +--- + +`const` and `static` are perfectly fine, but it's relatively rare that we know at compile-time about +either values or references that will be the same for the duration of our program. Put another way, +it's not often the case that either you or your compiler knows how much memory your entire program +will ever need. + +However, there are still some optimizations the compiler can do if it knows how much memory +individual functions will need. Specifically, the compiler can make use of "stack" memory (as +opposed to "heap" memory) which can be managed far faster in both the short- and long-term. When +requesting memory, the [`push` instruction](http://www.cs.virginia.edu/~evans/cs216/guides/x86.html) +can typically complete in [1 or 2 cycles](https://agner.org/optimize/instruction_tables.ods) (<1 +nanosecond on modern CPUs). Contrast that to heap memory which requires an allocator (specialized +software to track what memory is in use) to reserve space. When you're finished with stack memory, +the `pop` instruction runs in 1-3 cycles, as opposed to an allocator needing to worry about memory +fragmentation and other issues with the heap. All sorts of incredibly sophisticated techniques have +been used to design allocators: + +- [Garbage Collection]() + strategies like [Tracing](https://en.wikipedia.org/wiki/Tracing_garbage_collection) (used in + [Java](https://www.oracle.com/technetwork/java/javase/tech/g1-intro-jsp-135488.html)) and + [Reference counting](https://en.wikipedia.org/wiki/Reference_counting) (used in + [Python](https://docs.python.org/3/extending/extending.html#reference-counts)) +- Thread-local structures to prevent locking the allocator in + [tcmalloc](https://jamesgolick.com/2013/5/19/how-tcmalloc-works.html) +- Arena structures used in [jemalloc](http://jemalloc.net/), which + [until recently](https://blog.rust-lang.org/2019/01/17/Rust-1.32.0.html#jemalloc-is-removed-by-default) + was the primary allocator for Rust programs! + +But no matter how fast your allocator is, the principle remains: the fastest allocator is the one +you never use. 
As such, we're not going to discuss how exactly the
+[`push` and `pop` instructions work](http://www.cs.virginia.edu/~evans/cs216/guides/x86.html), but
+we'll focus instead on the conditions that enable the Rust compiler to use faster stack-based
+allocation for variables.
+
+So, **how do we know when Rust will or will not use stack allocation for objects we create?**
+Looking at other languages, it's often easy to delineate between stack and heap. Managed memory
+languages (Python, Java,
+[C#](https://blogs.msdn.microsoft.com/ericlippert/2010/09/30/the-truth-about-value-types/)) place
+everything on the heap. JIT compilers ([PyPy](https://www.pypy.org/),
+[HotSpot](https://www.oracle.com/technetwork/java/javase/tech/index-jsp-136373.html)) may optimize
+some heap allocations away, but you should never assume it will happen. C makes things clear with
+calls to special functions (like [malloc(3)](https://linux.die.net/man/3/malloc)) needed to access
+heap memory. Old C++ has the [`new`](https://stackoverflow.com/a/655086/1454178) keyword, though
+modern C++/C++11 is more complicated with [RAII](https://en.cppreference.com/w/cpp/language/raii).
+
+For Rust, we can summarize as follows: **stack allocation will be used for everything that doesn't
+involve "smart pointers" and collections**. We'll skip over a precise definition of the term "smart
+pointer" for now, and instead discuss what we should watch for to understand when stack and heap
+memory regions are used:
+
+1. Stack manipulation instructions (`push`, `pop`, and `add`/`sub` of the `rsp` register) indicate
+   allocation of stack memory:
+
+   ```rust
+   pub fn stack_alloc(x: u32) -> u32 {
+       // Space for `y` is allocated by subtracting from `rsp`,
+       // and then populated
+       let y = [1u8, 2, 3, 4];
+       // Space for `y` is deallocated by adding back to `rsp`
+       x
+   }
+   ```
+
+   -- [Compiler Explorer](https://godbolt.org/z/5WSgc9)
+
+2. Tracking when exactly heap allocation calls occur is difficult. It's typically easier to watch
+   for `call core::ptr::real_drop_in_place`, and infer that a heap allocation happened in the recent
+   past:
+
+   ```rust
+   pub fn heap_alloc(x: usize) -> usize {
+       // Space for elements in a vector has to be allocated
+       // on the heap, and is then de-allocated once the
+       // vector goes out of scope
+       let y: Vec<u8> = Vec::with_capacity(x);
+       x
+   }
+   ```
+
+   -- [Compiler Explorer](https://godbolt.org/z/epfgoQ) (`real_drop_in_place` happens on line 1317)
+   Note: While the
+   [`Drop` trait](https://doc.rust-lang.org/std/ops/trait.Drop.html) is
+   [called for stack-allocated objects](https://play.rust-lang.org/?version=stable&mode=debug&edition=2018&gist=87edf374d8983816eb3d8cfeac657b46),
+   the Rust standard library only defines `Drop` implementations for types that involve heap
+   allocation.
+
+3. If you don't want to inspect the assembly, use a custom allocator that's able to track and alert
+   when heap allocations occur. Crates like
+   [`alloc_counter`](https://crates.io/crates/alloc_counter) are designed for exactly this purpose.
+
+With all that in mind, let's talk about situations in which we're guaranteed to use stack memory:
+
+- Structs are created on the stack.
+- Function arguments are passed on the stack, meaning the
+  [`#[inline]` attribute](https://doc.rust-lang.org/reference/attributes.html#inline-attribute) will
+  not change the memory region used.
+- Enums and unions are stack-allocated.
+- [Arrays](https://doc.rust-lang.org/std/primitive.array.html) are always stack-allocated.
+- Closures capture their arguments on the stack. +- Generics will use stack allocation, even with dynamic dispatch. +- [`Copy`](https://doc.rust-lang.org/std/marker/trait.Copy.html) types are guaranteed to be + stack-allocated, and copying them will be done in stack memory. +- [`Iterator`s](https://doc.rust-lang.org/std/iter/trait.Iterator.html) in the standard library are + stack-allocated even when iterating over heap-based collections. + +# Structs + +The simplest case comes first. When creating vanilla `struct` objects, we use stack memory to hold +their contents: + +```rust +struct Point { + x: u64, + y: u64, +} + +struct Line { + a: Point, + b: Point, +} + +pub fn make_line() { + // `origin` is stored in the first 16 bytes of memory + // starting at location `rsp` + let origin = Point { x: 0, y: 0 }; + // `point` makes up the next 16 bytes of memory + let point = Point { x: 1, y: 2 }; + + // When creating `ray`, we just move the content out of + // `origin` and `point` into the next 32 bytes of memory + let ray = Line { a: origin, b: point }; +} +``` + +-- [Compiler Explorer](https://godbolt.org/z/vri9BE) + +Note that while some extra-fancy instructions are used for memory manipulation in the assembly, the +`sub rsp, 64` instruction indicates we're still working with the stack. + +# Function arguments + +Have you ever wondered how functions communicate with each other? Like, once the variables are given +to you, everything's fine. But how do you "give" those variables to another function? How do you get +the results back afterward? The answer: the compiler arranges memory and assembly instructions using +a pre-determined [calling convention](http://llvm.org/docs/LangRef.html#calling-conventions). This +convention governs the rules around where arguments needed by a function will be located (either in +memory offsets relative to the stack pointer `rsp`, or in other registers), and where the results +can be found once the function has finished. And when multiple languages agree on what the calling +conventions are, you can do things like having [Go call Rust code](https://blog.filippo.io/rustgo/)! + +Put simply: it's the compiler's job to figure out how to call other functions, and you can assume +that the compiler is good at its job. + +We can see this in action using a simple example: + +```rust +struct Point { + x: i64, + y: i64, +} + +// We use integer division operations to keep +// the assembly clean, understanding the result +// isn't accurate. +fn distance(a: &Point, b: &Point) -> i64 { + // Immediately subtract from `rsp` the bytes needed + // to hold all the intermediate results - this is + // the stack allocation step + + // The compiler used the `rdi` and `rsi` registers + // to pass our arguments, so read them in + let x1 = a.x; + let x2 = b.x; + let y1 = a.y; + let y2 = b.y; + + // Do the actual math work + let x_pow = (x1 - x2) * (x1 - x2); + let y_pow = (y1 - y2) * (y1 - y2); + let squared = x_pow + y_pow; + squared / squared + + // Our final result will be stored in the `rax` register + // so that our caller knows where to retrieve it. + // Finally, add back to `rsp` the stack memory that is + // now ready to be used by other functions. 
+} + +pub fn total_distance() { + let start = Point { x: 1, y: 2 }; + let middle = Point { x: 3, y: 4 }; + let end = Point { x: 5, y: 6 }; + + let _dist_1 = distance(&start, &middle); + let _dist_2 = distance(&middle, &end); +} +``` + +-- [Compiler Explorer](https://godbolt.org/z/Qmx4ST) + +As a consequence of function arguments never using heap memory, we can also infer that functions +using the `#[inline]` attributes also do not heap allocate. But better than inferring, we can look +at the assembly to prove it: + +```rust +struct Point { + x: i64, + y: i64, +} + +// Note that there is no `distance` function in the assembly output, +// and the total line count goes from 229 with inlining off +// to 306 with inline on. Even still, no heap allocations occur. +#[inline(always)] +fn distance(a: &Point, b: &Point) -> i64 { + let x1 = a.x; + let x2 = b.x; + let y1 = a.y; + let y2 = b.y; + + let x_pow = (a.x - b.x) * (a.x - b.x); + let y_pow = (a.y - b.y) * (a.y - b.y); + let squared = x_pow + y_pow; + squared / squared +} + +pub fn total_distance() { + let start = Point { x: 1, y: 2 }; + let middle = Point { x: 3, y: 4 }; + let end = Point { x: 5, y: 6 }; + + let _dist_1 = distance(&start, &middle); + let _dist_2 = distance(&middle, &end); +} +``` + +-- [Compiler Explorer](https://godbolt.org/z/30Sh66) + +Finally, passing by value (arguments with type +[`Copy`](https://doc.rust-lang.org/std/marker/trait.Copy.html)) and passing by reference (either +moving ownership or passing a pointer) may have slightly different layouts in assembly, but will +still use either stack memory or CPU registers: + +```rust +pub struct Point { + x: i64, + y: i64, +} + +// Moving values +pub fn distance_moved(a: Point, b: Point) -> i64 { + let x1 = a.x; + let x2 = b.x; + let y1 = a.y; + let y2 = b.y; + + let x_pow = (x1 - x2) * (x1 - x2); + let y_pow = (y1 - y2) * (y1 - y2); + let squared = x_pow + y_pow; + squared / squared +} + +// Borrowing values has two extra `mov` instructions on lines 21 and 22 +pub fn distance_borrowed(a: &Point, b: &Point) -> i64 { + let x1 = a.x; + let x2 = b.x; + let y1 = a.y; + let y2 = b.y; + + let x_pow = (x1 - x2) * (x1 - x2); + let y_pow = (y1 - y2) * (y1 - y2); + let squared = x_pow + y_pow; + squared / squared +} +``` + +-- [Compiler Explorer](https://godbolt.org/z/06hGiv) + +# Enums + +If you've ever worried that wrapping your types in +[`Option`](https://doc.rust-lang.org/stable/core/option/enum.Option.html) or +[`Result`](https://doc.rust-lang.org/stable/core/result/enum.Result.html) would finally make them +large enough that Rust decides to use heap allocation instead, fear no longer: `enum` and union +types don't use heap allocation: + +```rust +enum MyEnum { + Small(u8), + Large(u64) +} + +struct MyStruct { + x: MyEnum, + y: MyEnum, +} + +pub fn enum_compare() { + let x = MyEnum::Small(0); + let y = MyEnum::Large(0); + + let z = MyStruct { x, y }; + + let opt = Option::Some(z); +} +``` + +-- [Compiler Explorer](https://godbolt.org/z/HK7zBx) + +Because the size of an `enum` is the size of its largest element plus a flag, the compiler can +predict how much memory is used no matter which variant of an enum is currently stored in a +variable. Thus, enums and unions have no need of heap allocation. There's unfortunately not a great +way to show this in assembly, so I'll instead point you to the +[`core::mem::size_of`](https://doc.rust-lang.org/stable/core/mem/fn.size_of.html#size-of-enums) +documentation. 
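+
+If you'd rather not read the assembly at all, a quick check with `core::mem::size_of` makes the
+same point. This is a small sketch re-using the types from the example above; the exact byte counts
+are layout details the compiler is free to choose, so treat the output as illustrative:
+
+```rust
+use std::mem::size_of;
+
+enum MyEnum {
+    Small(u8),
+    Large(u64)
+}
+
+struct MyStruct {
+    x: MyEnum,
+    y: MyEnum,
+}
+
+fn main() {
+    // Each `MyEnum` reserves room for its largest variant (a `u64`)
+    // plus space for the discriminant, so its size is known up front.
+    println!("MyEnum: {} bytes", size_of::<MyEnum>());
+    println!("MyStruct: {} bytes", size_of::<MyStruct>());
+    println!("Option<MyStruct>: {} bytes", size_of::<Option<MyStruct>>());
+}
+```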
+
+# Arrays
+
+The array type is guaranteed to be stack allocated, which is why the array size must be declared.
+Interestingly enough, this can be used to cause safe Rust programs to crash:
+
+```rust
+// 256 bytes
+#[derive(Default)]
+struct TwoFiftySix {
+    _a: [u64; 32]
+}
+
+// 8 kilobytes
+#[derive(Default)]
+struct EightK {
+    _a: [TwoFiftySix; 32]
+}
+
+// 256 kilobytes
+#[derive(Default)]
+struct TwoFiftySixK {
+    _a: [EightK; 32]
+}
+
+// 8 megabytes - exceeds space typically provided for the stack,
+// though the kernel can be instructed to allocate more.
+// On Linux, you can check stack size using `ulimit -s`
+#[derive(Default)]
+struct EightM {
+    _a: [TwoFiftySixK; 32]
+}
+
+fn main() {
+    // Because we already have things in stack memory
+    // (like the current function call stack), allocating another
+    // eight megabytes of stack memory crashes the program
+    let _x = EightM::default();
+}
+```
+
+--
+[Rust Playground](https://play.rust-lang.org/?version=stable&mode=debug&edition=2018&gist=587a6380a4914bcbcef4192c90c01dc4)
+
+There aren't any security implications of this (no memory corruption occurs), but it's good to note
+that the Rust compiler won't move arrays into heap memory even if they can be reasonably expected to
+overflow the stack.
+
+# Closures
+
+Rules for how anonymous functions capture their arguments are typically language-specific. In Java,
+[Lambda Expressions](https://docs.oracle.com/javase/tutorial/java/javaOO/lambdaexpressions.html) are
+actually objects created on the heap that capture local primitives by copying, and capture local
+non-primitives as (`final`) references.
+[Python](https://docs.python.org/3.7/reference/expressions.html#lambda) and
+[JavaScript](https://javascriptweblog.wordpress.com/2010/10/25/understanding-javascript-closures/)
+both bind _everything_ by reference normally, but Python can also
+[capture values](https://stackoverflow.com/a/235764/1454178) and JavaScript has
+[Arrow functions](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Functions/Arrow_functions).
+
+In Rust, arguments to closures are the same as arguments to other functions; closures are simply
+functions that don't have a declared name. Some weird ordering of the stack may be required to
+handle them, but it's the compiler's responsibility to figure that out.
+
+Each example below has the same effect, but a different assembly implementation. In the simplest
+case, we immediately run a closure returned by another function. Because we don't store a reference
+to the closure, the stack memory needed to store the captured values is contiguous:
+
+```rust
+fn my_func() -> impl FnOnce() {
+    let x = 24;
+    // Note that this closure in assembly looks exactly like
+    // any other function; you even use the `call` instruction
+    // to start running it.
+    move || { x; }
+}
+
+pub fn immediate() {
+    my_func()();
+    my_func()();
+}
+```
+
+-- [Compiler Explorer](https://godbolt.org/z/mgJ2zl), 25 total assembly instructions
+
+If we store a reference to the closure, the Rust compiler keeps values it needs in the stack memory
+of the original function.
Getting the details right is a bit harder, so the instruction count goes
+up even though this code is functionally equivalent to our original example:
+
+```rust
+pub fn simple_reference() {
+    let x = my_func();
+    let y = my_func();
+    y();
+    x();
+}
+```
+
+-- [Compiler Explorer](https://godbolt.org/z/K_dj5n), 55 total assembly instructions
+
+Even things like variable order can make a difference in instruction count:
+
+```rust
+pub fn complex() {
+    let x = my_func();
+    let y = my_func();
+    x();
+    y();
+}
+```
+
+-- [Compiler Explorer](https://godbolt.org/z/p37qFl), 70 total assembly instructions
+
+In every circumstance though, the compiler ensured that no heap allocations were necessary.
+
+# Generics
+
+Traits in Rust come in two broad forms: static dispatch (monomorphization, `impl Trait`) and dynamic
+dispatch (trait objects, `dyn Trait`). While dynamic dispatch is often _associated_ with trait
+objects being stored in the heap, dynamic dispatch can be used with stack allocated objects as well:
+
+```rust
+trait GetInt {
+    fn get_int(&self) -> u64;
+}
+
+// vtable stored at section L__unnamed_1
+struct WhyNotU8 {
+    x: u8
+}
+impl GetInt for WhyNotU8 {
+    fn get_int(&self) -> u64 {
+        self.x as u64
+    }
+}
+
+// vtable stored at section L__unnamed_2
+struct ActualU64 {
+    x: u64
+}
+impl GetInt for ActualU64 {
+    fn get_int(&self) -> u64 {
+        self.x
+    }
+}
+
+// `&dyn` declares that we want to use dynamic dispatch
+// rather than monomorphization, so there is only one
+// `retrieve_int` function that shows up in the final assembly.
+// If we used generics, there would be one implementation of
+// `retrieve_int` for each type that implements `GetInt`.
+pub fn retrieve_int(u: &dyn GetInt) {
+    // In the assembly, we just call an address given to us
+    // in the `rsi` register and hope that it was set up
+    // correctly when this function was invoked.
+    let x = u.get_int();
+}
+
+pub fn do_call() {
+    // Note that even though the vtable for `WhyNotU8` and
+    // `ActualU64` includes a pointer to
+    // `core::ptr::real_drop_in_place`, it is never invoked.
+    let a = WhyNotU8 { x: 0 };
+    let b = ActualU64 { x: 0 };
+
+    retrieve_int(&a);
+    retrieve_int(&b);
+}
+```
+
+-- [Compiler Explorer](https://godbolt.org/z/u_yguS)
+
+It's hard to imagine practical situations where dynamic dispatch would be used for objects that
+aren't heap allocated, but it technically can be done.
+
+# Copy types
+
+Understanding move semantics and copy semantics in Rust is weird at first. The Rust docs
+[go into detail](https://doc.rust-lang.org/stable/core/marker/trait.Copy.html) far better than can
+be addressed here, so I'll leave them to do the job. From a memory perspective though, their
+guideline is reasonable:
+[if your type can implement `Copy`, it should](https://doc.rust-lang.org/stable/core/marker/trait.Copy.html#when-should-my-type-be-copy).
+While there are potential speed tradeoffs to _benchmark_ when discussing `Copy` (move semantics for
+stack objects vs. copying stack pointers vs. copying stack `struct`s), _it's impossible for `Copy`
+to introduce a heap allocation_.
+
+But why is this the case? Fundamentally, it's because the language controls what `Copy` means -
+["the behavior of `Copy` is not overloadable"](https://doc.rust-lang.org/std/marker/trait.Copy.html#whats-the-difference-between-copy-and-clone)
+because it's a marker trait.
From there we'll note that a type
+[can implement `Copy`](https://doc.rust-lang.org/std/marker/trait.Copy.html#when-can-my-type-be-copy)
+if (and only if) its components implement `Copy`, and that
+[no heap-allocated types implement `Copy`](https://doc.rust-lang.org/std/marker/trait.Copy.html#implementors).
+Thus, assignments involving heap types are always move semantics, and new heap allocations won't
+occur because of implicit operator behavior.
+
+```rust
+#[derive(Clone)]
+struct Cloneable {
+    x: Box<u64>
+}
+
+// error[E0204]: the trait `Copy` may not be implemented for this type
+#[derive(Copy, Clone)]
+struct NotCopyable {
+    x: Box<u64>
+}
+```
+
+-- [Compiler Explorer](https://godbolt.org/z/VToRuK)
+
+# Iterators
+
+In managed memory languages (like
+[Java](https://www.youtube.com/watch?v=bSkpMdDe4g4&feature=youtu.be&t=357)), there's a subtle
+difference between these two code samples:
+
+```java
+public static long sum_for(List<Long> vals) {
+    long sum = 0;
+    // Regular for loop
+    for (int i = 0; i < vals.size(); i++) {
+        sum += vals.get(i);
+    }
+    return sum;
+}
+
+public static long sum_foreach(List<Long> vals) {
+    long sum = 0;
+    // "Foreach" loop - uses iteration
+    for (Long l : vals) {
+        sum += l;
+    }
+    return sum;
+}
+```
+
+In the `sum_for` function, nothing terribly interesting happens. In `sum_foreach`, an object of type
+[`Iterator`](https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/util/Iterator.html)
+is allocated on the heap, and will eventually be garbage-collected. This isn't a great design;
+iterators are often transient objects that you need during a function and can discard once the
+function ends. Sounds exactly like the issue stack-allocated objects address, no?
+
+In Rust, iterators are allocated on the stack. The objects to iterate over are almost certainly in
+heap memory, but the iterator itself
+([`Iter`](https://doc.rust-lang.org/std/slice/struct.Iter.html)) doesn't need to use the heap. In
+each of the examples below we iterate over a collection, but never use heap allocation:
+
+```rust
+use std::collections::HashMap;
+// There's a lot of assembly generated, but if you search in the text,
+// there are no references to `real_drop_in_place` anywhere.
+
+pub fn sum_vec(x: &Vec<u32>) {
+    let mut s = 0;
+    // Basic iteration over vectors doesn't need allocation
+    for y in x {
+        s += y;
+    }
+}
+
+pub fn sum_enumerate(x: &Vec<u32>) {
+    let mut s = 0;
+    // More complex iterators are just fine too
+    for (_i, y) in x.iter().enumerate() {
+        s += y;
+    }
+}
+
+pub fn sum_hm(x: &HashMap<u32, u32>) {
+    let mut s = 0;
+    // And it's not just Vec, all types will allocate the iterator
+    // on stack memory
+    for y in x.values() {
+        s += y;
+    }
+}
+```
+
+-- [Compiler Explorer](https://godbolt.org/z/FTT3CT)
diff --git a/_posts/2019-02-07-a-heaping-helping.md b/_posts/2019-02-07-a-heaping-helping.md
new file mode 100644
index 0000000..b68c447
--- /dev/null
+++ b/_posts/2019-02-07-a-heaping-helping.md
@@ -0,0 +1,254 @@
+---
+layout: post
+title: "Dynamic Memory: A Heaping Helping"
+description: "The reason Rust exists."
+category:
+tags: [rust, understanding-allocations]
+---
+
+Managing dynamic memory is hard. Some languages assume users will do it themselves (C, C++), and
+some languages go to extreme lengths to protect users from themselves (Java, Python). In Rust, how
+the language uses dynamic memory (also referred to as the **heap**) is a system called _ownership_.
+And as the docs mention, ownership
+[is Rust's most unique feature](https://doc.rust-lang.org/book/ch04-00-understanding-ownership.html).
+ +The heap is used in two situations; when the compiler is unable to predict either the _total size of +memory needed_, or _how long the memory is needed for_, it allocates space in the heap. This happens +pretty frequently; if you want to download the Google home page, you won't know how large it is +until your program runs. And when you're finished with Google, we deallocate the memory so it can be +used to store other webpages. If you're interested in a slightly longer explanation of the heap, +check out +[The Stack and the Heap](https://doc.rust-lang.org/book/ch04-01-what-is-ownership.html#the-stack-and-the-heap) +in Rust's documentation. + +We won't go into detail on how the heap is managed; the +[ownership documentation](https://doc.rust-lang.org/book/ch04-01-what-is-ownership.html) does a +phenomenal job explaining both the "why" and "how" of memory management. Instead, we're going to +focus on understanding "when" heap allocations occur in Rust. + +To start off, take a guess for how many allocations happen in the program below: + +```rust +fn main() {} +``` + +It's obviously a trick question; while no heap allocations occur as a result of that code, the setup +needed to call `main` does allocate on the heap. Here's a way to show it: + +```rust +#![feature(integer_atomics)] +use std::alloc::{GlobalAlloc, Layout, System}; +use std::sync::atomic::{AtomicU64, Ordering}; + +static ALLOCATION_COUNT: AtomicU64 = AtomicU64::new(0); + +struct CountingAllocator; + +unsafe impl GlobalAlloc for CountingAllocator { + unsafe fn alloc(&self, layout: Layout) -> *mut u8 { + ALLOCATION_COUNT.fetch_add(1, Ordering::SeqCst); + System.alloc(layout) + } + + unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) { + System.dealloc(ptr, layout); + } +} + +#[global_allocator] +static A: CountingAllocator = CountingAllocator; + +fn main() { + let x = ALLOCATION_COUNT.fetch_add(0, Ordering::SeqCst); + println!("There were {} allocations before calling main!", x); +} +``` + +-- +[Rust Playground](https://play.rust-lang.org/?version=nightly&mode=debug&edition=2018&gist=fb5060025ba79fc0f906b65a4ef8eb8e) + +As of the time of writing, there are five allocations that happen before `main` is ever called. + +But when we want to understand more practically where heap allocation happens, we'll follow this +guide: + +- Smart pointers hold their contents in the heap +- Collections are smart pointers for many objects at a time, and reallocate when they need to grow + +Finally, there are two "addendum" issues that are important to address when discussing Rust and the +heap: + +- Non-heap alternatives to many standard library types are available. +- Special allocators to track memory behavior should be used to benchmark code. + +# Smart pointers + +The first thing to note are the "smart pointer" types. When you have data that must outlive the +scope in which it is declared, or your data is of unknown or dynamic size, you'll make use of these +types. + +The term [smart pointer](https://en.wikipedia.org/wiki/Smart_pointer) comes from C++, and while it's +closely linked to a general design pattern of +["Resource Acquisition Is Initialization"](https://en.cppreference.com/w/cpp/language/raii), we'll +use it here specifically to describe objects that are responsible for managing ownership of data +allocated on the heap. 
The smart pointers available in the `alloc` crate should look mostly +familiar: + +- [`Box`](https://doc.rust-lang.org/alloc/boxed/struct.Box.html) +- [`Rc`](https://doc.rust-lang.org/alloc/rc/struct.Rc.html) +- [`Arc`](https://doc.rust-lang.org/alloc/sync/struct.Arc.html) +- [`Cow`](https://doc.rust-lang.org/alloc/borrow/enum.Cow.html) + +The [standard library](https://doc.rust-lang.org/std/) also defines some smart pointers to manage +heap objects, though more than can be covered here. Some examples are: + +- [`RwLock`](https://doc.rust-lang.org/std/sync/struct.RwLock.html) +- [`Mutex`](https://doc.rust-lang.org/std/sync/struct.Mutex.html) + +Finally, there is one ["gotcha"](https://www.merriam-webster.com/dictionary/gotcha): **cell types** +(like [`RefCell`](https://doc.rust-lang.org/stable/core/cell/struct.RefCell.html)) look and behave +similarly, but **don't involve heap allocation**. The +[`core::cell` docs](https://doc.rust-lang.org/stable/core/cell/index.html) have more information. + +When a smart pointer is created, the data it is given is placed in heap memory and the location of +that data is recorded in the smart pointer. Once the smart pointer has determined it's safe to +deallocate that memory (when a `Box` has +[gone out of scope](https://doc.rust-lang.org/stable/std/boxed/index.html) or a reference count +[goes to zero](https://doc.rust-lang.org/alloc/rc/index.html)), the heap space is reclaimed. We can +prove these types use heap memory by looking at code: + +```rust +use std::rc::Rc; +use std::sync::Arc; +use std::borrow::Cow; + +pub fn my_box() { + // Drop at assembly line 1640 + Box::new(0); +} + +pub fn my_rc() { + // Drop at assembly line 1650 + Rc::new(0); +} + +pub fn my_arc() { + // Drop at assembly line 1660 + Arc::new(0); +} + +pub fn my_cow() { + // Drop at assembly line 1672 + Cow::from("drop"); +} +``` + +-- [Compiler Explorer](https://godbolt.org/z/4AMQug) + +# Collections + +Collection types use heap memory because their contents have dynamic size; they will request more +memory [when needed](https://doc.rust-lang.org/std/vec/struct.Vec.html#method.reserve), and can +[release memory](https://doc.rust-lang.org/std/vec/struct.Vec.html#method.shrink_to_fit) when it's +no longer necessary. This dynamic property forces Rust to heap allocate everything they contain. In +a way, **collections are smart pointers for many objects at a time**. Common types that fall under +this umbrella are [`Vec`](https://doc.rust-lang.org/stable/alloc/vec/struct.Vec.html), +[`HashMap`](https://doc.rust-lang.org/stable/std/collections/struct.HashMap.html), and +[`String`](https://doc.rust-lang.org/stable/alloc/string/struct.String.html) (not +[`str`](https://doc.rust-lang.org/std/primitive.str.html)). + +While collections store the objects they own in heap memory, _creating new collections will not +allocate on the heap_. 
This is a bit weird; if we call `Vec::new()`, the assembly shows a
+corresponding call to `real_drop_in_place`:
+
+```rust
+pub fn my_vec() {
+    // Drop in place at line 481
+    Vec::<u8>::new();
+}
+```
+
+-- [Compiler Explorer](https://godbolt.org/z/1WkNtC)
+
+But because the vector has no elements to manage, no calls to the allocator will ever be dispatched:
+
+```rust
+use std::alloc::{GlobalAlloc, Layout, System};
+use std::sync::atomic::{AtomicBool, Ordering};
+
+fn main() {
+    // Turn on panicking if we allocate on the heap
+    DO_PANIC.store(true, Ordering::SeqCst);
+
+    // Interesting bit happens here
+    let x: Vec<u8> = Vec::new();
+    drop(x);
+
+    // Turn panicking back off, some deallocations occur
+    // after main as well.
+    DO_PANIC.store(false, Ordering::SeqCst);
+}
+
+#[global_allocator]
+static A: PanicAllocator = PanicAllocator;
+static DO_PANIC: AtomicBool = AtomicBool::new(false);
+struct PanicAllocator;
+
+unsafe impl GlobalAlloc for PanicAllocator {
+    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
+        if DO_PANIC.load(Ordering::SeqCst) {
+            panic!("Unexpected allocation.");
+        }
+        System.alloc(layout)
+    }
+
+    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
+        if DO_PANIC.load(Ordering::SeqCst) {
+            panic!("Unexpected deallocation.");
+        }
+        System.dealloc(ptr, layout);
+    }
+}
+```
+
+--
+[Rust Playground](https://play.rust-lang.org/?version=stable&mode=debug&edition=2018&gist=831a297d176d015b1f9ace01ae416cc6)
+
+Other standard library types follow the same behavior; make sure to check out
+[`HashMap::new()`](https://doc.rust-lang.org/std/collections/hash_map/struct.HashMap.html#method.new),
+and [`String::new()`](https://doc.rust-lang.org/std/string/struct.String.html#method.new).
+
+# Heap Alternatives
+
+While it is a bit strange to speak of the stack after spending time with the heap, it's worth
+pointing out that some heap-allocated objects in Rust have stack-based counterparts provided by
+other crates. If you have need of the functionality, but want to avoid allocating, there are
+typically alternatives available.
+
+When it comes to some standard library smart pointers
+([`RwLock`](https://doc.rust-lang.org/std/sync/struct.RwLock.html) and
+[`Mutex`](https://doc.rust-lang.org/std/sync/struct.Mutex.html)), stack-based alternatives are
+provided in crates like [parking_lot](https://crates.io/crates/parking_lot) and
+[spin](https://crates.io/crates/spin). You can check out
+[`lock_api::RwLock`](https://docs.rs/lock_api/0.1.5/lock_api/struct.RwLock.html),
+[`lock_api::Mutex`](https://docs.rs/lock_api/0.1.5/lock_api/struct.Mutex.html), and
+[`spin::Once`](https://mvdnes.github.io/rust-docs/spin-rs/spin/struct.Once.html) if you're in need
+of synchronization primitives.
+
+[thread_id](https://crates.io/crates/thread-id) may be necessary if you're implementing an allocator
+because [`thread::current().id()`](https://doc.rust-lang.org/std/thread/struct.ThreadId.html) uses a
+[`thread_local!` structure](https://doc.rust-lang.org/stable/src/std/sys_common/thread_info.rs.html#17-36)
+that needs heap allocation.
+
+# Tracing Allocators
+
+When writing performance-sensitive code, there's no alternative to measuring your code. If you
+didn't write a benchmark,
+[you don't care about its performance](https://www.youtube.com/watch?v=2EWejmkKlxs&feature=youtu.be&t=263).
+You should never rely on your instincts when
+[a microsecond is an eternity](https://www.youtube.com/watch?v=NH1Tta7purM).
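+
+One way to put that into practice is to turn the counting allocator from the top of this post into
+a regression test. The sketch below is illustrative rather than prescriptive; `build_widget` and
+the expected count stand in for whatever code path you actually care about:
+
+```rust
+use std::alloc::{GlobalAlloc, Layout, System};
+use std::sync::atomic::{AtomicU64, Ordering};
+
+static ALLOCATION_COUNT: AtomicU64 = AtomicU64::new(0);
+
+struct CountingAllocator;
+
+unsafe impl GlobalAlloc for CountingAllocator {
+    unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
+        ALLOCATION_COUNT.fetch_add(1, Ordering::SeqCst);
+        System.alloc(layout)
+    }
+
+    unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
+        System.dealloc(ptr, layout);
+    }
+}
+
+#[global_allocator]
+static A: CountingAllocator = CountingAllocator;
+
+// Placeholder for the code whose memory behavior you want to pin down.
+fn build_widget() -> Vec<u64> {
+    vec![0; 16]
+}
+
+#[test]
+fn widget_allocates_exactly_once() {
+    // Run with `cargo test -- --test-threads=1` so other tests don't
+    // bump the counter while this measurement is in flight.
+    let before = ALLOCATION_COUNT.load(Ordering::SeqCst);
+    let _widget = build_widget();
+    let after = ALLOCATION_COUNT.load(Ordering::SeqCst);
+
+    // One `Vec`, one allocation; if this drifts, the test says so.
+    assert_eq!(after - before, 1);
+}
+```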
+
+Similarly, there's great work going on in Rust with allocators that keep track of what they're doing
+(like [`alloc_counter`](https://crates.io/crates/alloc_counter)). When it comes to tracking heap
+behavior, it's easy to make mistakes; please write tests and make sure you have tools to guard
+against future issues.
diff --git a/_posts/2019-02-08-compiler-optimizations.md b/_posts/2019-02-08-compiler-optimizations.md
new file mode 100644
index 0000000..4b8b385
--- /dev/null
+++ b/_posts/2019-02-08-compiler-optimizations.md
@@ -0,0 +1,148 @@
+---
+layout: post
+title: "Compiler Optimizations: What It's Done Lately"
+description: "A lot. The answer is a lot."
+category:
+tags: [rust, understanding-allocations]
+---
+
+**Update 2019-02-10**: When debugging a
+[related issue](https://gitlab.com/sio4/code/alloc-counter/issues/1), it was discovered that the
+original code worked because LLVM optimized out the entire function, rather than just the allocation
+segments. The code has been updated with proper use of
+[`read_volatile`](https://doc.rust-lang.org/std/ptr/fn.read_volatile.html), and a previous section
+on vector capacity has been removed.
+
+---
+
+Up to this point, we've been discussing memory usage in the Rust language by focusing on simple
+rules that are mostly right for small chunks of code. We've spent time showing how those rules work
+themselves out in practice, and become familiar with reading the assembly code needed to see each
+memory type (global, stack, heap) in action.
+
+Throughout the series so far, we've put a handicap on the code. In the name of consistent and
+understandable results, we've asked the compiler to pretty please leave the training wheels on. Now
+is the time where we throw out all the rules and take off the kid gloves. As it turns out, both the
+Rust compiler and the LLVM optimizers are incredibly sophisticated, and we'll step back and let them
+do their job.
+
+Similar to
+["What Has My Compiler Done For Me Lately?"](https://www.youtube.com/watch?v=bSkpMdDe4g4), we're
+focusing on interesting things the Rust language (and LLVM!) can do with memory management. We'll
+still be looking at assembly code to understand what's going on, but it's important to mention
+again: **please use automated tools like [alloc-counter](https://crates.io/crates/alloc_counter) to
+double-check memory behavior if it's something you care about**. It's far too easy to mis-read
+assembly in large code sections; you should always verify behavior if you care about memory usage.
+
+The guiding principle as we move forward is this: _optimizing compilers won't produce worse programs
+than we started with._ There won't be any situations where stack allocations get moved to heap
+allocations. There will, however, be an opera of optimization.
+
+# The Case of the Disappearing Box
+
+Our first optimization comes when LLVM can reason that the lifetime of an object is sufficiently
+short that heap allocations aren't necessary. In these cases, LLVM will move the allocation to the
+stack instead!
The way this interacts with `#[inline]` attributes is a bit opaque, but the important +part is that LLVM can sometimes do better than the baseline Rust language: + +```rust +use std::alloc::{GlobalAlloc, Layout, System}; +use std::sync::atomic::{AtomicBool, Ordering}; + +pub fn cmp(x: u32) { + // Turn on panicking if we allocate on the heap + DO_PANIC.store(true, Ordering::SeqCst); + + // The compiler is able to see through the constant `Box` + // and directly compare `x` to 24 - assembly line 73 + let y = Box::new(24); + let equals = x == *y; + + // This call to drop is eliminated + drop(y); + + // Need to mark the comparison result as volatile so that + // LLVM doesn't strip out all the code. If `y` is marked + // volatile instead, allocation will be forced. + unsafe { std::ptr::read_volatile(&equals) }; + + // Turn off panicking, as there are some deallocations + // when we exit main. + DO_PANIC.store(false, Ordering::SeqCst); +} + +fn main() { + cmp(12) +} + +#[global_allocator] +static A: PanicAllocator = PanicAllocator; +static DO_PANIC: AtomicBool = AtomicBool::new(false); +struct PanicAllocator; + +unsafe impl GlobalAlloc for PanicAllocator { + unsafe fn alloc(&self, layout: Layout) -> *mut u8 { + if DO_PANIC.load(Ordering::SeqCst) { + panic!("Unexpected allocation."); + } + System.alloc(layout) + } + + unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) { + if DO_PANIC.load(Ordering::SeqCst) { + panic!("Unexpected deallocation."); + } + System.dealloc(ptr, layout); + } +} +``` + +## -- [Compiler Explorer](https://godbolt.org/z/BZ_Yp3) + +[Rust Playground](https://play.rust-lang.org/?version=stable&mode=release&edition=2018&gist=4a765f753183d5b919f62c71d2109d5d) + +# Dr. Array or: How I Learned to Love the Optimizer + +Finally, this isn't so much about LLVM figuring out different memory behavior, but LLVM stripping +out code that doesn't do anything. Optimizations of this type have a lot of nuance to them; if +you're not careful, they can make your benchmarks look +[impossibly good](https://www.youtube.com/watch?v=nXaxk27zwlk&feature=youtu.be&t=1199). In Rust, the +`black_box` function (implemented in both +[`libtest`](https://doc.rust-lang.org/1.1.0/test/fn.black_box.html) and +[`criterion`](https://docs.rs/criterion/0.2.10/criterion/fn.black_box.html)) will tell the compiler +to disable this kind of optimization. But if you let LLVM remove unnecessary code, you can end up +running programs that previously caused errors: + +```rust +#[derive(Default)] +struct TwoFiftySix { + _a: [u64; 32] +} + +#[derive(Default)] +struct EightK { + _a: [TwoFiftySix; 32] +} + +#[derive(Default)] +struct TwoFiftySixK { + _a: [EightK; 32] +} + +#[derive(Default)] +struct EightM { + _a: [TwoFiftySixK; 32] +} + +pub fn main() { + // Normally this blows up because we can't reserve size on stack + // for the `EightM` struct. But because the compiler notices we + // never do anything with `_x`, it optimizes out the stack storage + // and the program completes successfully. + let _x = EightM::default(); +} +``` + +## -- [Compiler Explorer](https://godbolt.org/z/daHn7P) + +[Rust Playground](https://play.rust-lang.org/?version=stable&mode=release&edition=2018&gist=4c253bf26072119896ab93c6ef064dc0) diff --git a/_posts/2019-02-09-summary.md b/_posts/2019-02-09-summary.md new file mode 100644 index 0000000..dd7f06d --- /dev/null +++ b/_posts/2019-02-09-summary.md @@ -0,0 +1,35 @@ +--- +layout: post +title: "Summary: What are the Allocation Rules?" +description: "A synopsis and reference." 
+category: +tags: [rust, understanding-allocations] +--- + +While there's a lot of interesting detail captured in this series, it's often helpful to have a +document that answers some "yes/no" questions. You may not care about what an `Iterator` looks like +in assembly, you just need to know whether it allocates an object on the heap or not. And while Rust +will prioritize the fastest behavior it can, here are the rules for each memory type: + +**Heap Allocation**: + +- Smart pointers (`Box`, `Rc`, `Mutex`, etc.) allocate their contents in heap memory. +- Collections (`HashMap`, `Vec`, `String`, etc.) allocate their contents in heap memory. +- Some smart pointers in the standard library have counterparts in other crates that don't need heap + memory. If possible, use those. + +**Stack Allocation**: + +- Everything not using a smart pointer will be allocated on the stack. +- Structs, enums, iterators, arrays, and closures are all stack allocated. +- Cell types (`RefCell`) behave like smart pointers, but are stack-allocated. +- Inlining (`#[inline]`) will not affect allocation behavior for better or worse. +- Types that are marked `Copy` are guaranteed to have their contents stack-allocated. + +**Global Allocation**: + +- `const` is a fixed value; the compiler is allowed to copy it wherever useful. +- `static` is a fixed reference; the compiler will guarantee it is unique. + +![Container Sizes in Rust](/assets/images/2019-02-04-container-size.svg) -- +[Raph Levien](https://docs.google.com/presentation/d/1q-c7UAyrUlM-eZyTo1pd8SZ0qwA_wYxmPZVOQkoDmH4/edit?usp=sharing) diff --git a/_posts/2019-05-03-making-bread.md b/_posts/2019-05-03-making-bread.md new file mode 100644 index 0000000..de794e0 --- /dev/null +++ b/_posts/2019-05-03-making-bread.md @@ -0,0 +1,52 @@ +--- +layout: post +title: "Making Bread" +description: "...because I've got some free time now. 🍞" +category: +tags: [baking] +--- + +Having recently started my "gardening leave" between positions, I have some more personal time +available. I'm planning to stay productive, contributing to some open-source projects, but it also +occurred to me that despite [talking about](https://speice.io/2018/05/hello.html) bread pics, this +blog has been purely technical. Maybe I'll change the site title from "The Old Speice Guy" to "Bites +and Bytes"? + +Either way, I'm baking a little bit again, and figured it was worth taking a quick break to focus on +some lighter material. I recently learned two critically important lessons: first, the temperature +of the dough when you put the yeast in makes a huge difference. + +Previously, when I wasn't paying attention to dough temperature: + +![Whole weat dough](/assets/images/2019-05-03-making-bread/whole-wheat-not-rising.jpg) + +Compared with what happens when I put the dough in the microwave for a defrost cycle because the +water I used wasn't warm enough: + +![White dough](/assets/images/2019-05-03-making-bread/white-dough-rising-before-fold.jpg) + +I mean, just look at the bubbles! + +![White dough with bubbles](/assets/images/2019-05-03-making-bread/white-dough-rising-after-fold.jpg) + +After shaping the dough, I've got two loaves ready: + +![Shaped loaves](/assets/images/2019-05-03-making-bread/shaped-loaves.jpg) + +Now, the recipe normally calls for a Dutch Oven to bake the bread because it keeps the dough from +drying out in the oven. Because I don't own a Dutch Oven, I typically put a casserole dish on the +bottom rack and fill it with water so there's still some moisture in the oven. 
This time, I forgot
+to add the water and learned my second lesson: never add room-temperature water to a glass dish
+that's currently at 500 degrees.
+
+![Shattered glass dish](/assets/images/2019-05-03-making-bread/shattered-glass.jpg)
+
+Needless to say, trying to pull out sharp glass from an incredibly hot oven is not what I expected
+to be doing during my garden leave.
+
+In the end, the bread crust wasn't great, but the bread itself turned out pretty alright:
+
+![Baked bread](/assets/images/2019-05-03-making-bread/final-product.jpg)
+
+I've been writing a lot more during this break, so I'm looking forward to sharing that in the
+future. In the meantime, I'm planning on making a sandwich.
diff --git a/_posts/2019-06-31-high-performance-systems.md b/_posts/2019-06-31-high-performance-systems.md
new file mode 100644
index 0000000..23ef44b
--- /dev/null
+++ b/_posts/2019-06-31-high-performance-systems.md
@@ -0,0 +1,296 @@
+---
+layout: post
+title: "On Building High Performance Systems"
+description: ""
+category:
+tags: []
+---
+
+**Update 2019-09-21**: Added notes on `isolcpus` and `systemd` affinity.
+
+Prior to working in the trading industry, my assumption was that High Frequency Trading (HFT) is
+made up of people who have access to secret techniques mortal developers could only dream of. There
+had to be some secret art that could only be learned if one had an appropriately tragic backstory:
+
+![kung-fu fight](/assets/images/2019-04-24-kung-fu.webp)
+> How I assumed HFT people learn their secret techniques
+
+How else do you explain people working on systems that complete the round trip of market data in to
+orders out (a.k.a. tick-to-trade) consistently within
+[750-800 nanoseconds](https://stackoverflow.com/a/22082528/1454178)? In roughly the time it takes a
+computer to access
+[main memory 8 times](https://people.eecs.berkeley.edu/~rcs/research/interactive_latency.html),
+trading systems are capable of reading the market data packets, deciding what orders to send, doing
+risk checks, creating new packets for exchange-specific protocols, and putting those packets on the
+wire.
+
+Having now worked in the trading industry, I can confirm the developers aren't super-human; I've
+made some simple mistakes at the very least. Instead, what shows up in public discussions is that
+philosophy, not technique, separates high-performance systems from everything else.
+Performance-critical systems don't rely on "this one cool C++ optimization trick" to make code fast
+(though micro-optimizations have their place); there's a lot more to worry about than just the code
+written for the project.
+
+The framework I'd propose is this: **If you want to build high-performance systems, focus first on
+reducing performance variance** (reducing the gap between the fastest and slowest runs of the same
+code), **and only look at average latency once variance is at an acceptable level**.
+
+Don't get me wrong, I'm a much happier person when things are fast. Computer goes from booting in 20
+seconds down to 10 because I installed a solid-state drive? Awesome. But if every fifth day it takes
+a full minute to boot because of corrupted sectors? Not so great. Average speed over the course of a
+week is the same in each situation, but you're painfully aware of that minute when it happens. When
+it comes to code, the principle is the same: speeding up a function by an average of 10 milliseconds
+doesn't mean much if there's a 100ms difference between your fastest and slowest runs.
When +performance matters, you need to respond quickly _every time_, not just in aggregate. +High-performance systems should first optimize for time variance. Once you're consistent at the time +scale you care about, then focus on improving average time. + +This focus on variance shows up all the time in industry too (emphasis added in all quotes below): + +- In [marketing materials](https://business.nasdaq.com/market-tech/marketplaces/trading) for + NASDAQ's matching engine, the most performance-sensitive component of the exchange, dependability + is highlighted in addition to instantaneous metrics: + + > Able to **consistently sustain** an order rate of over 100,000 orders per second at sub-40 + > microsecond average latency + +- The [Aeron](https://github.com/real-logic/aeron) message bus has this to say about performance: + + > Performance is the key focus. Aeron is designed to be the highest throughput with the lowest and + > **most predictable latency possible** of any messaging system + +- The company PolySync, which is working on autonomous vehicles, + [mentions why](https://polysync.io/blog/session-types-for-hearty-codecs/) they picked their + specific messaging format: + + > In general, high performance is almost always desirable for serialization. But in the world of + > autonomous vehicles, **steady timing performance is even more important** than peak throughput. + > This is because safe operation is sensitive to timing outliers. Nobody wants the system that + > decides when to slam on the brakes to occasionally take 100 times longer than usual to encode + > its commands. + +- [Solarflare](https://solarflare.com/), which makes highly-specialized network hardware, points out + variance (jitter) as a big concern for + [electronic trading](https://solarflare.com/electronic-trading/): + > The high stakes world of electronic trading, investment banks, market makers, hedge funds and + > exchanges demand the **lowest possible latency and jitter** while utilizing the highest + > bandwidth and return on their investment. + +And to further clarify: we're not discussing _total run-time_, but variance of total run-time. There +are situations where it's not reasonably possible to make things faster, and you'd much rather be +consistent. For example, trading firms use +[wireless networks](https://sniperinmahwah.wordpress.com/2017/06/07/network-effects-part-i/) because +the speed of light through air is faster than through fiber-optic cables. There's still at _absolute +minimum_ a [~33.76 millisecond](http://tinyurl.com/y2vd7tn8) delay required to send data between, +say, +[Chicago and Tokyo](https://www.theice.com/market-data/connectivity-and-feeds/wireless/tokyo-chicago). +If a trading system in Chicago calls the function for "send order to Tokyo" and waits to see if a +trade occurs, there's a physical limit to how long that will take. In this situation, the focus is +on keeping variance of _additional processing_ to a minimum, since speed of light is the limiting +factor. + +So how does one go about looking for and eliminating performance variance? To tell the truth, I +don't think a systematic answer or flow-chart exists. There's no substitute for (A) building a deep +understanding of the entire technology stack, and (B) actually measuring system performance (though +(C) watching a lot of [CppCon](https://www.youtube.com/channel/UCMlGfpWw-RUdWX_JbLCukXg) videos for +inspiration never hurt). 
Even then, every project cares about performance to a different degree; you +may need to build an entire +[replica production system](https://www.youtube.com/watch?v=NH1Tta7purM&feature=youtu.be&t=3015) to +accurately benchmark at nanosecond precision, or you may be content to simply +[avoid garbage collection](https://www.youtube.com/watch?v=BD9cRbxWQx8&feature=youtu.be&t=1335) in +your Java code. + +Even though everyone has different needs, there are still common things to look for when trying to +isolate and eliminate variance. In no particular order, these are my focus areas when thinking about +high-performance systems: + +## Language-specific + +**Garbage Collection**: How often does garbage collection happen? When is it triggered? What are the +impacts? + +- [In Python](https://rushter.com/blog/python-garbage-collector/), individual objects are collected + if the reference count reaches 0, and each generation is collected if + `num_alloc - num_dealloc > gc_threshold` whenever an allocation happens. The GIL is acquired for + the duration of generational collection. +- Java has + [many](https://docs.oracle.com/en/java/javase/12/gctuning/parallel-collector1.html#GUID-DCDD6E46-0406-41D1-AB49-FB96A50EB9CE) + [different](https://docs.oracle.com/en/java/javase/12/gctuning/garbage-first-garbage-collector.html#GUID-ED3AB6D3-FD9B-4447-9EDF-983ED2F7A573) + [collection](https://docs.oracle.com/en/java/javase/12/gctuning/garbage-first-garbage-collector-tuning.html#GUID-90E30ACA-8040-432E-B3A0-1E0440AB556A) + [algorithms](https://docs.oracle.com/en/java/javase/12/gctuning/z-garbage-collector1.html#GUID-A5A42691-095E-47BA-B6DC-FB4E5FAA43D0) + to choose from, each with different characteristics. The default algorithms (Parallel GC in Java + 8, G1 in Java 9) freeze the JVM while collecting, while more recent algorithms + ([ZGC](https://wiki.openjdk.java.net/display/zgc) and + [Shenandoah](https://wiki.openjdk.java.net/display/shenandoah)) are designed to keep "stop the + world" to a minimum by doing collection work in parallel. + +**Allocation**: Every language has a different way of interacting with "heap" memory, but the +principle is the same: running the allocator to allocate/deallocate memory takes time that can often +be put to better use. Understanding when your language interacts with the allocator is crucial, and +not always obvious. For example: C++ and Rust don't allocate heap memory for iterators, but Java +does (meaning potential GC pauses). Take time to understand heap behavior (I made a +[a guide for Rust](/2019/02/understanding-allocations-in-rust.html)), and look into alternative +allocators ([jemalloc](http://jemalloc.net/), +[tcmalloc](https://gperftools.github.io/gperftools/tcmalloc.html)) that might run faster than the +operating system default. + +**Data Layout**: How your data is arranged in memory matters; +[data-oriented design](https://www.youtube.com/watch?v=yy8jQgmhbAU) and +[cache locality](https://www.youtube.com/watch?v=2EWejmkKlxs&feature=youtu.be&t=1185) can have huge +impacts on performance. The C family of languages (C, value types in C#, C++) and Rust all have +guarantees about the shape every object takes in memory that others (e.g. Java and Python) can't +make. [Cachegrind](http://valgrind.org/docs/manual/cg-manual.html) and kernel +[perf](https://perf.wiki.kernel.org/index.php/Main_Page) counters are both great for understanding +how performance relates to memory layout. 
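+
+To make that concrete, here's a contrived sketch of the array-of-structs vs. struct-of-arrays
+trade-off (the `Particle`/`Particles` types are invented for illustration). Both versions compute
+the same number; the difference is which values end up adjacent in memory:
+
+```rust
+// Array-of-structs: each velocity sits next to fields we don't need,
+// so a pass over velocities drags the whole struct through the cache.
+struct Particle {
+    position: [f64; 3],
+    velocity: f64,
+    mass: f64,
+}
+
+// Struct-of-arrays: velocities are contiguous, so the same pass
+// touches far fewer cache lines.
+struct Particles {
+    positions: Vec<[f64; 3]>,
+    velocities: Vec<f64>,
+    masses: Vec<f64>,
+}
+
+fn total_velocity_aos(particles: &[Particle]) -> f64 {
+    particles.iter().map(|p| p.velocity).sum()
+}
+
+fn total_velocity_soa(particles: &Particles) -> f64 {
+    particles.velocities.iter().sum()
+}
+
+fn main() {
+    let aos: Vec<Particle> = (0..1_000)
+        .map(|i| Particle {
+            position: [0.0; 3],
+            velocity: i as f64,
+            mass: 1.0,
+        })
+        .collect();
+    let soa = Particles {
+        positions: vec![[0.0; 3]; 1_000],
+        velocities: (0..1_000).map(|i| i as f64).collect(),
+        masses: vec![1.0; 1_000],
+    };
+
+    // Same answer either way; the difference shows up in cache behavior,
+    // which Cachegrind and `perf` can measure.
+    assert_eq!(total_velocity_aos(&aos), total_velocity_soa(&soa));
+}
+```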
+ +**Just-In-Time Compilation**: Languages that are compiled on the fly (LuaJIT, C#, Java, PyPy) are +great because they optimize your program for how it's actually being used, rather than how a +compiler expects it to be used. However, there's a variance problem if the program stops executing +while waiting for translation from VM bytecode to native code. As a remedy, many languages support +ahead-of-time compilation in addition to the JIT versions +([CoreRT](https://github.com/dotnet/corert) in C# and [GraalVM](https://www.graalvm.org/) in Java). +On the other hand, LLVM supports +[Profile Guided Optimization](https://clang.llvm.org/docs/UsersManual.html#profile-guided-optimization), +which theoretically brings JIT benefits to non-JIT languages. Finally, be careful to avoid comparing +apples and oranges during benchmarks; you don't want your code to suddenly speed up because the JIT +compiler kicked in. + +**Programming Tricks**: These won't make or break performance, but can be useful in specific +circumstances. For example, C++ can use +[templates instead of branches](https://www.youtube.com/watch?v=NH1Tta7purM&feature=youtu.be&t=1206) +in critical sections. + +## Kernel + +Code you wrote is almost certainly not the _only_ code running on your hardware. There are many ways +the operating system interacts with your program, from interrupts to system calls, that are +important to watch for. These are written from a Linux perspective, but Windows does typically have +equivalent functionality. + +**Scheduling**: The kernel is normally free to schedule any process on any core, so it's important +to reserve CPU cores exclusively for the important programs. There are a few parts to this: first, +limit the CPU cores that non-critical processes are allowed to run on by excluding cores from +scheduling +([`isolcpus`](https://www.linuxtopia.org/online_books/linux_kernel/kernel_configuration/re46.html) +kernel command-line option), or by setting the `init` process CPU affinity +([`systemd` example](https://access.redhat.com/solutions/2884991)). Second, set critical processes +to run on the isolated cores by setting the +[processor affinity](https://en.wikipedia.org/wiki/Processor_affinity) using +[taskset](https://linux.die.net/man/1/taskset). Finally, use +[`NO_HZ`](https://github.com/torvalds/linux/blob/master/Documentation/timers/NO_HZ.txt) or +[`chrt`](https://linux.die.net/man/1/chrt) to disable scheduling interrupts. Turning off +hyper-threading is also likely beneficial. + +**System calls**: Reading from a UNIX socket? Writing to a file? In addition to not knowing how long +the I/O operation takes, these all trigger expensive +[system calls (syscalls)](https://en.wikipedia.org/wiki/System_call). To handle these, the CPU must +[context switch](https://en.wikipedia.org/wiki/Context_switch) to the kernel, let the kernel +operation complete, then context switch back to your program. We'd rather keep these +[to a minimum](https://www.destroyallsoftware.com/talks/the-birth-and-death-of-javascript) (see +timestamp 18:20). [Strace](https://linux.die.net/man/1/strace) is your friend for understanding when +and where syscalls happen. + +**Signal Handling**: Far less likely to be an issue, but signals do trigger a context switch if your +code has a handler registered. This will be highly dependent on the application, but you can +[block signals](https://www.linuxprogrammingblog.com/all-about-linux-signals?page=show#Blocking_signals) +if it's an issue. 
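+
+To make "block signals" a bit more concrete, here's a minimal Linux-only sketch using the
+[libc](https://crates.io/crates/libc) crate to mask `SIGINT` on a worker thread before it starts
+latency-critical work (the function name is mine, and error handling is omitted):
+
+```rust
+// Assumes a `libc = "0.2"` dependency in Cargo.toml
+use std::mem::MaybeUninit;
+
+fn block_sigint_on_this_thread() {
+    unsafe {
+        let mut set = MaybeUninit::<libc::sigset_t>::uninit();
+        libc::sigemptyset(set.as_mut_ptr());
+        libc::sigaddset(set.as_mut_ptr(), libc::SIGINT);
+        // Signals masked here are never delivered to this thread, so its
+        // hot loop can't be interrupted by a handler running on it.
+        libc::pthread_sigmask(libc::SIG_BLOCK, set.as_ptr(), std::ptr::null_mut());
+    }
+}
+
+fn main() {
+    let worker = std::thread::spawn(|| {
+        block_sigint_on_this_thread();
+        // ... latency-critical work happens here ...
+    });
+    worker.join().unwrap();
+}
+```
+
+Process-directed signals will then be delivered to some other thread that hasn't masked them,
+keeping the critical thread free of surprise handler invocations.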
+ +**Interrupts**: System interrupts are how devices connected to your computer notify the CPU that +something has happened. The CPU will then choose a processor core to pause and context switch to the +OS to handle the interrupt. Make sure that +[SMP affinity](http://www.alexonlinux.com/smp-affinity-and-proper-interrupt-handling-in-linux) is +set so that interrupts are handled on a CPU core not running the program you care about. + +**[NUMA](https://www.kernel.org/doc/html/latest/vm/numa.html)**: While NUMA is good at making +multi-cell systems transparent, there are variance implications; if the kernel moves a process +across nodes, future memory accesses must wait for the controller on the original node. Use +[numactl](https://linux.die.net/man/8/numactl) to handle memory-/cpu-cell pinning so this doesn't +happen. + +## Hardware + +**CPU Pipelining/Speculation**: Speculative execution in modern processors gave us vulnerabilities +like Spectre, but it also gave us performance improvements like +[branch prediction](https://stackoverflow.com/a/11227902/1454178). And if the CPU mis-speculates +your code, there's variance associated with rewind and replay. While the compiler knows a lot about +how your CPU [pipelines instructions](https://youtu.be/nAbCKa0FzjQ?t=4467), code can be +[structured to help](https://www.youtube.com/watch?v=NH1Tta7purM&feature=youtu.be&t=755) the branch +predictor. + +**Paging**: For most systems, virtual memory is incredible. Applications live in their own worlds, +and the CPU/[MMU](https://en.wikipedia.org/wiki/Memory_management_unit) figures out the details. +However, there's a variance penalty associated with memory paging and caching; if you access more +memory pages than the [TLB](https://en.wikipedia.org/wiki/Translation_lookaside_buffer) can store, +you'll have to wait for the page walk. Kernel perf tools are necessary to figure out if this is an +issue, but using [huge pages](https://blog.pythian.com/performance-tuning-hugepages-in-linux/) can +reduce TLB burdens. Alternately, running applications in a hypervisor like +[Jailhouse](https://github.com/siemens/jailhouse) allows one to skip virtual memory entirely, but +this is probably more work than the benefits are worth. + +**Network Interfaces**: When more than one computer is involved, variance can go up dramatically. +Tuning kernel +[network parameters](https://github.com/leandromoreira/linux-network-performance-parameters) may be +helpful, but modern systems more frequently opt to skip the kernel altogether with a technique +called [kernel bypass](https://blog.cloudflare.com/kernel-bypass/). This typically requires +specialized hardware and [drivers](https://www.openonload.org/), but even industries like +[telecom](https://www.bbc.co.uk/rd/blog/2018-04-high-speed-networking-open-source-kernel-bypass) are +finding the benefits. + +## Networks + +**Routing**: There's a reason financial firms are willing to pay +[millions of euros](https://sniperinmahwah.wordpress.com/2019/03/26/4-les-moeres-english-version/) +for rights to a small plot of land - having a straight-line connection from point A to point B means +the path their data takes is the shortest possible. In contrast, there are currently 6 computers in +between me and Google, but that may change at any moment if my ISP realizes a +[more efficient route](https://en.wikipedia.org/wiki/Border_Gateway_Protocol) is available. 
Whether +it's using +[research-quality equipment](https://sniperinmahwah.wordpress.com/2018/05/07/shortwave-trading-part-i-the-west-chicago-tower-mystery/) +for shortwave radio, or just making sure there's no data inadvertently going between data centers, +routing matters. + +**Protocol**: TCP as a network protocol is awesome: guaranteed and in-order delivery, flow control, +and congestion control all built in. But these attributes make the most sense when networking +infrastructure is lossy; for systems that expect nearly all packets to be delivered correctly, the +setup handshaking and packet acknowledgment are just overhead. Using UDP (unicast or multicast) may +make sense in these contexts as it avoids the chatter needed to track connection state, and +[gap-fill](https://iextrading.com/docs/IEX%20Transport%20Specification.pdf) +[strategies](http://www.nasdaqtrader.com/content/technicalsupport/specifications/dataproducts/moldudp64.pdf) +can handle the rest. + +**Switching**: Many routers/switches handle packets using "store-and-forward" behavior: wait for the +whole packet, validate checksums, and then send to the next device. In variance terms, the time +needed to move data between two nodes is proportional to the size of that data; the switch must +"store" all data before it can calculate checksums and "forward" to the next node. With +["cut-through"](https://www.networkworld.com/article/2241573/latency-and-jitter--cut-through-design-pays-off-for-arista--blade.html) +designs, switches will begin forwarding data as soon as they know where the destination is, +checksums be damned. This means there's a fixed cost (at the switch) for network traffic, no matter +the size. + +# Final Thoughts + +High-performance systems, regardless of industry, are not magical. They do require extreme precision +and attention to detail, but they're designed, built, and operated by regular people, using a lot of +tools that are publicly available. Interested in seeing how context switching affects performance of +your benchmarks? `taskset` should be installed in all modern Linux distributions, and can be used to +make sure the OS never migrates your process. Curious how often garbage collection triggers during a +crucial operation? Your language of choice will typically expose details of its operations +([Python](https://docs.python.org/3/library/gc.html), +[Java](https://www.oracle.com/technetwork/java/javase/tech/vmoptions-jsp-140102.html#DebuggingOptions)). +Want to know how hard your program is stressing the TLB? Use `perf record` and look for +`dtlb_load_misses.miss_causes_a_walk`. + +Two final guiding questions, then: first, before attempting to apply some of the technology above to +your own systems, can you first identify +[where/when you care](http://wiki.c2.com/?PrematureOptimization) about "high-performance"? As an +example, if parts of a system rely on humans pushing buttons, CPU pinning won't have any measurable +effect. Humans are already far too slow to react in time. Second, if you're using benchmarks, are +they being designed in a way that's actually helpful? Tools like +[Criterion](http://www.serpentine.com/criterion/) (also in +[Rust](https://github.com/bheisler/criterion.rs)) and Google's +[Benchmark](https://github.com/google/benchmark) output not only average run time, but variance as +well; your benchmarking environment is subject to the same concerns your production environment is. + +Finally, I believe high-performance systems are a matter of philosophy, not necessarily technique. 
+Rigorous focus on variance is the first step, and there are plenty of ways to measure and mitigate +it; once that's at an acceptable level, then optimize for speed. diff --git a/_posts/2019-09-28-binary-format-shootout.md b/_posts/2019-09-28-binary-format-shootout.md new file mode 100644 index 0000000..675dc37 --- /dev/null +++ b/_posts/2019-09-28-binary-format-shootout.md @@ -0,0 +1,263 @@ +--- +layout: post +title: "Binary Format Shootout" +description: "Cap'n Proto vs. Flatbuffers vs. SBE" +category: +tags: [rust] +--- + +I've found that in many personal projects, +[analysis paralysis](https://en.wikipedia.org/wiki/Analysis_paralysis) is particularly deadly. +Making good decisions in the beginning avoids pain and suffering later; if extra research prevents +future problems, I'm happy to continue ~~procrastinating~~ researching indefinitely. + +So let's say you're in need of a binary serialization format. Data will be going over the network, +not just in memory, so having a schema document and code generation is a must. Performance is +crucial, so formats that support zero-copy de/serialization are given priority. And the more +languages supported, the better; I use Rust, but can't predict what other languages this could +interact with. + +Given these requirements, the candidates I could find were: + +1. [Cap'n Proto](https://capnproto.org/) has been around the longest, and is the most established +2. [Flatbuffers](https://google.github.io/flatbuffers/) is the newest, and claims to have a simpler + encoding +3. [Simple Binary Encoding](https://github.com/real-logic/simple-binary-encoding) has the simplest + encoding, but the Rust implementation is unmaintained + +Any one of these will satisfy the project requirements: easy to transmit over a network, reasonably +fast, and polyglot support. But how do you actually pick one? It's impossible to know what issues +will follow that choice, so I tend to avoid commitment until the last possible moment. + +Still, a choice must be made. Instead of worrying about which is "the best," I decided to build a +small proof-of-concept system in each format and pit them against each other. All code can be found +in the [repository](https://github.com/speice-io/marketdata-shootout) for this post. + +We'll discuss more in detail, but a quick preview of the results: + +- Cap'n Proto: Theoretically performs incredibly well, the implementation had issues +- Flatbuffers: Has some quirks, but largely lived up to its "zero-copy" promises +- SBE: Best median and worst-case performance, but the message structure has a limited feature set + +# Prologue: Binary Parsing with Nom + +Our benchmark system will be a simple data processor; given depth-of-book market data from +[IEX](https://iextrading.com/trading/market-data/#deep), serialize each message into the schema +format, read it back, and calculate total size of stock traded and the lowest/highest quoted prices. +This test isn't complex, but is representative of the project I need a binary format for. + +But before we make it to that point, we have to actually read in the market data. To do so, I'm +using a library called [`nom`](https://github.com/Geal/nom). Version 5.0 was recently released and +brought some big changes, so this was an opportunity to build a non-trivial program and get +familiar. + +If you don't already know about `nom`, it's a "parser generator". By combining different smaller +parsers, you can assemble a parser to handle complex structures without writing tedious code by +hand. 
For example, when parsing +[PCAP files](https://www.winpcap.org/ntar/draft/PCAP-DumpFileFormat.html#rfc.section.3.3): + +``` + 0 1 2 3 + 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 2 3 4 5 6 7 8 9 0 1 + +---------------------------------------------------------------+ + 0 | Block Type = 0x00000006 | + +---------------------------------------------------------------+ + 4 | Block Total Length | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + 8 | Interface ID | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +12 | Timestamp (High) | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +16 | Timestamp (Low) | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +20 | Captured Len | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ +24 | Packet Len | + +-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+-+ + | Packet Data | + | ... | +``` + +...you can build a parser in `nom` that looks like +[this](https://github.com/speice-io/marketdata-shootout/blob/369613843d39cfdc728e1003123bf87f79422497/src/parsers.rs#L59-L93): + +```rust +const ENHANCED_PACKET: [u8; 4] = [0x06, 0x00, 0x00, 0x00]; +pub fn enhanced_packet_block(input: &[u8]) -> IResult<&[u8], &[u8]> { + let ( + remaining, + ( + block_type, + block_len, + interface_id, + timestamp_high, + timestamp_low, + captured_len, + packet_len, + ), + ) = tuple(( + tag(ENHANCED_PACKET), + le_u32, + le_u32, + le_u32, + le_u32, + le_u32, + le_u32, + ))(input)?; + + let (remaining, packet_data) = take(captured_len)(remaining)?; + Ok((remaining, packet_data)) +} +``` + +While this example isn't too interesting, more complex formats (like IEX market data) are where +[`nom` really shines](https://github.com/speice-io/marketdata-shootout/blob/369613843d39cfdc728e1003123bf87f79422497/src/iex.rs). + +Ultimately, because the `nom` code in this shootout was the same for all formats, we're not too +interested in its performance. Still, it's worth mentioning that building the market data parser was +actually fun; I didn't have to write tons of boring code by hand. + +# Part 1: Cap'n Proto + +Now it's time to get into the meaty part of the story. Cap'n Proto was the first format I tried +because of how long it has supported Rust (thanks to [dwrensha](https://github.com/dwrensha) for +maintaining the Rust port since +[2014!](https://github.com/capnproto/capnproto-rust/releases/tag/rustc-0.10)). However, I had a ton +of performance concerns once I started using it. + +To serialize new messages, Cap'n Proto uses a "builder" object. This builder allocates memory on the +heap to hold the message content, but because builders +[can't be re-used](https://github.com/capnproto/capnproto-rust/issues/111), we have to allocate a +new buffer for every single message. I was able to work around this with a +[special builder](https://github.com/speice-io/marketdata-shootout/blob/369613843d39cfdc728e1003123bf87f79422497/src/capnp_runner.rs#L17-L51) +that could re-use the buffer, but it required reading through Cap'n Proto's +[benchmarks](https://github.com/capnproto/capnproto-rust/blob/master/benchmark/benchmark.rs#L124-L156) +to find an example, and used +[`std::mem::transmute`](https://doc.rust-lang.org/std/mem/fn.transmute.html) to bypass Rust's borrow +checker. + +The process of reading messages was better, but still had issues. Cap'n Proto has two message +encodings: a ["packed"](https://capnproto.org/encoding.html#packing) representation, and an +"unpacked" version. 
When reading "packed" messages, we need a buffer to unpack the message into +before we can use it; Cap'n Proto allocates a new buffer for each message we unpack, and I wasn't +able to figure out a way around that. In contrast, the unpacked message format should be where Cap'n +Proto shines; its main selling point is that there's [no decoding step](https://capnproto.org/). +However, accomplishing zero-copy deserialization required code in the private API +([since fixed](https://github.com/capnproto/capnproto-rust/issues/148)), and we allocate a vector on +every read for the segment table. + +In the end, I put in significant work to make Cap'n Proto as fast as possible, but there were too +many issues for me to feel comfortable using it long-term. + +# Part 2: Flatbuffers + +This is the new kid on the block. After a +[first attempt](https://github.com/google/flatbuffers/pull/3894) didn't pan out, official support +was [recently launched](https://github.com/google/flatbuffers/pull/4898). Flatbuffers intends to +address the same problems as Cap'n Proto: high-performance, polyglot, binary messaging. The +difference is that Flatbuffers claims to have a simpler wire format and +[more flexibility](https://google.github.io/flatbuffers/flatbuffers_benchmarks.html). + +On the whole, I enjoyed using Flatbuffers; the [tooling](https://crates.io/crates/flatc-rust) is +nice, and unlike Cap'n Proto, parsing messages was actually zero-copy and zero-allocation. However, +there were still some issues. + +First, Flatbuffers (at least in Rust) can't handle nested vectors. This is a problem for formats +like the following: + +``` +table Message { + symbol: string; +} +table MultiMessage { + messages:[Message]; +} +``` + +We want to create a `MultiMessage` which contains a vector of `Message`, and each `Message` itself +contains a vector (the `string` type). I was able to work around this by +[caching `Message` elements](https://github.com/speice-io/marketdata-shootout/blob/e9d07d148bf36a211a6f86802b313c4918377d1b/src/flatbuffers_runner.rs#L83) +in a `SmallVec` before building the final `MultiMessage`, but it was a painful process that I +believe contributed to poor serialization performance. + +Second, streaming support in Flatbuffers seems to be something of an +[afterthought](https://github.com/google/flatbuffers/issues/3898). Where Cap'n Proto in Rust handles +reading messages from a stream as part of the API, Flatbuffers just sticks a `u32` at the front of +each message to indicate the size. Not specifically a problem, but calculating message size without +that tag is nigh on impossible. + +Ultimately, I enjoyed using Flatbuffers, and had to do significantly less work to make it perform +well. + +# Part 3: Simple Binary Encoding + +Support for SBE was added by the author of one of my favorite +[Rust blog posts](https://web.archive.org/web/20190427124806/https://polysync.io/blog/session-types-for-hearty-codecs/). +I've [talked previously]({% post_url 2019-06-31-high-performance-systems %}) about how important +variance is in high-performance systems, so it was encouraging to read about a format that +[directly addressed](https://github.com/real-logic/simple-binary-encoding/wiki/Why-Low-Latency) my +concerns. SBE has by far the simplest binary format, but it does make some tradeoffs. + +Both Cap'n Proto and Flatbuffers use [message offsets](https://capnproto.org/encoding.html#structs) +to handle variable-length data, [unions](https://capnproto.org/language.html#unions), and various +other features. 
In contrast, messages in SBE are essentially +[just structs](https://github.com/real-logic/simple-binary-encoding/blob/master/sbe-samples/src/main/resources/example-schema.xml); +variable-length data is supported, but there's no union type. + +As mentioned in the beginning, the Rust port of SBE works well, but is +[essentially unmaintained](https://users.rust-lang.org/t/zero-cost-abstraction-frontier-no-copy-low-allocation-ordered-decoding/11515/9). +However, if you don't need union types, and can accept that schemas are XML documents, it's still +worth using. SBE's implementation had the best streaming support of all formats I tested, and +doesn't trigger allocation during de/serialization. + +# Results + +After building a test harness +[for](https://github.com/speice-io/marketdata-shootout/blob/master/src/capnp_runner.rs) +[each](https://github.com/speice-io/marketdata-shootout/blob/master/src/flatbuffers_runner.rs) +[format](https://github.com/speice-io/marketdata-shootout/blob/master/src/sbe_runner.rs), it was +time to actually take them for a spin. I used +[this script](https://github.com/speice-io/marketdata-shootout/blob/master/run_shootout.sh) to run +the benchmarks, and the raw results are +[here](https://github.com/speice-io/marketdata-shootout/blob/master/shootout.csv). All data reported +below is the average of 10 runs on a single day of IEX data. Results were validated to make sure +that each format parsed the data correctly. + +## Serialization + +This test measures, on a +[per-message basis](https://github.com/speice-io/marketdata-shootout/blob/master/src/main.rs#L268-L272), +how long it takes to serialize the IEX message into the desired format and write to a pre-allocated +buffer. + +| Schema | Median | 99th Pctl | 99.9th Pctl | Total | +| :------------------- | :----- | :-------- | :---------- | :----- | +| Cap'n Proto Packed | 413ns | 1751ns | 2943ns | 14.80s | +| Cap'n Proto Unpacked | 273ns | 1828ns | 2836ns | 10.65s | +| Flatbuffers | 355ns | 2185ns | 3497ns | 14.31s | +| SBE | 91ns | 1535ns | 2423ns | 3.91s | + +## Deserialization + +This test measures, on a +[per-message basis](https://github.com/speice-io/marketdata-shootout/blob/master/src/main.rs#L294-L298), +how long it takes to read the previously-serialized message and perform some basic aggregation. The +aggregation code is the same for each format, so any performance differences are due solely to the +format implementation. + +| Schema | Median | 99th Pctl | 99.9th Pctl | Total | +| :------------------- | :----- | :-------- | :---------- | :----- | +| Cap'n Proto Packed | 539ns | 1216ns | 2599ns | 18.92s | +| Cap'n Proto Unpacked | 366ns | 737ns | 1583ns | 12.32s | +| Flatbuffers | 173ns | 421ns | 1007ns | 6.00s | +| SBE | 116ns | 286ns | 659ns | 4.05s | + +# Conclusion + +Building a benchmark turned out to be incredibly helpful in making a decision; because a "union" +type isn't important to me, I can be confident that SBE best addresses my needs. + +While SBE was the fastest in terms of both median and worst-case performance, its worst case +performance was proportionately far higher than any other format. It seems to be that +de/serialization time scales with message size, but I'll need to do some more research to understand +what exactly is going on. 
diff --git a/_posts/2019-12-14-release-the-gil.md b/_posts/2019-12-14-release-the-gil.md new file mode 100644 index 0000000..00b47a6 --- /dev/null +++ b/_posts/2019-12-14-release-the-gil.md @@ -0,0 +1,370 @@ +--- +layout: post +title: "Release the GIL" +description: "Strategies for Parallelism in Python" +category: +tags: [python] +--- + +Complaining about the [Global Interpreter Lock](https://wiki.python.org/moin/GlobalInterpreterLock) +(GIL) seems like a rite of passage for Python developers. It's easy to criticize a design decision +made before multi-core CPU's were widely available, but the fact that it's still around indicates +that it generally works [Good](https://wiki.c2.com/?PrematureOptimization) +[Enough](https://wiki.c2.com/?YouArentGonnaNeedIt). Besides, there are simple and effective +workarounds; it's not hard to start a +[new process](https://docs.python.org/3/library/multiprocessing.html) and use message passing to +synchronize code running in parallel. + +Still, wouldn't it be nice to have more than a single active interpreter thread? In an age of +asynchronicity and _M:N_ threading, Python seems lacking. The ideal scenario is to take advantage of +both Python's productivity and the modern CPU's parallel capabilities. + +Presented below are two strategies for releasing the GIL's icy grip without giving up on what makes +Python a nice language to start with. Bear in mind: these are just the tools, no claim is made about +whether it's a good idea to use them. Very often, unlocking the GIL is an +[XY problem](https://en.wikipedia.org/wiki/XY_problem); you want application performance, and the +GIL seems like an obvious bottleneck. Remember that any gains from running code in parallel come at +the expense of project complexity; messing with the GIL is ultimately messing with Python's memory +model. + +```python +%load_ext Cython +from numba import jit + +N = 1_000_000_000 +``` + +# Cython + +Put simply, [Cython](https://cython.org/) is a programming language that looks a lot like Python, +gets [transpiled](https://en.wikipedia.org/wiki/Source-to-source_compiler) to C/C++, and integrates +well with the [CPython](https://en.wikipedia.org/wiki/CPython) API. It's great for building Python +wrappers to C and C++ libraries, writing optimized code for numerical processing, and tons more. And +when it comes to managing the GIL, there are two special features: + +- The `nogil` + [function annotation](https://cython.readthedocs.io/en/latest/src/userguide/external_C_code.html#declaring-a-function-as-callable-without-the-gil) + asserts that a Cython function is safe to use without the GIL, and compilation will fail if it + interacts with Python in an unsafe manner +- The `with nogil` + [context manager](https://cython.readthedocs.io/en/latest/src/userguide/external_C_code.html#releasing-the-gil) + explicitly unlocks the CPython GIL while active + +Whenever Cython code runs inside a `with nogil` block on a separate thread, the Python interpreter +is unblocked and allowed to continue work elsewhere. We'll define a "busy work" function that +demonstrates this principle in action: + +```python +%%cython + +# Annotating a function with `nogil` indicates only that it is safe +# to call in a `with nogil` block. It *does not* release the GIL. 
+cdef unsigned long fibonacci(unsigned long n) nogil: + if n <= 1: + return n + + cdef unsigned long a = 0, b = 1, c = 0 + + c = a + b + for _i in range(2, n): + a = b + b = c + c = a + b + + return c + + +def cython_nogil(unsigned long n): + # Explicitly release the GIL while running `fibonacci` + with nogil: + value = fibonacci(n) + + return value + + +def cython_gil(unsigned long n): + # Because the GIL is not explicitly released, it implicitly + # remains acquired when running the `fibonacci` function + return fibonacci(n) +``` + +First, let's time how long it takes Cython to calculate the billionth Fibonacci number: + +```python +%%time +_ = cython_gil(N); +``` + +>
+> CPU times: user 365 ms, sys: 0 ns, total: 365 ms
+> Wall time: 372 ms
+> 
+ +```python +%%time +_ = cython_nogil(N); +``` + +>
+> CPU times: user 381 ms, sys: 0 ns, total: 381 ms
+> Wall time: 388 ms
+> 
+ +Both versions (with and without GIL) take effectively the same amount of time to run. Even when +running this calculation in parallel on separate threads, it is expected that the run time will +double because only one thread can be active at a time: + +```python +%%time +from threading import Thread + +# Create the two threads to run on +t1 = Thread(target=cython_gil, args=[N]) +t2 = Thread(target=cython_gil, args=[N]) +# Start the threads +t1.start(); t2.start() +# Wait for the threads to finish +t1.join(); t2.join() +``` + +>
+> CPU times: user 641 ms, sys: 5.62 ms, total: 647 ms
+> Wall time: 645 ms
+> 
+ +However, if the first thread releases the GIL, the second thread is free to acquire it and run in +parallel: + +```python +%%time + +t1 = Thread(target=cython_nogil, args=[N]) +t2 = Thread(target=cython_gil, args=[N]) +t1.start(); t2.start() +t1.join(); t2.join() +``` + +>
+> CPU times: user 717 ms, sys: 372 µs, total: 718 ms
+> Wall time: 358 ms
+> 
+ +Because `user` time represents the sum of processing time on all threads, it doesn't change much. +The ["wall time"](https://en.wikipedia.org/wiki/Elapsed_real_time) has been cut roughly in half +because each function is running simultaneously. + +Keep in mind that the **order in which threads are started** makes a difference! + +```python +%%time + +# Note that the GIL-locked version is started first +t1 = Thread(target=cython_gil, args=[N]) +t2 = Thread(target=cython_nogil, args=[N]) +t1.start(); t2.start() +t1.join(); t2.join() +``` + +>
+> CPU times: user 667 ms, sys: 0 ns, total: 667 ms
+> Wall time: 672 ms
+> 
+ +Even though the second thread releases the GIL while running, it can't start until the first has +completed. Thus, the overall runtime is effectively the same as running two GIL-locked threads. + +Finally, be aware that attempting to unlock the GIL from a thread that doesn't own it will crash the +**interpreter**, not just the thread attempting the unlock: + +```python +%%cython + +cdef int cython_recurse(int n) nogil: + if n <= 0: + return 0 + + with nogil: + return cython_recurse(n - 1) + +cython_recurse(2) +``` + +>
+> Fatal Python error: PyEval_SaveThread: NULL tstate
+> 
+> Thread 0x00007f499effd700 (most recent call first):
+>   File "/home/bspeice/.virtualenvs/release-the-gil/lib/python3.7/site-packages/ipykernel/parentpoller.py", line 39 in run
+>   File "/usr/lib/python3.7/threading.py", line 926 in _bootstrap_inner
+>   File "/usr/lib/python3.7/threading.py", line 890 in _bootstrap
+> 
+ +In practice, avoiding this issue is simple. First, `nogil` functions probably shouldn't contain +`with nogil` blocks. Second, Cython can +[conditionally acquire/release](https://cython.readthedocs.io/en/latest/src/userguide/external_C_code.html#conditional-acquiring-releasing-the-gil) +the GIL, so these conditions can be used to synchronize access. Finally, Cython's documentation for +[external C code](https://cython.readthedocs.io/en/latest/src/userguide/external_C_code.html#acquiring-and-releasing-the-gil) +contains more detail on how to safely manage the GIL. + +To conclude: use Cython's `nogil` annotation to assert that functions are safe for calling when the +GIL is unlocked, and `with nogil` to actually unlock the GIL and run those functions. + +# Numba + +Like Cython, [Numba](https://numba.pydata.org/) is a "compiled Python." Where Cython works by +compiling a Python-like language to C/C++, Numba compiles Python bytecode _directly to machine code_ +at runtime. Behavior is controlled with a special `@jit` decorator; calling a decorated function +first compiles it to machine code before running. Calling the function a second time re-uses that +machine code unless the argument types have changed. + +Numba works best when a `nopython=True` argument is added to the `@jit` decorator; functions +compiled in [`nopython`](http://numba.pydata.org/numba-doc/latest/user/jit.html?#nopython) mode +avoid the CPython API and have performance comparable to C. Further, adding `nogil=True` to the +`@jit` decorator unlocks the GIL while that function is running. Note that `nogil` and `nopython` +are separate arguments; while it is necessary for code to be compiled in `nopython` mode in order to +release the lock, the GIL will remain locked if `nogil=False` (the default). + +Let's repeat the same experiment, this time using Numba instead of Cython: + +```python +# The `int` type annotation is only for humans and is ignored +# by Numba. +@jit(nopython=True, nogil=True) +def numba_nogil(n: int) -> int: + if n <= 1: + return n + + a = 0 + b = 1 + + c = a + b + for _i in range(2, n): + a = b + b = c + c = a + b + + return c + + +# Run using `nopython` mode to receive a performance boost, +# but GIL remains locked due to `nogil=False` by default. +@jit(nopython=True) +def numba_gil(n: int) -> int: + if n <= 1: + return n + + a = 0 + b = 1 + + c = a + b + for _i in range(2, n): + a = b + b = c + c = a + b + + return c + + +# Call each function once to force compilation; we don't want +# the timing statistics to include how long it takes to compile. +numba_nogil(N) +numba_gil(N); +``` + +We'll perform the same tests as above; first, figure out how long it takes the function to run: + +```python +%%time +_ = numba_gil(N) +``` + +>
+> CPU times: user 253 ms, sys: 258 µs, total: 253 ms
+> Wall time: 251 ms
+> 
+ + +Aside: it's not immediately clear why Numba takes ~20% less time to run than Cython for code that should be +effectively identical after compilation. + + +When running two GIL-locked threads, the result (as expected) takes around twice as long to compute: + +```python +%%time +t1 = Thread(target=numba_gil, args=[N]) +t2 = Thread(target=numba_gil, args=[N]) +t1.start(); t2.start() +t1.join(); t2.join() +``` + +>
+> CPU times: user 541 ms, sys: 3.96 ms, total: 545 ms
+> Wall time: 541 ms
+> 
+ +But if the GIL-unlocking thread starts first, both threads run in parallel: + +```python +%%time +t1 = Thread(target=numba_nogil, args=[N]) +t2 = Thread(target=numba_gil, args=[N]) +t1.start(); t2.start() +t1.join(); t2.join() +``` + +>
+> CPU times: user 551 ms, sys: 7.77 ms, total: 559 ms
+> Wall time: 279 ms
+> 
+ +Just like Cython, starting the GIL-locked thread first leads to poor performance: + +```python +%%time +t1 = Thread(target=numba_gil, args=[N]) +t2 = Thread(target=numba_nogil, args=[N]) +t1.start(); t2.start() +t1.join(); t2.join() +``` + +>
+> CPU times: user 524 ms, sys: 0 ns, total: 524 ms
+> Wall time: 522 ms
+> 
+ +Finally, unlike Cython, Numba will unlock the GIL if and only if it is currently acquired; +recursively calling `@jit(nogil=True)` functions is perfectly safe: + +```python +from numba import jit + +@jit(nopython=True, nogil=True) +def numba_recurse(n: int) -> int: + if n <= 0: + return 0 + + return numba_recurse(n - 1) + +numba_recurse(2); +``` + +# Conclusion + +Before finishing, it's important to address pain points that will show up if these techniques are +used in a more realistic project: + +First, code running in a GIL-free context will likely also need non-trivial data structures; +GIL-free functions aren't useful if they're constantly interacting with Python objects whose access +requires the GIL. Cython provides +[extension types](http://docs.cython.org/en/latest/src/tutorial/cdef_classes.html) and Numba +provides a [`@jitclass`](https://numba.pydata.org/numba-doc/dev/user/jitclass.html) decorator to +address this need. + +Second, building and distributing applications that make use of Cython/Numba can be complicated. +Cython packages require running the compiler, (potentially) linking/packaging external dependencies, +and distributing a binary wheel. Numba is generally simpler because the code being distributed is +pure Python, but can be tricky since errors aren't detected until runtime. + +Finally, while unlocking the GIL is often a solution in search of a problem, both Cython and Numba +provide tools to directly manage the GIL when appropriate. This enables true parallelism (not just +[concurrency](https://stackoverflow.com/a/1050257)) that is impossible in vanilla Python. diff --git a/_posts/2022-11-20-webpack-industrial-complex.md b/_posts/2022-11-20-webpack-industrial-complex.md new file mode 100644 index 0000000..33fe67a --- /dev/null +++ b/_posts/2022-11-20-webpack-industrial-complex.md @@ -0,0 +1,60 @@ +--- +layout: post +title: "The webpack industrial complex" +description: "Reflections on a new project" +category: +tags: [webpack, react, vite] +--- + +This started because I wanted to build a synthesizer. Setting a goal of "digital DX7" was ambitious, but I needed something unrelated to the day job. Beyond that, working with audio seemed like a good challenge. I enjoy performance-focused code, and performance problems in audio are conspicuous. Building a web project was an obvious choice because of the web audio API documentation and independence from a large Digital Audio Workstation (DAW). + +The project was soon derailed trying to sort out technical issues unrelated to the original purpose. Finding a resolution was a frustrating journey, and it's still not clear whether those problems were my fault. As a result, I'm writing this to try making sense of it, as a case study/reference material, and to salvage something from the process. + +## Starting strong + +The sole starting requirement was to write everything in TypeScript. Not because of project scale, but because guardrails help with unfamiliar territory. Keeping that in mind, the first question was: how does one start a new project? All I actually need is "compile TypeScript, show it in a browser." + +Create React App (CRA) came to the rescue and the rest of that evening was a joy. My TypeScript/JavaScript skills were rusty, but the online documentation was helpful. I had never understood the appeal of JSX (why put a DOM in JavaScript?) until it made connecting an `onEvent` handler and a function easy. 
+ +Some quick dimensional analysis later and there was a sine wave oscillator playing A=440 through the speakers. I specifically remember thinking "modern browsers are magical." + +## Continuing on + +Now comes the first mistake: I began to worry about "scale" before encountering an actual problem. Rather than rendering audio in the main thread, why not use audio worklets and render in a background thread instead? + +The first sign something was amiss came from the TypeScript compiler errors showing the audio worklet API [was missing](https://github.com/microsoft/TypeScript/issues/28308). After searching out Github issues and (unsuccessfully) tweaking the `.tsconfig` settings, I settled on installing a package and moving on. + +The next problem came from actually using the API. Worklets must load from separate "modules," but it wasn't clear how to guarantee the worklet code stayed separate from the application. I saw recommendations to use `new URL(, import.meta.url)` and it worked! Well, kind of: + +![Browser error](/assets/images/2022-11-20-video_mp2t.png) + +That file has the audio processor code, so why does it get served with `Content-Type: video/mp2t`? + +## Floundering about + +Now comes the second mistake: even though I didn't understand the error, I ignored recommendations to [just use JavaScript](https://hackernoon.com/implementing-audioworklets-with-react-8a80a470474) and stuck by the original TypeScript requirement. + +I tried different project structures. Moving the worklet code to a new folder didn't help, nor did setting up a monorepo and placing it in a new package. + +I tried three different CRA tools - `react-app-rewired`, `craco`, `customize-react-app` - but got the same problem. Each has varying levels of compatibility with recent CRA versions, so it wasn't clear if I had the right solution but implemented it incorrectly. After attempting to eject the application and panicking after seeing the configuration, I abandoned that as well. + +I tried changing the webpack configuration: using [new](https://github.com/webpack/webpack/issues/11543#issuecomment-917673256) [loaders](https://github.com/popelenkow/worker-url), setting [asset rules](https://github.com/webpack/webpack/discussions/14093#discussioncomment-1257149), even [changing how webpack detects worker resources](https://github.com/webpack/webpack/issues/11543#issuecomment-826897590). In hindsight, entry points may have been the answer. But because CRA actively resists attempts to change its webpack configuration, and I couldn't find audio worklet examples in any other framework, I gave up. + +I tried so many application frameworks. Next.js looked like a good candidate, but added its own [bespoke webpack complexity](https://github.com/vercel/next.js/issues/24907) to the existing confusion. Astro had the best "getting started" experience, but I refuse to install an IDE-specific plugin. I first used Deno while exploring Lume, but it couldn't import the audio worklet types (maybe because of module compatibility?). Each framework was unique in its own way (shout-out to SvelteKit) but I couldn't figure out how to make them work. + +## Learning and reflecting + +I ended up using Vite and vite-plugin-react-pages to handle both "build the app" and "bundle worklets," but the specific tool choice isn't important. Instead, the focus should be on lessons learned. + +For myself: + +- I'm obsessed with tooling, to the point it can derail the original goal. 
While it comes from a good place (for example: "types are awesome"), it can get in the way of more important work +- I tend to reach for online resources right after seeing a new problem. While finding help online is often faster, spending time understanding the problem would have been more productive than cycling through (often outdated) blog posts + +For the tools: + +- Resource bundling is great and solves a genuine challenge. I've heard too many horror stories of developers writing modules by hand to believe this is unnecessary complexity +- Webpack is a build system and modern frameworks are deeply dependent on it (hence the "webpack industrial complex"). While this often saves users from unnecessary complexity, there's no path forward if something breaks +- There's little ability to mix and match tools across frameworks. Next.js and Gatsby let users extend webpack, but because each framework adds its own modules, changes aren't portable. After spending a week looking at webpack, I had an example running with parcel in thirty minutes, but couldn't integrate it + +In the end, learning new systems is fun, but a focus on tools that "just work" can leave users out in the cold if they break down. \ No newline at end of file diff --git a/assets/css/fonts.css b/assets/css/fonts.css new file mode 100644 index 0000000..e07ecfc --- /dev/null +++ b/assets/css/fonts.css @@ -0,0 +1,15 @@ +@font-face { + font-family: 'JetBrains Mono'; + src: url('/assets/font/JetBrainsMono-Regular.woff2') format('woff2'), + url('/assets/font/JetBrainsMono-Regular.woff') format('woff'); + font-weight: normal; + font-style: normal; +} + +@font-face { + font-family: 'Lato'; + src: url('/assets/font/lato-regular-webfont.woff2') format('woff2'), + url('/assets/font/lato-regular-webfont.woff') format('woff'); + font-weight: normal; + font-style: normal; +} \ No newline at end of file diff --git a/assets/css/style.scss b/assets/css/style.scss new file mode 100644 index 0000000..ea280e2 --- /dev/null +++ b/assets/css/style.scss @@ -0,0 +1,119 @@ +--- +--- + +// Import the theme rules +@import "theme"; + +body { + max-width: 100%; + overflow-x: hidden; + font-family: 'Lato', sans-serif; +} + +.navbar { + color: $gray; +} + +.separator { + margin-right: .45rem; + margin-left: .25rem; + color: #000; + &:after { + content: '\00a0/'; + } +} + +header { + padding-top: 80px; + padding-bottom: 0; +}; + +header h1,h2 { + color: #000; +} + +.post-description { + color: #555; +} + +.post-container a { + color: #555; + border-bottom-color: $gray; + border-bottom-style: dotted; + border-bottom-width: 1px; + + position: relative; + display: inline-block; + padding: 1px 1px; + transition: color ease 0.3s; + + &::after { + content: ''; + position: absolute; + z-index: -1; + width: 100%; + height: 0%; + left: 0; + bottom: 0; + background-color: $gray; + transition: all ease 0.3s; + } + + &:hover { + color: #fff; + border-bottom-style: solid; + &::after { + height: 100%; + } + } +} + +body pre { + font-size: 15px; +} + +pre.highlight, code { + font-family: 'JetBrains Mono', monospace; +} + +div.highlighter-rouge { + // Default theme uses `width: 100vw`, which while cool, does cause the page + // to exceed screen width and trigger horizontal scrolling. No bueno. 
+ width: 99vw; +} + +.post-date { + // On the front page, make sure titles don't force wrapping the date box content + text-align: right; + white-space: nowrap; +} + +blockquote { + color: #555; + right: 100px; + margin-left: 0; + padding-left: 1.8rem; + border-left: 5px solid $gray; +} + +.post-nav { + /* Insert your custom styling here. Example: + + font-size: 14px; + */ + display: flex; + margin-top: 1em; + margin-bottom: 1em; +} +.post-nav div { + /* flex-grow, flex-shrink, flex-basis */ + flex: 1 1 0; +} +.post-nav-next { + text-align: right; +} + +th, td { + border-bottom: 1px solid $gray; + padding: 0.75em; +} diff --git a/assets/font/JetBrainsMono-Regular.woff b/assets/font/JetBrainsMono-Regular.woff new file mode 100644 index 0000000000000000000000000000000000000000..dc1d85f5706143d7b21492f7d5c296ec3db13b9f GIT binary patch literal 59368 zcmY&eFZM zs&SKXgnoFv007n!0N}Wd+~2<;r$qmA9~kzBqwxbh zxz}$=Lu-9I0068K0003706;kc+ur94U7QF307R`Hj`9z33HA_%P3%mq0RYHa008_B z0Dwo!W<#4d)pz{SPDTE3ApZ}DrdIAI00731-=aK|2-Sw4@kZz0h?ylPHsQ`%znK5cn9~W8jMA^vNinSA^tb7)eoH9cqam^ z_1%8v1-ke@Eg@teB({ydwJ`wDx%#6&`r(mKt-f{J**ZD_06o0`0NljSG%HK)A#8RI z#y?tw!XItx50-tp5@Fmj4fTxl^Z@@Zmb)o$KiG7!4$^c1qL?944<6Mem)$Y21kR(Rqc@&D=g9;WJE;Q#dc;d+_9z2m+8i$kb=y}i3|d06|HPFUJln5nJm zoN1V8es^%Nzt3P{kt1Q{cMl75d+q%E&-+Y{l)m{G85nX54jlFLQsLtb6%E?~g{YXT z0KQ4M;Qzc;DalEo>ZAJ7y2Zi8J=H@>w?am=5=PWRM%2kikPgX5!UxtvBD+z5A~>Jz zidO!jFb^ZSnMHQ18_+2XlbWS;zFmI-@ue9Du?fx!a(fTf6noRCZ*12UUR z7!gi5M;Bnafa@gVje*+BoQG9{y3J^gF^^2bsZrus+hmu#A$mDdXnqo9Q9XKe%4Ec5 ztj8UyqVwd>W6-i&M&L!j1+TN#tFEIN^378f@Z=2c`4C1mN|#p_XhW24C`B0f4@&Cn zMYz5C8cwR$5Z#_YiD|TVc8#rcwx3yk%19|b&$1VAAtdpsq5o@#q%z!EFITFn_EUG1 z9XxF-Ab-2aP|+O+ElUiR@_|$YHZfol8i@ahpAjckdhuQcRsK-55XltMy$J7L`#-EL z*Mcx6GWrKX$Hu?Ww2CUvX3;jnJIkC%HYl@+Ui01g`gT@>#^-GboDstFK;31!5x)$% zPx?VSOLiq2M;PS>7Q>c`rDw6lmZEyM{k&y0e+w%@UeH?S0KtJjady%^8LBnRhV`ur z!7G|8ji3jHL_TqqG$dn7ef=`o>w;0z1jj11M#cqU&IqiG+<@jSq*OKlDLpye^3%qi z=Xs)gnf{d_>;Dbxi5B&(M7A`5rY9bZFK@)#u|&JySdpOhSJ zV%Kp_@D@_uU$AXEQvr9YqM`sV-gP~G8{J8~y@%ISyxf}6)AsBJq3z_i;?a%AyT!C2 z&7rdOe1)YFbfvyZa4YR|an;sxMsViZ8sa(2?LhDM9$urTy@%D3Lv3W|I(6v!0ohpL zm92o#3G!@c{rCEYL}&ZhSO=8$KGmaNZi!0nu22NxE$`{^YB3nmxyU}aa#BG;XqpRB z%Z=KdUnQvkS0Q5?6unhIucj_s8^5=>CD0Hqx#^K8BO+7J;%hGO!(s8(M9Bvq~- z992LZQLZ2GqtF@@w+3a4h=_s)tpO0_7ooo&C^NGk(wp!TCW_6-fVHs1OCNvFfz{ZU zdw1qB81Q1WYG^j&{DWk^=|bqrInA3=44=n}U$B=%*koMn-290(E}IYQ7i1zB)ubBp z1g>E;D?hvRlyo0hd7YH<7*(lNQh9qhet8UbWpM?TMK_g#zKRX0+7!)(RMUJVYqS2y zs=Z6+)}m~4po=N@0KTc*@3mBi#i6Z#SGS)V+M;XWGJjdQB^waTpH6#yrqCPW(hQH% zf=pIBF==KmK$8kJDA9~v|C~Bs1bD6nIbV>aZ~cS!6oOWGB6DpqiTJMZUFZRP0rf0i zAB-$bR#BFY#h{7`#cfK+l5oePnWI2$mUHb)Y@DKba(7;Cv)MMZtZNi=`nZ%cyyQV@D5eG zY(cVTDTcZwiIq@FpUWiCi2W^{HM|UJUcjuUzmV)w7%Sz;H5(`Ebfk~}D~isk2xse5 zP|o9)qRTzfDQj!wRGL3~S2F7m2-XNO&^^2aO9)&TYuHXKc1)svlq^ImTOW1i4tM4r znEnpA0Bd04fHB#=v)^s&8<@}JR|HgDIGmL}v*y|~o68NUuEM0O6l6n$qRTz&PHjk+ z*GS97F~m4Uimn^?+epFz;`OPPoMCw89v;~QE1!Op7MX7K1f2D*_N#RHVf?{3nzeqj ziErOCs+qXbp)D5SpTm5Ydw<+zU#^tb%0y7fL?+4^?|tSqAvfV^q6*Ei%jcbaCwQEd=6a-cJSik3jf_yhkX-*PLUZ+0dbg_} z(xTOlL>_Z^qAwC;@-|nQiWcb4WrqWY%orwM9TP+xFlZZBAW0UZsXSYrhfPam!O26n zeAbUpiaknl+aN@H(_q4wD^%-CVy*)OdKxSBSzwf{#{FV_JQ| zvb6q^v|NI@$UAe@*HJ(-32k|*9Wk;)>J;uvJjR;#O8QKXvTQ$4(h80m?14mdF7*Zl zJ5t^j#c9W>4sKa1lJvRp^j&nb<;5FHS^&%sox;_#X<4!xCiVc#c>vU~5uFrFd(@%S?P-HB`Zq8usEKM}-`^LU+&b{l33|YH}vrvcv?7 z09q|sNJ$Ja*-8-$m3C_tn#_i*YQq#ee&@@BVo_^cE}c}2Aw&(WrO*|H!4xM|ioYzs zTQnOB1(V|Gz-(`T0HcVsw`K?hqzN^~xQ#{uL`Z=^EDH{?qY6akSo)T8;a8X9>gKI@ zx+Ouv&WriwSC?X45H9o9dfgUBmRu;*eFGGl>QJMS-WpFY<9UrA7s%fY$5gc= 
zexvq(hN>pg3u}nH(o!ViO4S9YfwFxZkE4mxM3mon=MXCd{!*K;Xu$VAX`3OosjzFl zYC5T(#!dc%=$4$bpdNSV$VFY}6X+*Gebs4&_|j}>xcolcUX=GFjXUv?MNZxP$JX5D#?G?! za6(3R1k;M5?JldK(q7g&z@M;J|E!yRz@=IFSIgs}-$T{qO#koIsE*H}h}-o3=KP(g z;cEoPltcf_+2AMd0(v$^#W(Uti?^J$NlPZzy9ds#ABgEjjAnvO^9kuz_on&Wk#9kM zKSXX%BfcicCpMz*$Uzwx0ykO6ILVgo5jLxz5Cw6 zP%k%bQ&M6U(sZY1I@!h(Px&s}ft4f_drH;TNh`_3JTF~UQci@oO`GG%_V|T7HLa`3$a-WUE}dH>0L}RFKm^k)2AtNFST0kW+38 zeVaL=x}G>7K5Fw+*Q2yFz?B4hzOR=p@wWI{Eb%*DH@JYNU%AaFwpN{1vj+DT1>#AzYJ*DlfIjckD26zKP`l|7{O-vAaCwcT_G4ygcVey$l z`c_g0cF*51yBGlBez<`K-WRKF;sP#Lct@O(-F-ZP+s4^~J4Qb+$oeJF&xKW3na~Gx zZp5HBeGU*lV&E`>4;-1)zp*x4VViZ0)Q;@7yfAp+HN+^m#_7QP>|Gr?2^ppn<7aYV zd}B1&Oe4D?HxbeFyHtP{o*dGbw7TMx*t)) zRt?l1%r>&GhgC07->W>VOt2DSBlmn0ng-hnLc_&}_@}ckOJ7d+<;|FukO6W`bCl@= z<=vJoJ>GOUePpB4uN6QiN`Lte=0?Zsfa!?I$^GsQNKjI(HK4PChkbW zexA!b#p(BOY?u+5x={5m%Q5u&zzbsWQ3Z#9{yb%+MbuD(0yj$Tpk^W0ydDPS7uP)@ z{mir6XvL8-q%)dFR_C;Kv901Ow6yTIa4k_o(FD;2(fdX%4T33AoC2BoV)J_^+$;&x z`5-4SZDICV&l9>gOy1w$Az#5?+1;YwBK(y2G(Nh(Nvl$uq$s3Hq%fqn@hZW7Kun9U zp>q><%pZ}I!??u55)6C{(NaN0dGl@aYNr;bCZ{&1znG)K^pVq$i=*5ph3f5Sh$4m! z8bm5Rl9P>%;A+Lx_bVZ(`^ah|*1Vb%wP%*=XssYTyCSv8Xe;O!lGMj>jSFyrj%iJ! z8m89v3tP=>=u*ZH?JC;fHdW~co9wr=_}K@u_KN{5u@DSDy%b;z0I&xT`z}PWA15Jr zCkUIr_HJs(!Jaz{es98kA`eJS2>bNLKkt8dw+pXod^xrU5HI&1F^8Oo;|_mbRbRVv z1q4t|z!1Rf`^xo{ZUk$|CAK6wfVM)nC7_V{t@4CQk>CEJ^bbLcfE5cVAXQ0(AreJG zqEd=##Obim+=T<@=bAMsYEjnwrN*G<55F~N;vm`XRQF~iyfozWo&}Z#mlZv|HciVbOF#E~X8M4mT}mg2vB?l(&M&G6*-a zm3Z}LWBUC2IUUIwG*Y8r=4IH;jiA-Gm7q1pFBTCcAdiyb7Vs``t9Q`X3}uO&MpG=g z5MVwK$%K*mdIa7ucflf;UT~yxA4fVuR9i~Ri(X^Aez#n2&RK7wX|7tQV$7NvfXou(nSJPK!d%J`EX7SePo%7E6S^QP^W%sT4 zeG6~^@C)GFz~k~I^TYG_Jijcm=|!wI=ji1?=Fq^^U!J;DW|@_&y*}Z04B*#_UpPJ$ zdaw8#2yocJ2LxUta{q?kaQ(VY&f=8KbL?+>LVZ_$WO;e$C3S}D*oH&<5a$4e>k|$X z<`xba7`;~5scECMiSiuAbMA4jX-(h<@Jooz68p!U5}5i#M#qbJ7v%7o<7b8(0P(QJ z%%D7w@DT8bZ(G&$bMNtfNq#Q;Q`=tcRXa0%oQde>*~z7UmA<0i@CoP{$+^kl+vmse zX~ypbll6&;RPx#XXl7=7!M*i5c)c~6I(WIx-$@Aw-<7aC2HO*TkI2S=$P1qRkbXJ* zn3%HjNJU@%E8aih`&(=_u6wjEr$On2@|gOdqUaCK!j0}|&&xM(SbyA4AGWaz#!d52 z5=G}TCl{B8`uyC&^1}SwfbB#w>FcizhkqQ=IpHYeb@9WJ>158Q#4_ekLCwEVxdb%K z=ylzl{1n&yPqXqa8&Ol4f^z1Q|Cj>v3!8Fnwhh2OOy9heq-c&WjQ1?_AxK7Rk9&Pz zi`08HK6853bUk_0G(pKx{hg(9^}iXEm@KSWG-PwYl{<#~#UuWb@V6jwLyjZ58haN- zn`Dp#8r*C~I$&-mEy+NUiR`Ce7{sUM?dRfNzOzqPY5LaI9rxH1*nt@Rar5u`L~|Dl z*gnCD5@=<0>ZF%vGlgHujcTz?NyDx zQ11Z$WE1}1Gqk7tM`k4*fEgMi(Wkrd3sQ_#YIY%*yUVbN8fjV5>yVYZxxqTirkR{> zetUjae%@Q>m$X@*J}DV(FXrQaDSDMzg)y)x&|NwmXu0zXr_7jc=~i&};L*qWQ)y3#`UMP8~glu1FcgsqLz>!7L zbk~>dX{G&wJx2 z>QC6RDu)!PzKEI{CFgvZvTgexm)C#QvI(4vr@=+0m<+=}p`G=>AaaSo)o+K5S-^u1 zUp{6=pf9h`cAT5@^Bq-&hi;@R3vqGZ2FH8nww;}c9J4J>Ybdw&JKnE4>zNz+El$51 z$;3L+={3y=MvL3ue0($Mc~1#T#nKl5byO1nH)RcUp~BQw`eoc(rVrxQj2Bd^L59zRJRsK@36r< zcK}^ILp}O1(6s7;+8Q4ZZek*sEesUduG()2iNAk1b2GyP*^%_b?j0AMQrm1iK3rIj zlxR^efArt(m55hU9OvNRv_Rc6M~kJ80#0SSaz1{{q{{x{L{S z%e3qx*!v8^=YmdzWh?cKBq;Hjlf`2ZDp3CG{Cnc%PdkH{y~@JvXgD)E%m!$JQ?D_y z)t=g98?t)XbP(LvFIWBcT^rVjWK~xmu4}BP+r3u`@8fLMf`g}2xV){qv+_=EBL)yo z=H@H&rge^l3Pi0FUC3$Bzz(6CU&?l2nQ+G}K+3B#hkjCWQ+(KgkN5o=(24AXGmMya zOJ+cEtbiyuC*Q4>M51!VfqznClM=-Xp`mGD;h>4grLjD>hakGa-wzMWGl@Cw!FyA} z7%{Ugu?Rq{7Ya8+!+MW&FAkx-hBR+E>{W?0qo00#x4oh9ofpNnoPX$iOW)u$^q#q7 z_a9U|u+v#mI&qL>6lZCUqVP%xO0)AzCMsv1>AS^d;zIPgZA4_%s8861Fp%x7bo4++ zCQhcFJr_=OylDT`GZc zuN*5Qw=8O-qRN&#U;k2~@L{t3tmmy8fwZsT17}t&us#D0hnw}s2IFAgzkMZH2hwW{ zL3pgDg}{1A0yDs0WrVe53cWYk0=*zr+ zF!vpy8_8NwXJn+dhqY<@Dq7ZlbJl9_XUwSuy}SzsVsgvEZK*?-bQSw^r{>tP4cnN@=Gbr%~+I_f{ zqE)a79czkfUIT=4+3q#%WCwk71MW8S3)6N{iN8213q>#_IS!YwL6_jyF2l(*u z$jI@Z>*iLG1}J}ti4lJD#@k^fsG0Va_UoxqAI^A-03hs34>O}y=6lD^4SpU 
zD+g$RtAGoAb$e#S;mPAjIsc1!k)n5ahy2#|lf>ho_wlNZdz59sJK3nRc}GMQZD<`) z?w~pf8q`M*+6%-XXe^=telK}%10*9psn~uf(jP8CE_ui9-hgGk;Sb>>8JwrHCHl}&+yJO37T_7Pg7VYeFtST4uvS=so z$>lm@+g_h@K1LMpu8f=x(8Cu2jN<+yu1V|mPPk~(<;asNaQzdIJ+sLc(l0!%`u215=v4UEITmeun@qS-7XqF;;>nP#KdxeTUVKf7;A#5?)TWzAo-8I_j7) zMhQB$OgLDYUUe_z(%(-~mKjTA9M-C3&Tg*0-`R0$=cCyd&7!tc7!ikx+e`)03~qm$ zGA6y?MWEr=>flBz2o@n?nPiE{s;LS}SVAh>)L(?ErKf}L?HGo1gql<^2OAvf$e}eU zHsfNPWFJF>K9#cKyi6q!D??l#Xi)RxK0Z=RN5n8N#79g{XPdqBJ=$Y;hHt>P?YcwR z*A#>8z!MK=uqkV$EOuA?aLpQw1)`gZ6RNuEw*S^(jjH8}UB^X_K_lrkqLv(+CdBQ# zym0l)n3wj?j^fDAl`4s`yxp$mM+-c#W7jjVgcn(wHELv+{z8oYCf$u_alpz2@eGJU6$bEwydWSL(RM>}&hX^Vb z=qTprqi7D!a`U+EU&|INGgx?}iUirz(JUz#q^h3L(HCkrLfZ-w_^E`K^c9wtX5Hen zf`cmGdA*qy;b19eg@d)${FP{;-rMG+%QI@MSMvz0*>yW+f2JxZ0JxBsylt&W7BYkEh$)q0(_t==Bo@f*t{3(n(Gx zsd_W1_{zx3m^8aHDEfD|dGqDs($cHpAA(KIZAXfYxkM4NFiV;My=8Ta29V1<$6V`vU~G<{gn})w(wH%$yY$ZRQTRsk^W=qk z3MG8y80GXhyg1*(<&v0(@0bgH4>KtSdt8WvNM(=vIwR zo02REWu`#4!rch5Gw!m3C#n;Y5|zrk`70FLZIw+%0xXU3ulw;0DejVyw!F)YU>opm zr`$$O25hSPu7)5p!i_($>sy1fRALs)`uc(%2>tR)xX4t+fGmHR@r!pnr?e zbN%p!Lmq)(qIT)xbsq(VcYc`=rD(8sxlY&iCW@m{ZJkY#xT8T{4K%8GSND6Dd=MO-VeIM{lgZ_O7WFJv zNSgrnfpM?t3D1#?7J_z+C5glf|OjQdfmu2SjZrw9$wS5)k zoxrffcwWu=Z)euaFYHeohA1aV&84M&E|1exgx`3@udyFT0hdtHMb#>Ojos8Jx`Taz z1ncCcJ;G|2ekDP3zu~yZ&ElXfP$&(o!l5z8>F;a9P{eu~%Xb^Jospl2^WTP@g^5=C z+S`BHVqolLZ}lGQ8n{kkInQV*zK~-QK-%^^?uAuL)vC|FXsmIbKKPBScfs9+J-^ zkyq)lI}HY@Wj%ZgrCQVjCNQ9BQ$>-<0x^!+0<%ZG)^s_-P#e87J>|8%cbnQ{i`xfdxx*)^`o{dso zDFuqj8qNtY$12i4LPr%UlbW4^7eHLjhn88Ky9qHnF3!!ThwTrgdk02A*)xFcH-N}o z(m9vjO-ho@Kw|$Hw`ZW~`QyN{R%v2T;zDdF9L@m(E0MgJE+BmTJ}x}Boab;voHQ;D zPEHQN->)4CJN^7LjUYDrsrVU@MNG`C;)ZL~Sq#QPl&b?jp2^)_>AyMydWE!zAb$oo zs0nhar(q+Btsqnx7>)$jIow)|DUsg0w@@W8(1ugPGo~5&@(Ka%SFBSSGJSAJ>-$)O z%6Gj0Q2snxrGpb=>KzqKR|sZMf``Kmhs?XV_TU5t<%@ku+$yJ_&#%{Y##GS}!<@2x zA5}Tt(J0F#Sm)z((*c^lNQW3yIl!JUOsHyZkv_LdIjX7X6g zM!jdmEj@f>j8R83H!(2*haJ&sPXn_i&4UY^r$*q)q^fLi{Q@*Eh)VFeQN3y< zs=v1Cz8P%LZh?eI?pW)>W+c&sB72q3G2%0P?of;Ub?+zi{b5>XBE^u~6w;p-?h6@o zloNJB7|IUfARLt~q?2g5)&wSU3Z$bq?T9VQ&;Lmn!^fZ^}B z;jKWB0vucj?|D`l8qY66eG?&hMAM#_i@MfdA^$ zEMn+<%VGNBMDl~|BmCOASrzRD|8+nF$j_xFWQ2;C=?B<{?&H>z2-G)q`;})}>Zy@09T=d+)?_RH8>LU9@yp?ka!| zGfZBv2{dTK6UQiL{MjwJr)RhC=pXH2Nv{%wZlwN7MhGAMEdUvS_x(l8U-^LuIm7%E z+XUrP`HLH`%H`v!<>6gq$r%lQ?(Z@!$}Z^o+iYius&bq{@hhV!$S^msmLMMpbc?Ph zs-=31=m%kK?(E%`?zZbr(}!-N@3%#om4WSw7D#xPcC2}_rI07LlmGQq&~dk%FC7mm z8ALxz=ya)D!*P7nj4}FTrPHApI0=~ZjyVGVtWEWvkDI0y)t6b` zNQ5YmDWFPn{oMKzzT*Xq*c#n}H)-;LM zeL7D6cl;?gV&%HA2&RUOfYP{xv=+stxafQ*KKw~pSN>M-PSbvkx}93De^b)M#l+lC zxD~L+{|4};5FQY=a`3M$uBPFKh*2CXCYCl<91KIoc|oex{^+>?9?8jkcek$yL!?#x zp&LbwWX+A61*iP_29enut|+eFkX9dyPKIp-*|JvD5F6Df(-P`Sy0p=hSt>`FPHsx;$eL1vz}b zeldMq>3ZvXnV5Wh3_eV9(9us%K%GB(aGjsv%&Y4wKSC>{E;}s)$xL#&6;u&j;R<9q z>y@aPx5tgCA1lPRdg+>&d3=1lJjk1TGv-ft6@oibqlIoP0vw0`DNCE-O*%qPvU|a- zoXcE7-Z%eUR6NE9kxG?fZYd8ICFDoPHUJ&RPDOf1GKPCe!W-(l+<*`#KMe;qk#rbL z$Jd#uEbrx?$>rl?u9~RF`Ug^4a0Dd`4jr6Meh8M!(0ljr3n#iiRvZ?FiKvTvI#R#9 zg6?SPSES3B&~IniBwHT$yWuksZn2IQPU`t6PuXp?yx_6AI!TY0)iR}#>J#i7c(9QY zIXq8KJu3%?-)rxpD#!MDBrv~)S%7n)lmb;~I&Vy=O4)V)!+6Is2>caCpJuygHuwbc}8S$zXgQ2phhGl4E=-mz{KMgZreXU7q8KwifXJp3Z5O| zXJch;4!3R1(Th(Uf&;>pQHE>7POZ5l1uDc+DTIm7y|7M^#}j)t`K%UCl0+M9vhL=X z@9tukr-Xy+oAz17Z;)2>{gt@2rqUHs9W6JNT%M!)G)1)_y&Q)15KnDFkOXM97ZZ^o zQ6L_yy5VK)53jVIGb%a=;+5N!jo3K~*bvE&W1KYRioWOXVJJ#=n-zD-)ycGl5z&u+)NMY0l z93UbD!ktH7Pj!b(@y5zX*%V8^r1?=(2M0&F>>o1k-_tKB{LGprmPXbP#ch8HEq0t8;1eK=KN6942u_4ev2NmfT(_h|6jTMio)v~Iv0{!)DW)+;Z zEtqC|Rmw@{@BJK`e{&prDmqkuyB^<$K<)+8lN&eMNiabyf?bb(z)DksLrjnD@03J! 
zW21-N?uB`wjgSGVT<+1**GFWEGj6$%B_VleAaY>td@^MvtNPXQrEew3;?y+d78U$k z)5S=a1JfoeJh&f;uBoHqS?|yFMe~{QgOWo%tvYY<=khbo{QTGq<9;}o6GlXc0Fh!U zyBSU2NU-)s_!$Ouj_btsUjqhZi1k{aLlA*ujPR@t&y2OJ3cIo;D?Xmuh611CrIU70 zr>alr52xvR(nqulx1ka4G?dM58ke!kyoZjpQuqhscaz zw0!zh9XKyX{)bTx(6cNWWn)xjOW=%PFQyfr`qBI~lGG{F5o5r}hwN!9W>Ir9mYc-o zc2OSYd3j*d)LV4!*oQ!3tEd|pWC$$ddaQLY9Q-SW zbA$d+y?aWvX7;HmAamy9=P%MM@Ks)kjcpCvom(qy^F!$+g8kV(+76CmxgMl5f zoW_Ykz_Bi;c>gzV62P985uKTkLLA7K_Yvjw^1D3tG_T13bHpNW;sITx^MR z9ysF?J*KU?O?e^|i0fW9xheIl^GM^B6uhV>Hd$C;Wj7z?_Lba;^!V!y9i1*5i)@^T zD5_C`mw6ZR?Upm+SL}E&mpxz6cKO__`Ymhd#U{x-V^%DPmX?-LSk^Ky3-|9B&YWUl zn!T$OCR-*fi3_#b9Qlw;Ho5TvS!mb!$Ofw7w1@vblQ47>iH~QlH$C}NJYM#S&U8aG zQqZNGq?-73y`ofsMBuk{23}&wW(UL}>_@9PhzH}zH)_<>R;O~@CN!y6GC6$UQp&$h zXZkOZxUA}Px-CD8xZVZAotS$9Y_uq?7^+{sAoroo%0W2S3e3vm90)>y zqtRj)MiH)YjuiBp>`Pmp=xI8wAtjtpb~*SRz{T7`Dkh>399-K@G%H+N_vq2`0TowZt_%<6#Bl^oKoFwA=JG^U&x42)#YVE z(0fhqnc8#V(3)oBr>!>aj(7q&Qj(@*bW)XHTr@Symz(k@J}ly6zpDqH!ZKz(i||l~ zL^As@ZZ~UE|M{8ZP5zO-N@m$FtG!}j!!$P)l=ScpKp%S$wR zsjV=rC&CXNT#Pl7FA6$3i+ysrXht}kFbFW%fWBhN1qat>uM{H~8(jaadrB>*^^@9` zRx%7{-oPy4Lw6tGE}biRYZ)=`4oOI`#^MX_ZpOr;*UV7r3z;{}>UJ6NdIKlKj% zoEwv94e3bJv?`SoLnk~ievr6Mp{< z^fwVlseR*H52!g)Cx`}%u>2DMAQ3Siw8tegfrZ|r>ux3lmx9A#f58t>7ApnJ)CI#G zVY>nnb)l2j_2b|eW*Wp=(d5AJ;aDv!1udhHSf!~UWm}y&C_Cbr-s)1vBRVX2X?%(#dL?*2&}xxSu$M(pQk|#{ z|7vH5b!LRAAaKd}V>8M8n$&X8RZcfknA2V0v4M3}K)41WeA`OJ-IeC(;0sos_B&YW zwKiy^EVz@d!r%P{HIhF9CwAX9=-6UdAQD~U;f43&RCy{Nph(=O0|BQ!mfzb7-(pl^ zhMRX47Bo?XwlAQ$;a6wC%g1lOdpCjbVAP;BbJ>y+P1krRvs5AT+8k6+ykX0(>wELj zcU}SYCOBdA<>do0_Yqmz@EmVo4mm&|hyi2(JPrv7hKh)K^H4ZkC>V$+xe) zrhYe~Z9X;=AQ#JW0M76WocN#GZrITozy~f;VGM*ZT!7D{U@E&+-+KDr^QtTXzfZd1 z7ko#}(Ax@vJoYo2`043cuYJtS77k?l#Fbk;vD?EqHPza`QN2cHq~~NA8|mlL5nFm@ zH&=k=6*x4qzubm2DT*CTwur`s31QC^-xyFwM2@iTK;+KYoHlslr^5p@?YeuSu31zu z@?7+CG`_mL!M;B5SAg!IV1!Rx5nbVs6VT&a0Qzh2Kk<8a1?C*>Y(zE^2ch7pVDP2(U2m>TsZIi&;;)2>PEw4SR}+ zhK@DVWdA*e=dsQ-+r1dLCdL;kaTLQEC@1h3uiG!G`OgkBH@D{zZ0B4H1Ln#})@UJ7 zaMJ@x`V^-R|5@s&s9QB-C@f_jfssgIumnOy3%^2FRyW=iyYDUH&2!2qEazX#BT*{9 z)DTr+bpBtln(U4Z=5un~Yt|h{cM|W@0bZ@b+4rO%(RA+MzUl^-U8makA(ZQB5r5y6raDJCF9u%IK55Wd z=8`O?Qt=EdG2WM{u}S7ItPyB%ADR&6hm-}KFfXdM1l@ZN5HIFrwPNl!y&pC@Gls;T zn4~=egcXA0(LQ8O6q4(}wFZyv*G2u+%Ft9qk8nyOgi+58NKYXG>eLW*zWcWKhB}($ zL?5Xm%g)GF9I`fJ81f@72nm$j1gLNPc#tB=3hdJL4hYOh(B(0qf@_cp^cV~{GSV=n z{s3i_e1w;;-#NL~vXnZpWCdl5CAgjAT(v7&+`c!^YS9~wf*XCrHLUJUFhojDx3D?E_n-l3#mbf ztZDD6+ilaEJlDY^=ymq~>yyL>Sqm4B>i+7evxE^K$soBklsCjvUJaj+-N$OH21&Kq zdhd*l13Qe?6*E_h)BYfwLe~}1M}Y~~Up%9az1|6EH~hvivq4NG*QYP2^)xmvA_)?{ zU6!ZHG{8@7T^pYtX5-M4?@`8O@Ii+kYQ7{6^hV=Embm#8LaKhcTlQ(=_NCfVf%WOI8ur=L!??brtpV$>EXr z0cD35qNCU+20r1^C~7Wr0A$fD-;pg2#2f5yLj(F`HX|ZFd-|FYLk$v&6-UBtAsQ{x zW_S`+m;mn9*3#j#LcO{{f$VpM0lE&1=dHICx-APd4>t;w znN>cguaAX20m8~RDt2 zy;t}|bU>I4p(Qn^-5{WSSCSb`biW)K%DMr`z;PA#iHw(FCLnoSi2G>~y z!t7^<+}Fuav_^ZLqo0lGzvwq=WT6SmJ|`>P&{DyYLV@s|=-O=Grd@*cAZ^ekfL zw01)xH8@I@)@?cOB))Xu06Luo%rl1#LB_>d%f!4M6mLa~hdBu{1Vz*aVkXL@K=j)W z?7^|Rr?H@lnPy_opOLM41|2<-%StTmK)oNB&I6^x+Lst8=V=uYjt0k zON%Rf2+oIpqIcf0W;1gmr1;rFrCF&2#C}yz;*MF+A3CkAQ9g;F0{aupFU}fi{>o9q zYKC)|)KPPoHvi&z7zN>qZz@2c+x+Cl%u;ky@4)WW-A8c{UJ^F$p)kST3OCJc>)6dXBOIMMBknt~rHO+``_}W@g&g6qBIGtfJ2en1i|fh`3m#xT zb8MeLcF-#gd@LH@?tO-IzD3{IA5b>Gjoh#Gk2Dn|OI%X8G%~8BXoJ9u!`Wm=3gS@{4CWxoPV=1q z2UWO;R3SV-jMt)lT*#>4ylra)647k@! 
z669G$N=1{nlP!gt3M3rhL=!(3R4(y#msXUf{k)R-3oH=?)Uz-fD;11HxDw=4gK0kz zZPa2AE%2wwuxbXHXaHuNXJrylm;a+Wu9QM8ErhfrS`u;`^twnW2qSNnT!0!)x*6nB z^gttE5I>8U`v?RzQjafpjaEN4`kdp|9e2FBSfuaUfIMy*S|6tGt3FkJYzFb>0& zC}xNzkn7DnZ{APLW`5OX;dnF8;0I%P7rNxO?s(Ou9X0snTk^SVTPh*Pq6~frz@|Dg z(xuRIV+dJ28_I5AQXjkopw-{gu+EF3?0E;(pFHj9y3CV&A z*W1(G)zO|x1X`MXjwZ8KP{wZdvn9;ZEam?YyG3QTwjtz0^m16W4{pJLhlY33E@gu} z$~_0G^`Bd@<^X0pUMgbNj|sDWfSedW-7(X~ z)FKOLNQ#lo?UJ16hf}R=Rt!Zzeaowu*J3$YbY~?wayNb7lOOxolht3`*_OX;P|Y@C zNoiN&824S=hOzcA#xx;Lj=Ys9@Md{V43_OWym5ZC4h9?B+_+_%r?fUYyf1C@xPBiE zmXvE~Lx01b)%|+oX=t;>c%aQwTAQuL18pAH@3Yx>WF-#_Aod;hH;WZ!_w8@Mnfm*( z7z#GNY{MM5*J-mE4l8Y*(%Ni_!yDUdhQmsm$Mt7jg2TWgPmn*MKjrM8o*5rEW1X3C znl7LT+ApTX6L;R3z3cAm-FFS#dDoZjyeoU>-2->uIdIoqKp5qV|G?$A{|BU7T-C|a zgpC?Vd&~ivkfDWg0zb!;1tW$xzq=uiwB}|`B_biXL7>guCSv??A*JUIqQI+``$&sa zart}wigqCH3Sm%5=Tss_X^SFMW&2Wy<3ihng&7Y;` z?K}taZwuL{L|TQ2#yJg4jXY^o!q?Tnp-sQ8#a%f;bn&|-xS>;Oi{Uz8TB(-ep|^0!@JF{FEm_t>-V{;;d+PO9{Ctyd;B%Pkqjx2?PQKT zSh6@leeI-15v?3PSfZ?Ai#<(5!6=Bvvo<@2`i2O|)`V4Rj-skLI&Yyyqv@!bnoJ`m zSf~Nrg8N5wTPewu%-7L#Z+kOD0@&`q7qDFsoOFs%hF zD5l+pkYg>3LQPorYXM(CX$oHDCdMz#w8}%qWpQe-y|>8@qPWA>otm8g=VT<95|+iq zkyMY(4sxc`-rqewy+@QIt!>lM_)G6e##&{%W2UQZxU{py;qv)hj+W)wyVp|Pt%1=Q z?aJ&#actKe#c;ekwI)9?JDXxPAz(cY0MB-E8CZ{<4Ln<+=ZFWp8t`M|^&1Y2J`?G%J1usT)e>?Q@4J5LnvQ#~*|moG;{$t(vC7x_1m+>y6)Lx zUH5FVuC?n6m#p`my7l16ak5Nb8K=DPyki-l&f8lLs+l_ zVhxi+6q8k2ET|T%UM7#gRlmI8lu#6z$#t&oPTY8MF_%6N>n~0jdt%+iDMRGCma!xY zb&ZbRkej>n_ziO5mX_)9^UdB#AN=w;)cu0-cx;_l8u#6X>o+!B4{FzI`;J|A>+Q9D z&0@UReJ-or2lmh&xX!CNDr2P)13<3X$Z}Lv38EfcO5qzdDvXe}R4fV|L!S0_MP}GM z?FUnY8!UZ)`Cmx;ss4(;d40Vu^+%tRTc49QS4Ag#K-b$&Z6X&d{EpN4;HiebPwM*I z%$9w9IoC?-=yAmc8ljuzk%mYlN#+)^p&=TzB+7SFBhTj zUEHe-AA88nn`BKTh>?iI$kOcU@zX>b(x=+CXvWg0ux*V|xFg}7?yk_vMwMSjWfljO73JdQSylg}#8B^`E0w9KEi$X#qsCFg8~3P^lc?&Qj3O6HLuS#$ z^9IzXxRZ5qupnresV)~GuBa;-34sXXhHjG79<#8(-MUIBN+ArdUJu1vR}dvf%{MDG zQRw^Vj#^a|TAH1GMX8MPONCw_ADIN1J^}puv69tB%_L;zOlEC$QaDhmWSTPiVK%FP zw}235Jc?ypv5bCLSc6OGs@cTuPtqz4demc z26AeXy`%7HPM6Cxx}@}dQn#_pY}wbR%X%{neV^6)Znoc*zDd2Ww%=EQc9|GWYgcL4 z9V#9SvRsA^lEuiA$*sl1hV3l zH4|eFiwPRH&JuKVr31~a;i<#H?x^JVTHU@)lLiOmRG`h%5-vIdF_*u`YvpT6gqzs0 zHluADV0SR zzD}$kr1W)i+?UnsO8Y^jy=3LS!X4nIze0%_DD@Y(X@yiPGCs%}IOndd=6vygGkF)= z%qeXyUUi8*L#rL(CVo;XjoQp2FEm*VJh2GmB(cB^EF6mf(tWlb4Of(!W}5D{~GHGai;Pa_$gnB^}s+(^ss!amw*Nz z{ah1Ok~^77f>927q2B=|1sp`8(RnrHAlT4E{}v4pt7?;#l1*}B!X${o4C}QBh%FK) zVG5qf%83(~q)%2RxlaL|>EASmk~^@O4RkYGnH zzfk%DPJOXKAN1+ZnyJwj^wDeW={&8`ofNkY*XNb%>H7OWtIz8ad{=8L#kdqPV{XgF zRL~6-whCI}XRPwV6V!fxbx6(eRBKrJS#1c3)v&}ni2{eZD0-?C8%2&^u-hrIOLocW zuvtVSD56cY3FGCHO4_EHyUW9R(o%8a?w_7ISv+>AJU&c6Uta&s(JRZ`+4T%769n{f z_ToX1t2L1AbT39Z^OP54E{tHjHS^X)zce!>8f5o9Sk1EV`U*2uuI&a(=WD&d>Hf zUHPT1u~l!YscsJ~6?0gGrE5AHdWcJ{zr?)@H8<4YM)W#{u(nu>OTpP5hC82cBdXnW zAN?o9Q@YcFxG<&bI!>&&0_}Sn=+|X%J;$yuHeP4BuPM00``F*daebM;DZ|=5`DaQy zmdDEE^758)`5HXK&z_NKct#ssPcfQWZn%y$xyHDv(p~~N{_Xz?^pw&sT>Mv^p5k-Q zvNqatpT5}8O1)3rbIF-irLTwO;IZfa4EQuT10&YxnS#TqlH6oN^h7E}w6t!(=_6|S z8?TGS^A%vZ3to{`1`|EYm1kIauw|MbqDQMgxV?OP^|d7$9jeRs^^)0A6Z)zI1J3~k z)M%!cY@Yq=MoE8T^6!)WSXwQzi;vfMe`*wMHIm#$oge?Wk^p@3%};VKRe$=X@|*si zr2&6>R8w#0uv>csWLq)UY(_c0@fbW=igJB=;-l?hl==Us6!DwK0&A1EI#adajFRbsv z-3uF;e#@8fG;7t(h*E=NAIo8mkk!xeSgd?bGb5qTN692(^%CnxjV3%brBO}Py`+hf zjuRn~WGtP~Y6N<<6ztl(^g;t5n=cM6AH&&oEXiU8=IO^wQze-P)0i8)-e|OMtc3T>^qxJ_iB4OSQK06>KK$Xws=pG9 zO}5S}G94Wmd|6H$pjRF^5t2MD<)t4k{bs4$;*mm6XGcb|^h$iM^L2EwsO6>7qKR6t z3akM`O*yRCde%lk95sMAYPNECc#D#(4K$Azjw1eI0`W*s_sI5m^jhr(gTZd=>Da#T&xvRtAuL4hv$y-_@x#0-v&jrk^IQ6< zS?)K((|sH4lTNIgr}V9(r>Ee0oYB*L4cC=tJ*7SCQrnRJtf1Cc*S(lefsM1UCEaIv 
z;B1`x8^)>heM;-Qsotja4QhRLy>05^HmuL5^sV#J%)@mfo8$h5>l>f78PC}GENzb6 zaL-N#&q3v$9_4xwt~ask2bAmmYJ1eF*s}&Ve-_XZ%r&3nUe)w2B3h`4h$cnBF|$mz zOY_bfRWFu=E?Lz=Ivcal@c@9jMD!o@SElIqmg!%v}KB+%(ie*)6n;udfz#%FSoI8hu(Ltu5Y*&o1pWO4SiK!vQpzEtnZ*I^9(6E)~vR72QhwI zZ$CewTxazS8Ei+k+$&1_0>buS!(LIYW8cv&`zrSQpk~kC(D!kz@1}7RHQ8@$OB%p( z&>0*KH_Cpj@3UH8&?zlj^i67g>8xd=95}YM&4~34>V4Us#q`uG_{&|1^$W_q2*A$I{5Zp;)|1+29LAWYr9v~Sz4 za&dX7`gSH?X>`t6PZxC>KM&XI=%Ox5cf<8M+NQTJGW(QH+m!2yY)l_#wJR`-db9FL zSUHJhQ8!jjs;`w)UmywYc?b7i`W-%qbt_m5taFRXhW^I-3oO z1wMH9ki}*Yc-|n`%)`oa>VysD9z^+$g~7U;7Z%P<@xfs;ySrd$ zG7sI2xkUVa`dR*gKQ5Q(fmt@LbKLvs_jw9{o|wjNhGb#hG%Z3{nxj zvZ~|!-0VO{M}9WYGnPn<^#pidJdub;`V($w+m`~P~-U0|d!5yOy@*V#_1At>Hk7?OoLswMgnsVh>N?+n6CiCgqEl$opYmG<3 zf#Sm8wxq=z>h}6tihb?jVFtT3z?=UBya|${wR#7E&FURsZ38TK(596;Ab+shf!U2! zuHqOWq2#NwI>AihR0|z&ip;q-JP_|n#C_i83HB-Ghfmz6hR0eWE#ZL2qkT~J?HTU9 zjNe=i;H?t!6sI^2%8>`ISK^MZ3#G6_=X*x?cmJ%g% zN2u$_%_dfN1Zc0-bmj_JMhoL`ODKG|J=c6D)jGnFI2z1 zymz%sJE~pu?@66c0eBZZ!D!eMq>*0B=yi|qoM2E2v4M8KW@Mxq1J(jKrl779N0_oN z6IpU6V(kgZEQZnE?bS|KYS4-0xPv7#G=nX{aux7xeJT0x>M~K zm{$ECnttqf+fcUp4elLkyvmwpBg67OXd_c;Gx~uYK;yU5Te$<;x@s~mT<4JXYxY_e zL)%?vzeRW)sKGi7DrWI`@T&dUc3bVHk(CzIBZI8f{ZiP1FCz8J@kV0nb=XYV;@2 zH`n|LEXVMj>iH7$N^(jI!Dy?000Y0btrVHuz-Wyh_aPLqc~-^StO85 zZ<;DsZ!W*@ch4E9(`;#)4|k?cFVem1xBTPz^Z&?YtH0lSz1-GZM?cvd+u0m3Uuh3J zuOnZ|+L)X>$78t?qd<)(SuL=@yahdoxXp=jCL=3k32#<0%4%x4**?+_(}3{_x8Hnt zt*6|3<>52uF1x<3+;^Iup$DqZ(;@cyboD)Sx%%Jn1!Kqojq`9XDRre}C7;uPbtgGI z$B$wqo3ohUDJo$NjOIl3gZx_AL4JLtuQ%aJ#1e5N;)S{c4|Ng5E7#6It3TW={eI)( z*yLh)`|9D*nO$R}GZl<7#8jJI#qEpn@s+{s@>qiZVt#bT@W_s(?S;XK$)Voad>D&J zZ{H^v4VTUL&MSYC`Wom_%-;0sb@2nN>O1U`Fi5Bd5v#T#gXz=ie>EKH0i1bxA3#79 zMWaLfn;Z6_4H;{UIm~~315hAepf}+Sw0L`6^xuG0|18`qd7OT~C*!Y%YI+pz^R-L- zhU0Agdo>=;_PvAbP`H8*EtCqaN0dLRZ~8#p7jOx`Wj@)UVPWO-vIH7}fat1Hep#jW z9#>bI8tJW-ZwXv^zkhM@2>tGQhP$)z$Ie zSkFxXgi_B<+_aus+wu39tsrdRS!ov2Eyt?=d1M9_?;&5C)8%xy9c_Mka6R)kplS49 z{28m!8)T<dDNj{S#zn{nmBWD8xw+6pB*wK`pZXJY?d@$| z33y+NBzY`iR0+q*WxaZ2Ay5_!3`xRPkzmKfKz5g4&-6#~^Syfky(5vrT<_kVdDvR_ zPYw=FR-eFk`oA)Zh3LdYw6K`@3AUP;zz#ndo2{Csi^XaBtJ$#{-(fHbx2;=aeAgaY(Pn=@tGSaXtrbNp{b&raY&X*g;Kjm%B5C8+N=Jm zFkuly6TN&r^Fyo8Y%!aSM$6TQs9E8uN*!ICW3m)Cf`<9V<_G^`{5@}(zG+W2#lZyM z%$;X1)u)a0Qh}X=oibyj#=RNRA~p=VALEV z!IrPr*zmPqXM7ld_ydg%15x-v_1DOY0UtEjq2E4;yoo3oC~G`sv@kiI9x)k|U60Q9 zV2j74yWN}8F|*jr;`Mx*Y)4_%HR+1j*%_isyNbnK)z_4@o&nm#G5^#=X$%(DC>TXNE>=1I6N_tc zcsQ9_`#h=_Gj7#RV*=eK?n*MQ=+-=awfr09fHJjU&y4kk!zXXN@uYf^+~|&9|9Yxe z*kR=Shfbb4R{hGcQ_4AVXUf;&L2?XFJZk)67^v1sg=)2GRZC3uGz(;LrdYJ{%E|Of z69zwQ$MmtNs-9t6!kVs>q-Z3Sh;&9fC2xCDa+*b@dUkH(`PDEHNv8V2Yv{=G?7J$E zne6D8%mlRGSM8%`x^ua1cmc@&?iQ`EO!dRsr~9gpapy;~)emP!N70ovs_PmmpKL~1 zjY4^D2U1<%?H9`gjeogHCB4eC)(7cXjOYyd6YL9`fBjBkwq&jjt4@PH&(dCfqK zJl5M%OZEqps9h{p*@|_iJKE%!-zzzrtQHfA(1=N7X8^G_qZX;|%iVIX!6K4* zq|zM=ZHYv?t_P$x=mDWWpa-Ous$Ez!Sh*Mh&>z$R{LEdY7q!pHqHy!+gO`n$ z$1gj0`sURmrE=*A*3SJI{8WEc{kPZPhn-LJ8<79HK^k2uEnn% zbLEg@+-58yIi{5IF>9FImX=#X!4@CLR83~A=F>}iEDAQK^;&{8T>vu;MR|-Lp$(`) zn+{xST@{PHLw$u@_C4?J$z-~V?L)>@A(oqLE3RB#8s1sD`k**79*FiN+j4Hj?(aCAZY>uYQKqM^S-1@*R#clj5kRH=vd1025hqivUs z1eJPh3SH?&ZrzsjdqsiGRXHyCtg;;HSi-1~2~2SLV~$yzI(0}e^9F;_AQ()-e&v`& zqe&DU#?syrXl*8wpd7Q9`X_7H=CIoxwy^b|V3>=}Hn-2`_qp2~_uubqcfm)Wt6jp= zCP^LsTyoZDNI9A_$ZwP!SsV{5O)q=Zvp!S*{_pEA{p@Fr`+ZNt{TpPXAQV8Q} z7mMTCr`~gKoeqV6@2u9&uS;z_zb;f0y=%{CIKPg*xVTttr!Q*$OJ;ArjPVNP?+dDq zG*IeeObu(JDN;s=NfN6466Mo~VWGXv?e+d)wMS6-8n3B6ctCVHvrq`@A834Z$5ipV z$lB;(%+7xJ;n2WL&j)*EbNDeg+jINW*at^UF49`f=A1JTyt9lR7=_BKYgo=vZ zW7JMw3Dqk;O-+O}d7C^=I}*!8OhPG7LPn^%+a$jxf4uy@%TAs+{Nu`d`wE3V`u*}Z 
z4=wFENWcH>FJ!v9GKAr`1~&`SGL*fKEO?D)$c=R#G(#&MjF~`fE6k4EP@jerE2;6g zvAvnYSDd))c2IEkR==Ie7K_;om-^sYtNot(@O`+beo(`%ZWB1E8pmoPFn2Cx){3-IpEuo@04p_o4NdUU-4NwX^SweWWf%(+zURyX*3z zQ7+iW@<2Mt^`C2^CQ*xNPBX)mlH_3^qRAkxDXNOe6x2`pi(op)<}FzT^M;lyC3|Ow zI~hx}ybwn8FUC9mVS3aj7~V zUkG4uyAl`k4@A%7lz>NWrypDfoqXh9?#@W*-=9>@?TuHMV17a6{!p{eE#L;yEB1}|xiEfmL~sopWT zQVNDbl!TI@L^K>|_JV3-!VsAt4Wfu%VCG`)rH;Z}(yJ+(O)U{>pwT)3Oa-Gr&HfBoy|BWz*^4?4az)A&@$ED%IWmtW@YJc9;(4 z9ce~kb}AUpjsR)=LOD*YCbeRbI+Ts+SC^UFZ|y0?@2A-d-oN#;LR&p6^v0f*T6;>d zExg|OzIA>|<$Pc9|BLf|Be1r^OuyUsJZ2Ax{Fk1W+Uj|H-8t_OlExD_hyaKo@K_>4 zNpy(T>iU9^&>E#AAxG1#=}1^R`du+>ZGQCoD2=hea7=^U{P_3k@6Un6Z+M*};BTNa zyT-1)c5IhE-VA^s1XOsl@x~g)8wpbqkA;(wWGL9|gYmklTN`iPF>rOmEnGU>6E~sZ z4K1H?imFe+FWPDnWJ^hjvd%9_zE+tMhq{W`-?ZUm4qlGI z?xH^~m(Nvyj<*!?mN*i;k4(P}mrHVKU}DFrV7@G39%XeGgB5guG1uoc;^!L2Zq(BD5MA*_L2ytcL<9-h)Tn z@t#1SC+^mM)647sg5PQ_jSzX5)n#tS8rTLS$K#Ee{y9Py9Cj?>;`7*pj$ks5N0}rW z3N)wdr2B!q*dXuIF9iMmAimBkzk>g$UAO^jm`>r_E$VN0yG8pDLC+Y_<==68xW5Nj z^kM}8vItV-a%m|;E!I9NShAD|{UEWmn7IG9xo?4wv#Jum_kPcLzcZOkCdtfXk~GgY z$s|piCewFm`j|pXn(!#0fiwk*EefpiC@P@4sFekDDMD5Ji!6(*))xXQtP&BF1*E?I zD6T8$DzNy1CiCTg&b{9^uOw|jcYpu1GxN$y z!^0$TIJow$+mu|)xF6@7wf*$XL#b3#s-Y>>l)$t!y_VBW;b_1NEN-2aN+knRLP|PZ za&l1mjaqrR#n-2oEs4d^MAu)*Dl33x%Wu#uWMp8u~B%{ zh&TL7_rw-CgArF-Y|$c-U*IjT=fCN?qvRh+6a5a4(OsLV_Ug<`7GFZXVrIs&vV0fx z?6UM0<>@T2{+prxZ}a*=O7v*u_45&<38R;IV>sGDS{CU0=CZKfP32L%DQdvz4Eqj@ zx=BzDOuxo`S<^^19Hyx+ej7PzrC*~-`V9=zjM01nruWh>@;nc?21QO)kx6ySW-C1U zR=S0fNZ9YyRdgIzM52P?bys$bHMn~9AiT6>vY{>se-hg;xDvNZc$-SKbi`MtT3S*_ zSLj#iVU`2lvpLh~K-F$O*)K*r;0Xq}2(%e<{rdb~#Sz8nutFE7X`30DIL9LoXMvX8 zJNtclKaKe$`h7x-D8{>IUp1n6ALgU8&(Ozd7UN|?mMT`K1S?z?y_iQ0fo9cNJndPQ)4Pw6Z82(2|p?vJR-n^5Ksz!d;Bz4 zl5s)ktg$F?{j6`@gWo{=N()cTx*3!NFl}xji zXCHvSo%r@FdwT0v7Y{8Shu06jVqQGGm6$P%QI}l>65y)Lpwq@Me=}h6ghOXC2y@)~ z({3;Le5Tbc)lotcb z{`F{E(w)mDU@ zgnOhD;c`A%u*@Grfp%N#qUNNmR<#5Qdw_@L70cm#cu1`gb`Q)0%V+hoJJzk+an_cu zCg`-w?Ofvx2M3xr@7z#xth*0btz;}#PuIPqMivCd~bZx%kD+{kQh@ z)~D)uwo|WNtu2^znZDo25|{5JE!pTw69GqdU9PwkRRf}^tlM>&>QXe-iD@C%(LFtj z7WLq(Kje0Y@C6drTK`~8eN)T8+A4Z~ZZUl*oQWmt>+5UR#*EYHtv|!04fxg&_W8oE zp`WSX$2{}^?9)Gik*3jl5~K{3Zc0&fx&rP*mDxcaVdMRF8wFNHQi(JnF4bK+hArRP z-rk;WPXo1z!LVaq2aKA|IR;BFbPN`wNl65VNyFZiknwCA;~L6Rd`mA!Se_QH|gC zowkqrsB+!*PYA~N*p}JSUe8n|P~wt}<4|E7h#r{T6$Abgo~Mo5j!0x%GD%6YBiYg1 zR1<@Z)(28+f+lQgiPVa!vbYa%grDGcxupOfEb8WAh_uzp^Mq-hjQ2aysII2Bz24sG zY+JtJ0{*=r?F%~X4QtL@_OoT@t!YTDIdA!;M$Yec1--GR2v@6p`dPon84Nl-{%47> z)l9NwY(I(N`5iHhC(Q*)@mseC&Nh%(Prj$_WQFF0jupKy}_=BFHrYUxwGacXTil&7; z^xczx`SawRu7IYiPM0I*F&H&GF^9{k>RQlM3ZG?}UfjDYJsQkxm@>%>>xs6@Hxw<4 zd>)+`iTWc;6yC!)mjGZw8-V!>NLCIQPTrg7Mn}YsaB=woEHC9@W{)^%B8!?vfQ&2j zr14|p$4}Y37RdPg*#SSd@H1Y3Q&ogwfNi=dO37)wbGWK5uLxBjoO;t?GXiacNN;zrnn>!@Kc6Ev>qos)kdboK#&%{>5{ zUd)RXL>?IMF&fZSnRXP=QG(B>D@5a*36>bba%9>G7P&Sz&b+d#vXvT5OVP7@$&Yz zM7$~_tCcOiMIbNU6Xu|-H5<5rgp^caJ_^|SH40Rt2$9W+QsEcl1>=P;-C5-lQ^hVW z;P9Pg7@oVO^f$rV4FL{aOsX-~3(|lPPj3d?Dmx8V`wiuSQ*|`%l~k_zN#cxXI7q5d zJ|NB9=i2C9k0rt`JJs!;ARPml#O~8|4b4$mmR-Ds8|Y}xpmE0_z0e-g?Kb9gJ8FH8 z3%n+qfZx0r77<7-72GEvMzK2tx`Te3!=`%+C0k*G;P@>>ED)E+*j*#JDJU{hwR4fW`Blwe`;|4 zb)HiUNFtgkZ6s#O9tyjoFdbxc>k?lLr%k;=quXmm@r z4`v5IX43;U);4I|ZV%e>fHB6l;G9D@HLoo{Z+oFnEtw{b`!IUk3_E1GfEH2gnpkxN zCM}F7rySY1KBAX~DPMbGGu=fwEl*agrI7=ZQC~0;iiG=9A9-)r zKkT>+V=5tIpL@^H5Pca~c8|;HaVJAYWvLGb zeFqVF3(n+y@{-O>2Z&j!qEOx|RGvv>qI_zFn!(o{$GrxourUPtcc>S3ruSc>_%wQ958e%AU?j%=pfP@ogq$Pp5n7#i!5J7sGc`{{a1ZihqBeyhh(k?-IG@ z$Z?{B?EAoRp+iLZ>@?V~14u)FZV4AYhDslF{+%e29MJg})+~q&iW;>-ttvo(DO1%W+n<}f_EOehyeABzj>F+`Cc~^R*T4@vB#;J=urz%WWS4fb0%LDi 
z*%=@j>^wlf8D}uj>X?Y&;ABkF(K~tD;tl|Fx?{!S6=>CqiLyoHA3HE*y+ZB5!?c#^ z?D*vN7I4tbv_{M10GPJMhX2#QerTX;olnxd?)HY_FNZ5yF(giPs=`M939n-Ha8+Az zWaEmV6&t%2S6BFJt;dPo>@% zZ>&QOO@tsQisHcK-I{$pD`GvH+V}C#p^kkRP2o?i8@i&WFYhs&cup_$9V^Ks3$qq) zf}BsrVdl?=yO$Cl&ipvcJdX``sTnhj$C+UN1ULZv00E-H6Peurj4_Zfg{aDeEHdQD zMQLZn@mD^CCYPr)VCc1EFNo;;)dk<_)f;~}y>)c!?hkH!_Knj=Prs1fek6PEz1bt# z&woA(5U_m6d?bXHVm`0O2F!=XiD9Wy&hPh!{Gnh#f~gLqLP|mT7gj*nNXJ7v=>5hM z&Y)%kUO)>vX%9%0_tQ6zjebnknMBc|T32r$Ic8jx#Ubb0_L3lKxxEga`>(WZzHg%5 zgkiI&<4su#!G^{Z2D`&nPp3W3Aa0VnFAQQF++a^18!t30NzU7IKh6U0HBEciIrR1g z&t+hDmY`CFou9jt-ad}M!80;E^W>>L^W+rAwd2O?^jzr)jL(k;6DH>^3Zn(?@$qr# ziRmU@b_&WKIzic~sVTFpL{iZIF5Z8QI1rD~G>zj1o{iF7N#QI?hjx;gu3i7!TtA zYp6WM|3n+dXQKd*BW4@i&xVgv20=hpHFY1oz&Jc)JV@6`gT{l{mP{frAA|Y%@ZbuzYqBmW9DwE}z)-a*JlW+|rxS-J z66=->(7^{7N~XA)51k63@{E1^TmmE9B z^kc_Hj~x>*pMpLd`d{gT81~#g@b%5HV^-=iX?1gaHfMtw^Jn;}ccbx8~ zJ-LV2J>&U0o_srX$hlY8Po@OzcpLpVG(~SM^keF6^@G=8^<#D#=^gJ!O(wd4wBhBP zJz5thkI&NG?x9m}S3E1)CC70e0h(>X ztW;`EKw!-f}-g9acBQ zzRLGIQHMCT%x9j5I!I|962@-!DP$IrfGy5lZ0@sW9eh40&l5D?e9$;PPR}(yffdM8 z7_G+a!=Y1r9)>^<85cb@#oH&=KL0kJH=DQCGd!;q;5eTS5O-#Nbb-^EL{LZv0=q3&agFVJ}WTl zCs7-SHUDJJxZlb=V`rhKPZdwIMMv}aH=mBxniY@ z1J>l0Av#W#*PF*j>?h(ODhoJ1n(BhN74xdo$NAtnI_4Tg#ea+O7frjUkMkxPUx#HY z+RXD_jzfI4fF#8?h62qo>w3~0GuMk_3bSDpmj~b5gkM)4e!x*Zg^SPx-(Lwotf~Zl zCU2zizF9_%NL#sa!?79g5U};4$e0IpN+CPbYa_jZbj$2%ACYcuBo)f9ztfnks>#MQ2O4`UQzX zS4hDr;SAo>7R}C&7o&Y3O;mX1SB{9lCr%()MB*jSOS}M#@igE1F5+Ynz&Q2S!;uFk z$7+G0D+Xf;9N*U1ajpf6Y~eQ45JF^fKQ@5pSgcu+_ZKT@38vHVPb(z}wkz3vZDC zjQ@|o$((~cKlfMFZ~AtM4ezP)S@_%b-4sq1nB&EnwgUXoe+3se_hYg6+j(GbvblT- zS=`UKV5tgqyM#w7_Fa*c8|EQ&7=%+$Iu~&X1+=bK7r>JT&{Lgx6zVi5ptv0z19Vlu z$TE~6s)QM!QDdWUjg4r)tEn;AJBkL86jRXoQ~`zjoh9}6X1dLBs0yD06$T;^S(SvN zyShu`#z}Jr!Wl(t<3nEYDO1!ln7RGU+m1QcFoI#MS&>v_qDUYrZzquMDq0YxY=rxv zg|Q{`^4HJ#tl%b1SWW%)br-MYr!0rZDoQ2`)$bf6SeR`T2jreHu`U6D@# zX`jqfq&aQ8ZJIQ9UCayrzm{I)amb`bjI4vb)o<#Lfy-ku53W3npOoxEuP0=83P#+h zZbt`wsFV4R7|$I!!WF})W+6kcSGi1`uxSTQMgOcrBTMwdLjkU>zv8yrjOT8)ZOB?%Bs3%2N`-7!_<^jzo%tu*@Rz-LkKqZ}SEy9^^1#1T7E&9>Zu(jvSJt08|hr7@Nkc z3H1T0zSA5i^zm__e}q1>d!_*R^cSWc5OJ0Bq89IMF}FbbWZnQ0J4r6lKncu&i zxl|^~Rd4g7M}Q|A6Z8}C+31@ZKMW0BJkH+0)}a4l9?3IYpHwV6!3;_$9`fZ&@~d}V z*$FTNw_t=eAUS<9rz3(0eea{Z-ejhZFOTa_r2EMS|4?wb0G;C84_7{O`oik~VnCh0 zGS`cQ(W@Ii4U1vDQ78SNM*r!D>g@4~?SdDi(E}5OD-%$}hT5tnwX;=RG`k z{P1B|=&#E^G8L>y{qT&{YDWU-d(eJF0j`H!FX`y5as%dYmiZ~140ttcqau`v%=i(q z2P13PWt+_1AUL75qk$DP%!PT}0-y)CjnmyQQ-`@W8?|bj;ILZ+S{3Oj&3#|t=N3ri zVAgm`b{ zbUO^x!qTv^TR0FUUjJix7{V%4Io5Hqb_2$x)0maXu?y-ABR5UM| zck>OCM^8dSIe_6Vz=k_u!5lu!w<{!n^cPdOaGthkAm1BsT*$fNM#E8^AkYdcUll5* zd9kF?)ZtLEy8;>9oxAfHL__)u+G?&(b3b!%AI9?iY>uUZl;6>ONvQI+@qOdB@>n)jIet$N4i( z)^R-SoWmw*e;hc59*%b7oW6^Z5sM!&aicTSVQB_bD`Eohv%{EuX%{Z-t-`**1fQ1I zmd8U1GALZwGx-ywZzkQ(>s=gZJ1OWJT4X4SX_>L09A5}R#7W&@Q;Za97%AU(0mJ@G zA7?YK*8#s_mNXzi=&Y$~!x-`kvvg}JhhPDId`Gma=yirGwUHA%WVDcA%ubEZ;#dS> z;$&}XyqJ#`c20aBP!$#VXuFMZxapJw4%58zND~}%bL^Z7Dm6>|5F3O^5NFM8!X%5h znoH$`0N?RAJTdm5dP5mhc?d$XK+{D5o=jvFFy<^+5uEdAE9cuqc>740Y|m`7QvmAq zE`?J|0JFo*cnU9x@>5gfLqY0tGCVgG$SZQ~Iekvy9zfzoj=ljdkx9INDNchB zFjHg}X;AJ?*p8mzh|EzM-tOVMv_%?<>>50{W~IWKE%Iz8JnmIICC-8^3pqCn`Hn9c zxn`5cgB}+^+O}`LdSuCXVsNC#xUWF3dEH!>1|t{49pS?r!xQ*zl(?OmN<7pfqrsOI zM5G;K=-BfWdg$#-My}p$+();gTSs13C(fWfd0pMf>mMk}wQ1*dVZ~5Cx{T0_@y$)w zK$|9P%=Ku&hMQ?F3>)eT)-Wp(I&7s_qsN2w!OHk10%;uKh0%GS@3Ut@-50`qR%SvN z&QJKo3R5sYHw6QsjV9B~mCe19l`d?Wscb5Ft|k+eQTX6CoCUXGG`8jD%g_;;9%6oY zT)Ge*mR>G?-h(oS$L)X}St7a_Ns{4a#MtHjpx?(&4AWgv=!FyrB^}U}fRcUj#jIiQ zcWI!>7^a_VGU|-FCi*#J7`9PsPENtz4s%?<=6iv!hElylg#Z0!hsu{izHozewYXRo!p**pCj{roNMy2 
z3&BwG#qmH5%3YkhrLd38eh5m`@VrrToY(V@LXY85ov5rU_lKggrVeQSvh-17pt$T6Cn|e|@xkJF zmK&a$roU#{+fGpSw%ba}4xOOv&``cCbZ24sQr_P}j|u4{{j-bB^1}WS^N>bKElH9_ z(!%3{^pfRdkgO)_$q?B}&VcoC7THC1lL>MWxs+T+t|0Fr*OK><8_7SAkC1;PA0vm! zUF6f`v*e%2=gAkzSIF1NH^^h;aoE35RhfoO2R>onNuzmc$h<}-jGDipR+O6GsR&(- z7MzX3pfkq!Hwk|<3!|B*0-pi+#L6&e6#q0U?=>q(i^tHmzSn#Ve@>e1_nNAI_XuYx`3216oUW(T@PnZG~hgewmT+ie6S;=@Y-iL>0g0)a@&;d%pHdU!`@P zqz{3)V>s15&)a_Bq}@Dmw*ppg7HYk$x;a?)KlL{kzyH>M^xT2B_lGOyE&bs-IHkYb zGbgR8eSsec<}owZPaj!9R+6>kG_sj&BRfDQIfsmq3&E`6T%a`5gHd@~`Ae<$P6ptb;x&9q z)_wR4!>0zn`{0#=Yl^>_ScYXJ_{8$~2}C~K2-g@DHb%v7_}OSakI(c%8K2b#e24m1 zuw)3Ez^d8K6XC*9{13GEHN3J&EZ(4dib`F{UuBqirJ%R3@&B~79~G1*n?azSob0xX zDF^Dv>7+Ti58{t+h@YhK0h7@7aJmbZZwShK6~A{CewqFyyqWZrzQ&2WMT||_3cQxo zVe+=8#-~ahoKI!06Y&GUJ202UJ(#=>=QS7Rb?CXsWSV@&0%Z$)hA7SJ2FdH>d=#&{ zNXKIFm82j7aHDN0j?XA`30;D^soX8_V1c*P@C<pyypc! zVt_wJVu*z+JT^>ILhL0Pkbs{{z}#V20zRuGa7?-=wX=o}#P?q_y4&H`bX8V$&F47( ze1{LeXqx7C&^7DnTI1{MjfdzJwOxs-%Wz$!v zeWQpTb2;&P+#c_NIvQ{}T>F1Ru@POd=@Ud{OdTPr%FZWDWv83* zXw25#2eJ8TSD^Zonw6?!QC8uou9yf>r=Py9)*aCLJBnKugFod#SNLUYch6gGHwf)s zF}L0SEuhy!^H-j*d0kNt{!g`kl95|Y(;NQQEsx+|X5_~|{^1Xw`0l-*o1E-u|MAa$ z{Ij3_#Bp$~uH#_O(q z&);2n`GI#_IKJzgGk0v;dfNJxnWgFW(T>r!*5;;$RKS-~D_X)lUVLvuV?$4GPer&w z>*?wYHvj2*NW)-FXZUqGWSvXo<0hhVH-}U`6{DBJxw?~w^V2vz?LLqNOg|{LYlI2)1 zu0*yEOne~R+8{{{^r!2W*Dq}dVqgxt(-Cn;d$x5iU*FdeX?G+leEy1PS7Rm9x9#4& z?I^yjw7DFKFeYg4?Cy}6g^XWCchmYA_j;k}|7ujsYC@W;>U$f&v3Rgfx-_~D>Ls*2#z3a06it`~}K zS?to4VJKGNk0|1BqX*+puAs@@Fh(}_P{0kJ)aBDgTT>s6#P;2L@4lwG!&T8Mue=iY zpsU+ecbuyO#-uJ)m#R-D;+SfW$BlE~>DvjT#-U+G2zV8Co$gjt#V)JL z(AfudkE*J6S(cQc;QuwArOS<<23q`~P^@`LkOqtjn&Mw8Bf%vV#_#EG*&1VPi2hdO z#d&}=NEh*l(@XK>cu@mWH6EK%Ayk><$7dqSt13zAuoWosa z)8WHQpTkM-j3i%b7;LMIv<;?Sf^u81H2dMNe+?ybzdCY6bJ%LbO-q(Eg==jNix)Z{ z@PiTci;U3D7N3oZ15A@O13u^P%`zuFXMtyv;_=Jj?QT&A+spQedj4$UBDO`bEeaE~ z6Wc0UjSiezSXa8gtAOjGt0T8pyiLYS?mM10uMm0lR2B$OD~G zjnF+r99gSXn_+$FwlQL}MQvN6kwk5zA==>g1${yE_10=&q@j39#L$8(9vTY|w=iF# zndgSn>Y1yQE02F*=BqF5eGtY=FEMVGH2L94dfnboCe#v))z`=HjULE0(Z=5?HYE%2 zdWHSVmBzL7IwP8hM(b*;qX{A#r;~B|JX_55rU6*U7ns?1O>05y{xF7X2YKTHOo1Vf z;X!yDv+pL0%=fX}1dudtX z9i{?CA%XlNu{|PUiDC+z0l(J+Pq+csG)(MMf@x`S*<(ffZG2}j-c4teFu$(!0RN`z zA$KVrX2~qStZ`|W(@zWgIL)2Di6jAc_7fNMvm?`56LO*Z3-+_<$ccB8TbmPUYs??; z1$m}-YT*X*3y$)5$5QTAjuG-VrkCrEhbLcNzV|_QNVBOtM%cp`Fb%O`NO!+N8v!$X zVGYv_YHG;)3XhFuIR6Ig({M_EUks<2%gfd$p}Aj~FyQTA&MrD{gu?SszRsCKfnA3C6^ zRNF%^mNRY@Xx^mCTcer!(&%^d7aPeqsv|sWpjXGW+@-}EIZ)!^q|{~3QM`an9W>6T zZ|1VSd*#P2LU3I)-G9;K%jc>#E#UGiYXW)9@*e+&&mGcrw*ST(5so+BNE>nDg1&HN z0l$JFoKYBe?hSG;(_xFmbD(K#cltGY zdp3I{n}s=hBOen8Ys5$;uo2)oVswwnqp9meo4}kz`j&v-GEw1ah*EJ_O!3|>oIL&l zpyb}5|7Rli2exwm=zjVxV|apja}Vts-DiCGIFAH`F@bOnjUUW@nJ!@u@pwS`mw;3< z*2$CPnDKr(&3HfXpQ|7$%(y#x&`E!5+Av)){^IC8GI|bZjLOPCkBt6u=P&1}8%Bh6 zmjv~Jb&suK-B8nS3$+^+?0h!9^F^$id$?+VvWlF`jlMWa)afH+oae)^&ZMHGksK2A zb&={6M)hvRB}opL5wBgA7;!Mi1vU*>7JbYETSZ(4I@^L}HH0sjfbLmOOn9lCIP8uK zU^g*K+koD-$EpGSbez>>yt*;g7!G-(z9`x^79xw7bM3>q4#m?xewH2&(K3=8D7wyl zEBA92XALZlpShnKzpu~JWd5SR&1Q{nX5p`dKtG4?_>J=s(?@1s0+@UOVDTQY1Wo4! zgy?=Jl^r7bvz@AlPQZh@EFVi!joFu4tm66&CoMgasfboZDutC&9v$6)j|5P z6pc?Gk#0L{`zX6Qw__vw)65G)(j_w&4(}Mw-N-(>G3SwLH_m*FHQaR5y363-jW^;4 z?4v%O1M(-pg{~*t$XVp}jDK{~BA^y@WN2k=bwqVYY?GM#PQ((-mw8a9RyGu#yjmW! 
z+Y}%~(ala5!<+=>SKMsd7kp?JTC$ptC}Vc2D0R5+R-U$Dc*llqr)^uiIy0E?w5=Ya%~u-#;LD?&+=pRJ_;lW?+8J5#@N$?4}9Y|hVghxr@(LSnGua8F0>*^U-|MJv;5-S*~{ zb@eM+E6>@osJpA7b;<9qef*Qdi)cOp(@R}gsXZ#27 zgE5ge5H)Dc_u?9^T<^r_(|v(H}cy?NktG)Q6UcMP&6K< zKflzpB2I`NipJwnc>RXI!%O<2b=7bFB>FQ<@0UL1!&FP9{h{O$sgB+*eZkbj=C!FJ z$w0EVi&l>A+b7lCjP=|IpqFDFRiEKw+(aJxJgREnC~M_)R zkGn15k9%bsn!N;3fzidschk`J=GLK|8#e5e?ps%%Sg~;!1Qtx&ZN@*JVfh&%OC(v+v?rFJs@pax;(5et^6v zb({MD=7eS30$x!uJ?Q0_;6nuZ@}r@)2q}ivq_3t^bsacWe``^8Pd{uR% zLigrLQ=b!dM?q#>+*o(;EyEbr^>>F(qdt0{m9B2PxHFvg>jJ_*T6owPun zkSW$hsBpF71f;Du_q~N_+X?C3PK(|d=EaO`-8ph1^7oe$kidn1`kqM>za>7_m|uk* zJMb&}U~g_P@f3X{nTA1&r3sZU0J571mq@u<*)m^%I$$SKq4)|WI0>e|`EhlRcpY5_ z`{X(@yAEZHI|^l9-?*$`I7Q6VQ}%l_ zxu-?&M*nE^<$g{dH~K*4#z>Nc^UFf^2$KuG?2&mWQ^E7p^Mk~9a2b0)>P}>$?BP*# z(LUddG!V@siY2f43yoxK)hKZY+0YU<(xpO1?hR=`NEb7YO4k(06f^ImzZKF%wpfTj zSbTlq3t#JJgkfoLR67K}OOyjv~La>$4f}~81uaB}ba{n?qN*}HcI~@+E zqsncpX0I3@9yk7R4|V(ODhO|i;?k+e=U{vs+FZ-q3?P+sIn7A%b4*YDyva0oGU35K zsZT^cHa^bI7#+>M$s3~&8>`(_a7io2>4BMt>0#cQ@dx$}T+Jp9}t_S2b% z`Cf6w>_3xdOc;dG!^hzt7zgNoML47B9NfBkMRTm zT4^Ld}}0+lL^ zCq>*E2oQptcc3Bw;NmB)NE{R&poy&%TIjuq5KD1vDZv%v^ilXfx&y$4P@|X6g_*RP z!%aSZ4fONIJlvv4y#R6oe`M9_rfPMQ3E-i4U;@ym0RU|rg1vJ9h&?=h4aco=Fe(r8 zPSUG5ie1t^KP;h_%!rbe^gr1dAZsQ;){OgoZjd#5xtQ6NRuLX46g=bU@>@-Gtvjgc zT#Ms2zK(6@{!o7Jj@vbrd&*E%yL05&__4FfZtyi$+J{8Ii}J1&IyV{W5u$V(E4z_u zf=0hyXppYuU6^x^UIopnDl6?r$qjMtkTr8*hk-j;oyoX$P99CI{XxQH4D4{wBzu!8 zg>UkRO7k(sBb9YEm8nRo-WROb(3aZFl>|f|qcDg}O6hPuz&Hvj+34am%`37lea8;n zm0i)iW^weVOINR6x?;^*I)aHJdpmdKezL0*F0!2;+&FmC;KmIr;SG7MwWN*Sz$Spl zs+qos^x|i(gY*Kw9iP1hy6Ht;vKON$Gx5o3=*@JkG{ARL*iX1ZkK42t9FP?rh1~Qv z)2g{aNE%)#7)3w5&bXE~0)74qaDM|c+q050Yw~_p-QHE~S&>Z{?rom^sCn;sGs80W z;iL46&Cr@N01+xod`$rh> zU7Xh)%zcYzKCHXLBeeD&Gk2yj24zNgnS|ja9w@W&`$R|pq6|@Hnt><&Dtj7eStIEp zgJd&}izLv|Bvs@DRh4=WUqF~)K%F$!Ky{nCDZ1dVcKCHfB)VNrP)oy*eE^n;Vs>br zX1q!y=yrf$9>;vk`WPMp)NST!8-A%AQQ4!$}kBnq0 z7d34+1B5{<>)Ay5`Z&l^Zv%x>~xnEKK&yf%^9LdU%O_4RG|GQ#4sEf7ucLl4|5jH7@9hd2%-NIU5#uZjs=Q8s}N zS_(dAIrLq0?i3D`&%s-)WiAfPUjaTf?=;4jbX0~3S(@(XU((;!(wM5L4ks!TFxU{x zXWXq}ezLg}np75=zLJBdwzAnX&Ym+@FXfM)*n4)A&DGGHiOJ^AqKfk!$6#0J$h4+n z3=eQXfGo~-EQ{>HlqBk`EXT_HS&lLP5^MmCR{p&59LHaj*Dl4B!T3IJisSDtU3uET zf1knMIZto+fX4h5=xHIYi%N?e<@c03%9GM`-3slJOr4`-_~`Ck2ieDR8~fSoGq3I4 zC*5b_LTVZW&;`ft=o8PbbzIA^|$$hRzGoFF_?QAjM zPu~#bA3#0vEXatT0~s+&E)p_gjS4IPal0HgnIAJa?M&B&$2P6Tg_GjW+d3UCE67Xe zyRbafL13zaDT$K2VmL1}dP;=GQo#{l)3aQ7%*&4j5z-VQAA5}NLDTeMG0xmi_;`}f zqAup6JTLmwB|a#h&Kz+&?Pw9m8tM~uH6E28rL;D-q?(h9;!U+)2Z==^;ZQ*J`H61= zhNLM9W@WFgs;meqet*=zWz(h&8`iAJWcvFzZQZnW%jOM38-_NXwr2gB_3PGVR%ceP zTG>C?KRB>r`Le#Hy|B`|I+rYlExom+c~Mhi1GnF(tF4Ju&!Z`CLx|+lHlW75>=RQv znCTr*gI@NTOu=QGzmoaa^heK2?Y=a%E_DQ6)LnkDV+?++UsrzwUdHR?7u)6MQpW2k zgTK>1!V~N#@HW#5Z&ZG4$)s)|NxJ zYhI`(3(zb;G54DyD84oi>|V1V#;$_j>GM#)cz+p`iXgPo!rz%EX8%lH5bJUJHLg>v z;W|Z$)XbiTB#`KD_L;CN_oMpv%=i6XfzdkaT-=U^asi#ga#>g3lBFHJZ9OgNMcs`- z4@o8CwTRn#2sWk(pNHeNhOotz0Qjv{R75MbjE|pp-q~mG+_`;wHoIxl_{8|cg%_N+ z`@G%fkDa~i>|N)cv-7N-XN`_*AKpHE=8o(c*)vYxCRPC~feo+#)&Tlv0Qvjh2KFyM z8TOyrDwtMeC|MpaFL<$CYfTw1r)Cb}yL2|bQv*@pZ0k+Cc%CIQ z&$207I_B~LI(H4w?Hf3qv(8JBcOY*y4D|0&&RbQ(Cf!apXEsLhka&}W0x_0!4O1#o z1({O?HZPlkB1oRUv3MH$mRPKALR0JUYa#-ak>s7@c5o#wf)6k;PTeqx*?0h%mjMeT+yTfL4*el)iVcH{^>AN4cKv zBICbwx1sWiE-K z9ozJkoR+&`R7arS?jI2$J0G#e_KDf^Og^mEn8e zMf@(bKP>E3yh-|<+�ugFeRdHg~_nPVzEO(Ld71%`z|fcy2SZ%u6C~Rqj`_cd+W& zmllj!pjEbDe4E^_&Dx|sE!>z~60D3ISw#;+XAeEUA zplQL26H@=op?#zKa_6&WCT1R&F4;f2pMA0<)0JpjoDsxO*Bf&)ncHw$hG0CsIx8{+ z3;L={4Z#8h3ox_@lT6d7nahM7*s-a+0hlN9@Z@)=n~E)^$ZXyM%vt*inavwcNz?xY zc>JiTCrlIjSGwv^S16s5u23q!kycJkdzgJ2>O{J{yaez6uc7}Bm-Ihb=XF8q0 
zj-oTlHzvfWPsQ+2CM7oJVRG@8gBmxB7sF2R=zH(rkVV*Yubdu^ik!}VnfbB<-KCe_m-TxX2tM|?3lx(5i?cC zs{$&|1dW;K5PR}n;)f%!co3a5iumN`(@@RlAvJ$@;^j8Y(?{haMg5x|lg~44QIC|{ zHy@v;!}RgFM$W}FjE|wQ#wW|CNW_qU!62d>mx@Nv6}Cp8CCkPZp4pi1M*#MT68|=<9!3NOi1aWTkf6IEMaAU#i-t%5#a>#C$}ov7tU`tq5G$VO+>2 zn^1tfQam~s+06aW_XE>U4i3uQ(;wPf1bE|@MdO2^;j)Ln8;ijH&Z1G_m`cas2fltF zvpgDf-~sI>4Q-DIMxrDlIxxQ>aqk|9@$=h2B9N#E!GnIjp+s6}xUByjG+Ba49W+-t zB?qT}_uZnNKls%W40@35pUuv`TGrR%@4|NCSNVQ-WLh26%9&f#t*G%p$A+*SXzioR4N}JHmaz$16&UY=%7ww>T!jRpt*{2KA;9wPLF2VuryH4gIFAX zKX}6OKxle&H-6`uAp69S>Mi*3AYTN@QTr7YR(Z&d4|+< zu4Vo5+Sajveclcl4wZRsKlnZTYuqXUwqeoYMa6fT=bYPowLB{;kByf1N$?|&tW$m@z+F0I)1_mgz~Y;z zP|Z;xO3S(9^l!@;;#`6rC})GSux@^xZ!>|uB#A0J+7N7#sTrP372(Nb%-p_9F+w*Dnz8Zy}2%6ECv*+a{T@1t@zFojRb2g=#T4=F}z|BoY`Dq$Gu<7M54b$7JL_Uhz0 zWYH!NZPm?J4Sn8d&V8@&&W*6pXMxi{gU@4IrrA$VDqi_Y#VZ~$w^XK5J~}#jd|#;; zmHR?Lcw+fBounG0dR9BxDeio}N-;i*i{j@r#Y|M%Ensc>{LHMd^X<= zDxjs;?Ud}4*(dQ(fZA+ygkMnG`5u=;vQZn5`lb0NnQlM$79JhR)Lj0qJrn1jch<=E z(>D*TTQj&~*;3pA@Xzb#x@oOOD(nH}pY!brrJpUd)OQS!>Jzbsn%W9qRb?bDNPcC+ zZ+&Y^Rz<3Oet1i?&DkY<(W+-^%$MWQ}`q_~3OZ&f~7cLKLm@JD;xv8bu0Y_~Y+ z<}u%!?vT+)sNlt)-eo&FSnBlcF+RQV$TSOFESf5{Krv0k|KSV#@Tr zfb*Irji8^DcD@ZB)?hY!^A9prKX~d-j%57n*R32}wp485sRo}DC(zm)PI2;>7Eb1V zWy!MjZ)0966ZLJ0qmwAZ3kvM1tg7^!6xvmZI^{oLA08(EN$;m$0N&`G|39-m#<0oL zAnwk~)0E#nMTQ0G(`_bwx}3b0adg3+x}t{_6AGQ_n1#(w2|&eRn`gB<9M)=gobU6v zBnNe1hz>NrUz&fM5ug@t;rYy<^{A5(=Rsn3Qm363SHYZi=3Kaj5g<74Jb;GyW4lIo zp0RDy#zJjHrTtG0hfm?s-rR$XGllfP;{y*FhQD4 zGiiqcv;)_HO{@dk`3@@fkly@*jOYhX{YjuY>((IEN%_NK7M%;s;&NIfg!Z;eCfe!~mA>eyfu7SJT95<(L#aztl>?T`WyfaUN&j6sovZ}tZY6`6{>__K zuS$2frjm)8XwZ(_p`Xy@^_VlI#ABJD!KhRxZNX#USarZJ^Mk8YsVD?5U!w@-lI{xe zqpVT~&{RWXC|qHNbHRhH5JrONpuvi8+D)gA(c6s|gcM{$DdkWswT%drj|cJ3IB?i76ik!wGPE z6X4cK`bY*Nd@M`VM47^9rn{>F;6{m!uvScsv&jrEh7rU?E>0qk5m~O2x8RX^B96dQ zP6>F)?s=irDDf~hVZ%8;v@Y1$KYSrOtGBHLIBPq)+PhBxoXukZ9Q}fA&CB)qU_sB& zJDUI=01F1N_#?ogi!1{?T9p|b9OzEfNBusxLv~0c9Soq+aSJ+HZnDCzF%fp{2Ete! zKm6$;U1&g1jn@Au7);T~E9PPiyovxJ0pyC&#u!`KE3o8@OXecU;%t%w0u9k6U_E_}w9!{VaCU%<(vCSRYHMO{ zms2IQp%T^*jL_%xsIr7;L1grOBgk^gA|y8|e(9XUK4Cp2tfo?>F4PT;ZF7_lxv%{9 z&j0@Jo%Ekji$HtwYK7yg;6}MhA(8#lpWy$GxT4@=1s{$ z*IVpg3D`@4o=WGw#rr31D*>_bgw?&#QgGfr!f_)5xN#6reGNI2EYB=GbKTlZPr5r? 
z;nS%TRdGAEpRu`bX>Ut&DqiQLrxngw$GBU{wk3;W)e<_S^oDDsaBn*mteO_a(0F0s zdOZ|n4vljO_^lCzwZf5H@etDQI;nz}1$uBhDil?8ZTd(i1*(Fct|F00X)z=v8y8F)ocimHR~6^fL|xJvLbJT3MJ$G+Tgew`^8v7dqS-sj{hhp2l_=pV+l1_XB!W=>+_S{b6SN zK3d7o$o>TFqF)HN^=U}faN{?xeXgxfs^0Ho^X>YyOQU5=6*@Hir?UP$MXPrWu>Ml= zQ)m2TR}1Xvr5*fF_UhfnFH0BItGU$&EW7KULVtr8f6DJt6&d*Ql8y$^6EWN(&DX7) zcJ;G0rCkUcf8Hex95G%fTg17+61$8Do8F>aIT&uu2cck;vJUV4DV zdBppV30H&|p{R?X(j@BhR81X6OS>^1979Hbg(Xm^i9i{uNC#nPrDYkb-pn5$qI?EJ z8_0yo2N!&#P#VNZZ(UtovM%WlBvXN8Jy#f-#`)YRzYERygzT<@(2fRe#)qSe*DUH8 zKWDh9HNB(wn*AzQO|Pq8-W;xPsqbirL<*Pt(AyU9gZzyCk$x4%KQ{D$k4Lb=gnmSs zBrsoeB%dl6W^7EQFlF1KFnCo$CG5euWmT&z&hEC6j6IM_`24Aa#>ZhQB6=`x3U@V= z#V8Elj&;edhT58VMO_2)8y&&5?TJqKo`ml^XydA_Tw%Q^k&UPLRYP4rG0!-DMqlOq z-y`~;Kxk7YL!VR(qks|lcn)HWr+^b6!2lLPM-^2qFZ=&X%ucB)Mh zJ8;@|=>m-0IR=PVt6J_fT|~0Wb_sEA!DoqVmrEYaEW=nt)P_DZ3q4I1co-1Zzbdn0 zd2dfL)tE{IK)!V5bx+KMR>+rZRL!shU7cavj4a?GKI73F4SFER%=MJu3GcX*b(&jbGHurTb+lobcA{{%Vj#Oe~B$1+Pm#+u;w|{hU zwEw_fWAphJpS3-E#@Xk%7Dt2YhWKsxxp&>lY|pZHulT!VKm+=K2HeHo;FhV2Gi`QS zL?0!}lNy&Nf%EyImgiv?o~Sf7h@i!)2bLs!pWS|#e%^>);YQUvlN8rg=g{lue;QHt zhVgp##VW5&*KOXa7ms8ChtHnpP`9_fVZ;W?`F2u5J?H<--diJ>$sWnh7WE(FQi1$&LX!rYrt{o$PN$WxFyETJ zthJ&+;VbvXXueU?Rhvk;@6F!KB-qP)xhKp|GxjjFo}dn|rAA#xbia@22QZ)@Q?*Gv z{0jvb@l&#g_=(T!_f1ABDuZFg>v1uM)9aWZHe$EgFTjYYRh!+^>5Mvo5Uj_F{JQVp zUs=;grhCnrrArABel<&1F9pVRa7F(z7(TicIJ+?*3`6ByWw)l z;u|QErP%NG(=9F8;jM?b`F|{u zjm92%zx7g{+;HvZ}G;XfJgwgyf=MBi=P zySHe>vt4|!M zKe7)!!Hp-xc=!;FCs>+~Ow8N+=zmI^P2Z-SnH|9ZlQ!1XGW|3>3vjy_w;%I)oD%Um zY?5%3qv(8tZm3ax%i_g^2zMsbbwNdWe;IQ?eqe)A=@Rdl0jW3Smbe>uI0RCdHEXH3 zn6upX#)>8_ZCkwQ!WE0R^wgm5#kC*z86UBJT1v-^$8i3v!L69w-tXDr=*eEN{E=nj zTY4NLb@yCdm`rob!Z!o6Je@|q{H1D)m$ykU5q&u7fP+yRTFu#rZX5AC8L9AjT(TY{ z`UJ7trCm7j009yVx_5=VjJtP5GXv$%x@`J{`LyH;5?9H?UJ8$46`?T_3PnReIGPFB zuy*ADIv|x2i`JH=RJ^vT(}ZXkqa>CJ;hK}VR(X&W=al|#ny0$!aCD8>2<{xBBY&)ti2(|lS_TpIuUy8ixktC#hQUsgV1 zTxxm7Fo69XJYPbL#L3poCZrBtm7y5B$K!KLn#;j-r>2{f)g%st6IQgDsjY!0=UX-Y zGKyfW7}#b4f*ve~pNnY$UoFk-_{JAEE9fOrim5e(HVi|Z*vhFOfv>s+%1*~L0gC|#gkd$I_8_>+Gq8gvx)<( zuu{6LvGhXu>C48-$0F}rwZ3`FrEB?0NpD&#A3r?*(7J9NzxT9jFWBG{3+3Lv-CKH` z)_CRI_kf4E3g!rNqP4)7Co>mvS^y*U!AL8pm-zf%-(<+6OC*R!2mu_lp9Jjwz@!+p zlXBOrcwKchAwAuT+Z$7LE%BD>+G?PB^ZL}zgVe=TZ;&Omg~A4u;BYY&d>tqQd$2q# zu!Xbvk4Na*xj}+uq!T6_;v@yL{|g~)brd6Q$QN|Un#XCEbU&c28{mWp>-XDs0VNQz zn+0X*As$WFJo>@8kE6fwf=`2la;gtHC_I9X2gXPs5DlP&(ws;T(%I3{lt?9jCdca* z0Dt+GTIN}r#t>=}cPv#nt*Ox5lzYyVFR46J(HYts8p#-Ia1KJsBA^yEiufqPMgBpz zUU{W)ZRaD8JX>&~o1H~GL^|zF5{<{-Mn9h#qGQyQ8sS4F<#J{mfJLO6EF~Y#c$ZLJ zj!>JvI9?-ZHp!GH+ld)&4!Knw8Dd~QG~G7AqXZBhdQQ_6koFZNssPYCPvHS5ichIy zAQRSsIGAxS?F~02{JwA^*u4P8;tx@w1z%Ue`=)YCp`_Tt1*UK zq-8~Ysm7@)$H_L!(+o+uI^d)CPr_2PH#!ppuw{IW;gd^Lfmd9?WB{uKXY9( z^?J~1k-6FR#7*7T6Q6F=eYS&$R_{mT3#y!toA6UlH~^H-03KGT40JgIy->swY0YY-CNXzKD4pwdGRlzo&^6&<2fss@(F#!zqtVV84s1lJAYYz zr@ZO-7m8TDJofD=iUi%1%|-;DK#O8;H)k4M3&x!mqACX6<_U+2gKp>5crPB0@EF_0 zF|`T#f8N^`gm|2dRKIW2KotHQr$3_74yYhN3@0R(Dsh|?QK^xTk|vdsrV(mUCl)up zHf~%yvYljOM*UAZGa@mwv9j_9up`94=j*j;LV)IU_V?bsd++{M0yymib8I6_m{jjaTv3{HpO9-ee^%Ay9^`MnR!;3bY`RpoRzzJOqg8 zi(%2W5ONBhw8V(hr#;}?rEkpG1U({`k)B!?%n+#6Cf|wL;d8B}JasfsWjyR%C^isLPS`!+~_g*h_lCjl7W*PT-1WkVxP6TB2ERRGDQS?R^pq zI5A1P9|p7!7<`H$**M&Q19>|19fBkJBX0H7Mu8If*_@lGus}^cXUkj{Da>T8+?;6z zyJ$0alQCPgODrU#*b4(uvTSR!(yCQ%7^VaZnouP@lge)Us9D&uOaIm=W<^bF7I)A% z&1zn`ziGu>`u`*BDcj*pRD{dubCs`8Ux<@CbmOc{FBXnSBFB~IPSh9S$sa8>kJTg# z@S0uhsXiTW2z012Qc8Ulou_Y4X^5G{QJw9nldP#IN!;i9h^cy>?+pf`GmM7p)gUkS z!XQqZAn^iG8~QTnhYag6L&91ywo&95t6||x@jSl8(?XY-&$%T@2Jv>WI3AC!GnpJK z?p8PCQk$_|#Uj>sbEs}%`qhQHqv_48vYx}ZD{FdBs&KYT%BoAy_PR2Nc^ij;DP4HHlQb*6yg{ 
zRv4WY{VM`-vEAHhbPgMZl)!H{6D`sJ0C=2*m<4!SM-sd}U_l-jx}y%*>2e?#XZemY!tyJAYsF-#5E& ze=`fS`$9tQ-(N@3W_e*M`%~dwD+?CbVguIh8giKLyAjlPt0+ zNFi#bFl|9w(pI!JZA07AcGNLHXa|~0^JqufiRRM+T1bm% zXIf0V(5|$EcB9>C589JDXfKM;MRX_~M)%O^^a&kK$I~%%E?q$v)6w)A9YUwjcl13S z#}#xWJwR{Kw{#v|Nk7sL^b=i3*U)412wh7{DM}|&jGiEe9;L_W8G4eQqNnLoT1L;) zv-BKYN1gNyokTCu3-ls&(dYCf9YxD2PAjOJ5|pI#DMc%(htiax9A#;5T1B7HYFbNc zXdl{-_NCkD0y=>9rvvFA`hvcqJK4vTJeaGvnrr9}`jdz7P#(s^=`SW8!6UhrnMZwn z1ymd{+b-4?DWyPx;rt0T?LYC!c`H51Batw><_RLH z=}!XT*FdR{!#aq8Bbmn~9h92Irn}bgtUDUBVD+ZCZHDlc^q!7Kz(K>4nRB{M=;Cox z2W_(pXY+}GGh$aF*!GkwXLRw5rY*;LmDcAqbb}Vengb>9VdPoQ-W9&dmdYan@g8-z zpOV01y%CZ=nun{MBHgN@L%d!-l-_O>{UFhrRh(tWa@OXHE11$bZ%vV^=qTlwTijKN z-j1<}Ld_)gIZLNp&BS6y#w={UBlT)603;31C0fP4W{~N zs&qxwY&#$gy5edO)JVmyNZ__1QnxD>h!96=bVVO+`yoxbzCS=7wr{U71LNt}!>@@0 zlj%45ukivCbJt6+DFah;Hy)19P*~sro|9n|K6t3+WDoTiJP15lL_Gr!AC1R)kS?n< zj|Y43FUt~*|M8HfmC+mTK}iL@y_yK{5DprkKj}x|1r6n%AW)Bj22D?9QG~R^E2rKl z$%o(4rfcHbrqGfr&NPuQWYtg)%kS$27S^QNUdr0^;iad`SeQ$ZtA`| zvQZEA)kQjTWDaW28=**^%c<4&rsjnd>37$KIhN)C3&U4=C>*t zNu00i97&+Hsvb$AwPsD{A+XBlO3b#d-DheaBAHkBN|UtG+>P%9FsZ)c;B~c1_fSde zwQjA95Ajg_@L__P^A$Jcpo+$2s!pB3%z(kX(q+n>Rgmc0XUV~vW}@=XQhGLBL}lZW z3pedV72{GiHhn~8G?J4xtwa?xQYJS&uEVn!ZHM&TnzEP?L!eX5Ho37hf=$`hcdt`b zH$Swhy-wrYRB2VIPIcHcXw|4rqux|(Rd!A_+|+GVcTN-E)M!;bO!eC|Y1O=c^^khI zg$a63ml(c94AQ1c>fgcxeaJ~H-J%5P<|I7~oSlB_VB;AaK8@<&s2SWl4eMYB4lbU? zb#NXH-kyH-XQLnNKaKR~K(E<84fSU?9h^Ol_2*m}ygCg=u}KdGZP{Kc1g5NPdSA=k zCxc(G>lP$a6>%hM>XiQ}SJA1U5|k&@>6vm7l#SJHv`;Socu_7Bs~c+{WBeEU(Jw`=&*bnNdBic5|6P2S(gckzaH@eD$6 z-6G5q;usJrkOB{hdHsaFJKDCNfQ+oGGB;utzVHNt1}vWx zWhQ3XaZpaq#M9eerlD6YHN$*3{tAYvb2-t~xw9R0kg^0as`D-5odA!)S zI{zC3i;igf(l_Y&`>*GIyhTv+%E)`I=-a7CB<(dd!&n=LcMc z+_GY6Y{O-Um@nU05CzjcmfVjEXg5W4dHFxx62uT}XzGUJ`#Iz;JN`9D0`swoGhD0s zNkx{HvaDw(5U0xnCa2f#+rqRKo)2ys_VRN!R2>aYI?HwsT8U*%kE1TC3yXr5fdVOD zZZ8o|_k<9KmxlN}t_ARe;ky$LUV8@bVd~f!=W`1cYWt4~(j?X7+&$=RI-!jL)nw{~ zZE2^U6#Z^b=ct*NB{js79%%luB`-xWUK<7}-RIuEVKF7ugovrg`@vSU4@?M36qzY) z?N-Qs2b7PS5;(UmbaJ;q)*zgn)ox%(kMX-am*^gRZ{Mrh@~b`BW>;zDrS*45h|uO5 zH9$UCMV(m$=P#8K!;owdVGd+VUo7eI%aS@whvTPJwqL?80B-4VaC%9TyIJD{4AuPE zS2gB8#ZLk>W-dSnPJDGW%K7?*tSy-2K+ujScGa1AsOO=Su6)V1k~;TXZIw*TuTxLm zqst!;Z;jWimMh3;W>VpMdSCSx9XwRj3UAXRnID3!_)U){@Q7>zqu_>1aW@5d#G7C4gCA-M76K(WsC6H4y{(sa8xu* zTu!Wq?2f9MmV3mnD)6%F!@F;g=5v!{p^Q%=L^ovTCW9i&uFKx}o9eiCZv5R>n--rW z#M{}^Vz$D$GE~HAAqB#Q$vmyTP;;%PzS<|h*ys9E4rUVV_9T?l)?(W%c(#6@?)0S@ zs`>&^?NKEpA3y61Dhs;kJoGp^f>N$3X@3iiU5F<}_FX&xQdy=?&8x0nO7v*c2x+#l zcgFd|d%`I>H+i8*GL2MtFLK5}Z=yJ)QmH~RNqJfl2X(nsA|@tAJBo~9EvRo+TD{SjTTa?`N3So-iCZ6kh{1-ILuGBGZEWKd zRdK4EgJUnOe4IkU|`R$FB-%+3;fJ2JIwNWJu#F)a9i%l`xZ| zCSZW%IF`bv0*|Z<%Ge0h<>u|$R+pb9N81!g2oQ`ay2FeFh_(u^e`0XI4gHBrR!E{9 zH3XAwz)dNH*x?yOalrVrBU1}G8nAQGU?%j@e!| z^Zj$dLh^I6^A|V#=K?qGQ&C+}LWSVBhOk+f?s?iz!aK5suzM=W;QPl;N~TH&JHYY! 
zog>(@bHb^w_jCvH4`00UgXE!+^GKE(xn7svw5;$m*H$B5JxHUAlhKpraPeMR#F%Tq z689nI)m*n>B6oVKziMdlpSt-8^La~AJ5h5{;BNC$Q_Lq(hb%#}(Z*B*hpMU>uRVep z>>1J-x;?=?INmyiJFz-RHU%y^E&igNx7^3{tL~gK zu>{ru8@=bQ=EV z8k-S4f8V*_(6q*VP>N;Vv2)+|=-3z87unizJr{Qt4Ws#9kfo)pwX0QMNUk_7|ERD@ z@gQP>)bmZb_+O7H$j?13?*hfFfnC=Uk@~Fbu@APP^?mwBPsI$goyNQn8VF~E6av#W zt$q8~QEUQaDY6qKj#77ZL?|tN zJ`}rpeWiTm)X{%8mvr#?bpIsl;;99}_~lN_PSj56pq=0O*PZ%t?DYX+$(KF-R{h%j z4Ui6+GmR0?n8;SjGwVv8rsv-+DlD4&4x=Sy6cLt3z4{wibGa=`EnzK)^*D$z^TzGT z7_t&sjvV!&S)mIO>Ea0#V5Iy`>PBKu()4OyQ$(Rp63@GTuHPz#y?=wZ(b6kMRICIz z!e*oD!D91D_Z5jlB$S0td0*3CMCg%2|8`sz?d`~1TCLaqS}jFCT+@xkdewDezo?{W zS(!D_IsOFJ((4M@$f3Hz8k=gVcAZT+7z7Re+3b9o;Lmm4=OZ8$;9o2fM?+5oc0eVZ zr1cS%9J8nY;D5za{My=YD3DJq(T&~OcBK?;QVGXpOvp-9I z_GWc$Az%TqY^jx}H#kk)E0?kos@-7Gr^QY1PdZF4PKu*VX&)5@{kXSt8`|t1Bow(B z)YODaUe9fsI9i|n-t4D@$E?Sg^;O3(Q&JpjQR?y0sYRwIiWhsSP&bKN_a_&AtX8KS zQ|pZwsT^q=i5oc_xmU}=UercRb(F<^^j1G$**N?7q@JYiv_9ilWG}r^uYSyGd8C`N zPQzVoI7?Twip-(3QBz!^3bN^%E+P}Rj>Xdor;@r2C z(YlDB>)vQB-G*mU@B=R;j06`unn2-*i&5&){_EOp#O5qQIx`5v;jI0vW>fkHJ-;;d zcLc=~Ien5e5VAU1W@8Ok|Az2>JWVD9Pyk3B{TSc*IQbfjpUKuEUIe=Cw#$ zp7)>k`J@g6NdyG=)CA)AwZNHf%g}((9tAoHj#F|nibv6619rgi!Gb*nX5@fHwMbP< z2Xc2EF@W4!cZXc5O1uKSH2$Df*J`CX$jc{Y=vMI}#rNc=Kzl=5Lsjxf_{7)*d}6$A zlaCL3`0tR|P}}g$@VhJSO^Pv-xrXD`(7DtAb7=Rt!frGNMI(<`6mj%xF5mY#+9sdi z-}<)>w!(*RhmJ%%6B=vwGKX4oxFkZZB^>8+))m-GCA zN_C=#wzY82-8IoJA4zI}Zu>UP?7+P#9hR(g7|oJhyCoK8Qzfh3pFk<&rf*|0TeYd6KJ*cjB@ z_mRK3q}GPwvS$13=zNBu;GEwcN1)DS`ClEw0lWV4F&)VY=cIA8rgJiJBBYys?BG33 zR|esXth)3)NUPx2)vIC&X!gDOYguutOO!CaAqF&drM(>2Z7x2XZp&%Ab$Dl6rEK`I zYkOA^nlOGZSUUT(l5uazpnqYhkr{G>wRnM}c;cVmK zP+Rx~`$h35LvT}9=e^k-CEwpFdV?eV@6hR)i${CZv)+}g*%vver@0~}K;4mdcs4-3`Sy3fW%G(g@t5^vp z#L7#lZ`v?5lgw*uK4UUvaFb4W5+R|ssfGS~5fnDyE`RwPlrXR-4`+0X7~rX0>Lvcn zI-v}R^!vz?0FobjmDt-_mN?+cWEim|yXnQ0`>HN@z*s&}MA#EOlDFx~#Pb{!J3uWD zXK{-k&|*?Hk|EKH?R}9rB?Izb@(|IwMs_?d$ z;Og`SUUp!h(7qk_3-_8R?`f@tNHxWmmZBXI;@FDl*-;8}ffH$kUbhg;w;#J$IZ#^p z>f1Jj7K=r5sXN<`yH%9z+|LSP-ehO8s5`5?Y}LwRU&&v3@ioETHkYIpFDpPmR`S*a zaJx2tRj~aj1N?4_ySFATWP7D>h;S4fUN^RV#fuDJAs!FI{m$5J9uYA+7z~xC^EacL zI>TPqgFeZ4FdPx{kFFa#>>*L4A98=b&NIHYqN+c8+F#N1Tsrps*-=U(#sheVrb2=M zvUeks8)^m!AYUjwWs^m4os-1k*KPw0(5%wA$^ZDt%*4+E;YchR;&1*8b!0#3W zP*$Q3iWVU--xkvuEV{!|WWL5RhG7e-8DJUjp?oOagtbGZu)A_d!-vdjYFpHxF|_U- z^OU~7&J315N;umxML%ZdB6y4`rl{WM?N)*bxoM%6xvn+2DK=%Je#}S5kAJvS#PIUR z)*b4|2i6^uJ%xg3f>*e@OHYpTDj!LpyobcJ5hBLYJ7PWW+Q!1Q>2{~DJ*miSbY`wS z1V<;!YRb^%Ria$H%T|WM;z~ZND{0d%@#Oh<}w~h<=V41701te~vD;NQj)lcHD z{?2RojVk`W;#R@&AFYwc5pYmR@HhBirl>D6FY17bTD?pgY=9V(7r>0Db@h32<|tvAl^AG{dO7zk zY8HnsU-F@oomnmGA+Fo*)c8rj{UrL!oJN2PW8TGbR5lUl{b*9wLrQ;{Yaly(Y%!tR zLVX_F$yYa~rp`jV;NI`8b3akB;LRRb6+XntM*fXVj;r7-@BH}XWRE3Ppc5-wK(g(d zDLnfaxREr=9%c5iB-Nv_i=(eylnQ@LYFB-B+2S&z)>|B-i6O2qVXx+2&(%3tei~wh z2^9#=_qu|MqcRUmo1aP+wae|TB{8l7AChsAud)A{#_UrY+^EF<1k*CBb<{>6@Dldc@Ky17TPv+V*wX8SJM3+=msQYl8c(M~4 z-##tH2%PB*-w$uSr93d6Bw`ENnMYw?+x4A&p?{M;=gqn=au-BGV$ccKQYfc5Y8Swd zxmuPkd;GVaWsgHIh%iuZhFWnPQ21LAX$%u)i{gVcMf!&ozPAJbXcXp2m9!qqyHcI2 zi#Iz(ad&8_iruxUGf%niHk5nsv<55yzzx%gG-CVvhh%h%rbM?APhCWWAY$qzp7QK< z3h)ICj9~C01v~F5{O;44Vc~?sS5M1M}r1=8*sU83$xJ{RyjLKiPCb~1GJUDJ;mkT9MU(5hb3nm-_P52 z`vvg#OX!1R6t#TR<<}JU*?iR$#+er_>hw$#qC41HL--}$n1)eJ?z)jH(=(MP%uo|V-sWgwBvM7o@vZTG8x z3`7C}CflYfNJB{!WhX#gfIUi3SLLbUVEM?Iwaw$X<$`AC3SQs6OKW}L%h2(*0LpW9 z-#9a}+^1HB66>Z(W4d>v$(Nvrdso<5{P5e9BDO}T-0R^>ZKxbpl*lH6kxIbRsW_0X z;H?8HESHROE38_?F|*o-C!W{|;wM-u6R=3Yml3OYNB+fC0A~2Cv#yvm>ap*z47K_` zsXQ~3*M0nLeXU;8xjZ`5qK<_(<^_iDg0*Ul^MhFEi7H{zlQhG=NmPOS=uyRhkeqPe zqX1OQx~(6ssXB-F*A%%{JqDuUn|g_m8sTYU8*RB6>VrzKGhHDLyx_ zUv*fMc?ub*mCaDbo99`qQnXnZ&3RP32oE 
zF0@uw;6g%Mt%*MYf!XPhy?@W{$WayZkp#bXAVjXE{8DQSMSjNVH7)6R_&QeWyV5uq zw1MrJK*h|s`}*ds4sj>iZ-TU_bYR}t91==gF2>iE7dB`u#b3m*IsYuLvC>gIQH2S+ zL#gMrki_CqMCuABjK;igqQ1XfiCJORzD&Ch;15ZO%PYKjH^YCFCS9;$0q1dmB}4z- zT004$@E2{m(q`K=h0=mC-#qcKej>mtb2W&ahi2DbN0-wtJK63RwOCM84$=`XQ&B{&~ly8Yn0*Cb?Ox7?+0`$RGS_w1*+(xA+5|J@m$#wk|xD2~@Uymfxv+CqMJom+j0! z^63YZFRt#>20|fO>K-SXr{D>n0V(let{YohyuxN@l*$ahug)5!A zhm+Zp{Aj>{A=(?=qE}3j+JV2(J{uq#@}LDON`s)es-v%Uc;pK_J)Sm{s*nQmfL0ti zWo(CEyVBe3Z60)jPXp|CYN!qeeS6rF^+sW)f=k!|{NB?eakAYz3MXY{Y*HysyamV? z=c7A*&M{Wf$_a0LD~8c)0wN2(=%(HN6Wc$fv~zW|%XWxn|E`k4m;zWJ0n}tScqH!I zn0nv4J0G$N$(KhppS6GJP#)F%VVgtXAV;X88%}na8xU}r6!5veW&CoW_(m`lh*`DyHKt0H%{g?ug?Lx||j0}NbynmOkr2VtG zb^$oOHQw1MO6%;=+`d&!0%nGL0aDawhM+PF0)*+rpKE%H?VyM{=Z0$0?(71vf%3~v zeE~F+)Z4vlg8WdB%LDGW(;|gClZ9w9xb79V2(uw8?`IlO&x%HKGIGbLdTWHE>Fw5S z^>_`oo)|&whUnX(h}3jWzB)#0L7t#Xb994$6*lDGg=VjmcjVV24Tr>TDFRnHCiH`m z7v&PQ<7(?`)-XW#jKav|HT zltKCG(2Vy&t1UW)h+3jflBGS!l+<)w<(nWs*Kv2K+oO(wVeC$JLB&T`9Kao}`<%M5 z`(?qFD32M2z)p@hs=gXj)nqIK7~Q(<@3r(RhUv>hQX!_jt33E<`7n{96v9vv!MQ}t z41sVQY2W}6n2_n8By;V*(Fan?4-!cSr=Xd`mvX{SMO@{iUNYoiMrA@7H^TG?Sf$9m z=NO1|dikHK=xjFsulSJuzx);8H`Ajb{blB%bgYMr#*l>gbFqikO54h_+=?5;!Ez&n z^pk)|n_BtW&>+$d9Cpj%dgOa_PtpIK{x5f=@dM7u88_tK1cLbPdAY)pEBZ%{+FJ+t z(BB=Dk2nxA2PXU`y%GM^^1f(^=aKr3WCVjEv zIruyBj5d*=qd&NEDRU|Q%Twb!{m@Pm^Psm0l)1?3+bHP7N0;<1wz6PT5t79hmj*0u z(yT&qjqHCgN7^)f&brwW65{}BOcrXS){6XLW&WwkycR2b#Ztw|1q&JgU#<~*@uaoOt_j^;H zLu7)4Fj#w3-6&GJ{6@xiz3Pzd5u7A{j(LvkzCSbo4+&?CT6YuU^=_UYG0}_KInhnpoO;k?P80`8Z$Z0LelU)H( zQlcU5UwE>PYP;_NA<(A+fmsn(KaU5y?@w@ZOYKlN-@fauehj%YnQ-+1^0^tDhICGb zKG+!8=(}-Ea`AKNTM&hhK{~$l-kAVAcKTdO+?1W#$L%k>P;{JC;9*N}Pqurk-6CCtnWbBGW1Km_f8SXZ-3g- literal 0 HcmV?d00001 diff --git a/assets/font/JetBrainsMono-Regular.woff2 b/assets/font/JetBrainsMono-Regular.woff2 new file mode 100644 index 0000000000000000000000000000000000000000..fdf95dde6b002eb2317b4c17f534b307e5153932 GIT binary patch literal 44688 zcmV)KK)SzoPew8T0RR910IrY#5dZ)H0u}540In^Skl%1nUhM$D&bc$Is~5sp5hJ5$TL~Y zt!P>5A~e}lQpB8uJ7(aKJEAAMOlnotvx%KxT&jpS?D1sCZPxEJ6lS01+JJ5ui6_|>t|-tK6B$szQz z2Jd}v0k0ew#$xcWqI0rG58?%YBFLO2jIAE0!|QDS9(b~ffmhB(Oh>c!z`u{aaTDKc z!fKv)>xrLAH5uJja=sZV$NaF)(ai^X<2X$N3n5OpfFoLZ`^N$YI@A(?&)qw^d+`^D zd=VUPxMfVKV|j^=4NGcnq5|?3Pr)V(!JhLeID4kPrd%-Kr=tjUq>oE2d-}9x^f1nu z)U}_Pe?zu1tJM*I@g%+AdCz;`O0E}7bms?tZoT(@>@SHt{-8IwK|3Oy*fpaYyD(}u zqcGuS46Cqa6kruLPEv*waRr1}ce)n{@Y;^25(Xcg3Y#l2;&vN%(`;$~2M8>!ybqqH z8*LS)L-k2h1~fVgf)mnTdZ}OSb(^EVeJsUdG?MX6re7v^sa0j}n>7w)Ku89m0-Ma< z0jBDKd$G8jhQ-43ET_SR4(AW>Yx7U?K%xXLiVKe8j=SdI-etdwdc+Y&90C#wsDL?> zZD24Ptm&9T9lBx6VpFGXhlMe}V}&`z?VBtA`%?8g=iKn#>jprDNI;~5WB7E^_k=7O zDw>{v;tGUAHCrp2lXj|}KaM!o?((;mQ zvf1*>^@=K881xEDX~Ls!PzVL~gwa%lO&B;|;s2Ye{e1u_$?uV7($D$JRyzSmL>#Kb zQeq`DV>^}Hh+An}T1xdVy{)F3bQ6zgJb~kJ9A{zB00SCHgI*v>CMSdUA9X1YQ_Ctm z^%ux4P{1V_uH}UFi&Dzz|tRSK;9?3B(zSOD?O?rOY}R_CW*YGbK8 zv(kNCORcogpuoJK;08%vDE568RsagZf&c&Rf6Jd0;Y{B-Bc9>!OA$n6_o4^f+t(&d zSQ6EU^7lC1u!CZS5GY4N{uz2T|nZIkrr z|KHMPclO^;`n%f;rBLmLovF#3-1lcl0*=m{&`G*`lDTd|W{oP;w z4tiE8{_XZw3iH&dzvw!`VpDRJg~ zmt#w8{;TFV8Pef7ko$y)KWokRD_t$H^O(6u5@Ql021OA{(=S<-rsB!4HL6cpV{Y?OJMuiJM(Ch z_PjfjS{huI$5EEuT@7)6g%m-J-7dd;hb1jaX*eKsW z)&1c!*;s5wZeRR&N#w;P>RXX*JMSC#66oedBHl5Qx;Q}m$eVQ#J-)^AT^z>yop4tX zarbwm{lWC-g)=<9K&U4YOHgPG7Kb+=5J_YTl|}~`Ad>~LIb0rJU}$7)A~Y3=B~qE0 zT%lB{%{5vWEhQ}jVz9Du@(PMd$||aA>Kd9jExfjluAaVup^-5}Ai^Xvg-W9{5GISw zamMBG1tvmMGjj_|D{C9)nlx+Cs!h8Y*DMG6?RxP6q|WG5@B ze5PbBGBGB>aPvCOjRl}ZsJ3)zV6SRYGR#_oVcjFtPm!)|{|9ONRd;{x0qSLD`= 
zG~H5LZSyPt3>3*jKH@vx!&`iY=NS$;;utfT;*_MKv(`&&Nh0Ya9toCEiRjrCc9nbiC2%Wid^!1ASd=MObr8u^I$>2KrA+BMX>k6cmFqS<&mzMkqhoW1 zaj~4{mnX0VzCJ}n)R(38S_jSU!d2-)c4uPVTX27W)PMH({{Fw(QMFU$_MI*|Erj&L z>FU#etL>fs|0&2Q@)}9W6VgIH%p0DPUvR8*H9EWAi4x$iCxvE{xj%*MYOS5AF_U7ZP5^ zNAOqp1n|@OH{QmV4RS^Xaf4ff;@|`Zy9QSWp9Gl@MuCtZJQKErqr&5<{3?2iSez4g zIHFm6DDjeBdLbQTd!!ucu?%veyq#K=`^rMzBVU(G6osObbBbQMqm-4iDBYu6S4vc& zRn=Q9)kmtWKByPfS9efLG1u53ISzw3$^_vFzno-7VXC zur6Bf5ILZUR44}BKzVeAPNG6oQ3E#EsrD_K*$)%i7W=7PFKCAa>O}s91&lMGx1S8JQAOcUnP_T1W8QtAQ8!d z#76od6;fTgO$nrv)2H-STAK9}WKr3@%*pm;cZ|dA-OO)xZD!AYW`z^X%JTjEMqbYE zCdl*h4~27)6^|w;#uhKdox)y@law*#gL1jNg7QFltNdO$SK|q)wCYL4D|2;T3adNS z&pOsgeW%XXN}W}k>&x{Us7vR0zL_WHM;V##n-|Q#HZt4zGEWSOCw{FL*$LTAg?XuI@u^6Dk??qSu|4;%AUeUp08}G(93fdrCw91 zIttL4)KS$=NyFrqoiwg-wQ0?HcBRl0^k@2-&cj(<)a}JKwgxjk?y$aWd^f$##(X9C z%~bQnoHXrZr*+$`mZgQX%e6z=4RlJEq$|{w=~jSlo9?`BTA!{5w29uXpR4cJ9|8T> z`Y8k4Kr`s1(cm_87`7Nr8txnZp1=qG=-V)-o!FNrsbv~)2%j5 znUl3b%5s#nz?P{c(B9`VdS3g%wuJ}mt zL^0m&q31CRwBS4-U}IV$&jxC+3M#560@T4RBSGX9ejLhMt1bU zVqHYwd7rs&dh@0hAVio5ncC=Su;E6VYNlnfZMMrkIZiw4qN^Ty<7*s!PIxljbo7#B zYN4zCMw)E8Ec0YDIpDC%?tAQ&681Ryg79Qiys1TrlctdlhMI1LHMTO@<*>`{xbIWS z`jRDZ6+kCSl1y#%HPCo7&9TfXW_hl%c@cYGk@2RJq#2{3CNpQ_#$o>X;6kvX)(X1k zo(vz)TNr>mP_4N_U{>Cqxe7q%<%A_H3X^ozeJ4RLLbxRkU>X%2!q5yKxPCRAZb6ep z35N@!(<2HZ0U2UBnFpB9)FMJzm=oB66z3W9A;^ul)aea2rhDbE6z5J~s6pz!B}yTz z8wnUjlzlxfZ_DTD>d!3QhA#UiO-LVq`^}z#)~zu)dzXV3v$qyBBBHK)3zxn+Eqytw zR3kRghwrbSx%5}(fXXLimSY^}?yL(7aM_k|G&I&vdw>tv0D2l-su9O;T)KXHZ*T-I zVvW$*KKx)^+zi#}6KEn<4PB&AA8v^<5_$TVjELn7oU7~?HEOY`s)|}~6{5yn1yi`X zs8yl8T>apJ ztIkO`CeF>?OLZP!pSU5A?(t+|)zC!>b9S@Oiiw{?yywXL=Z^9zSmR=N0Q;#fc4=cuY$xb!B2Q zG1;&%@a7F;Vv_l@?=ou!lYs_e?Wwy>dy4l}Xe@e@;|Mni&8JZRttk<10+W!;0iEs1 z>T|g>{)^Ff+m$@;vI1URFp6`p1x{6$%&h5ezX1Uxqyr}EN zo&l}jqG4&gxSD^!eR1F1--gr|c(SQvSJN_W2oKTSle_Cf^ zCG}X_+FP&t(1kDD?pbNS&~>bRcvoM=e?#~9oym?{eYNC0A1tuzKq%Gp&Sx6nqmS6K z^n^ZK_EMC~ei@C6EIGOl`BcwdQs+g@lg4@;$0v)ESQ+`c-^Rh(D`kr*zWa`qA5i79 zrL+-Wk=+KAX>OVJoogTtTts1&W zp}x1Oezaed*>84{T2$d5%*c^G7xh9y8GU6~Z6* z@6`20M6V_CRmgaa27-1Fw1c3XMQGNqiFDbxXZxiZ{PYu>e>WR3UIPyg&#N$BBQGzj zz6K7vYy3~s*W=H6{Mk;lT;oV5vsraG9H{1Aur6>p!5M#j`Ot5@enZw@_1U|n_vmoU z_JQBGPhI|{9m}yQ?7zp8|1j=9wAtHNt(fft|1N)BP8zN43VKZZzh3INLftjsa-w>= z?7}bnYG0T48lkh}sJ}0yE(g?YgFfm#SyJ8jsW^GB`Vi1X3U%6g7Z!Q?m@u*2pU(yy z_c+cjtU4!+V&LGkxtujZXU9=L7hacrn~lAho9CWm_e4mZ^QZ5q9{-b&yzm0U%U446 zcmurh3iG0o|13Go2 z9~|j3vg=XTe%?Xj5FN_qY4gFjemq?=rB3E3pz(vuk-=x5v1gJcb8lS$J^s24FP{ZO zXaCL$4`2Cajm6xx0baiI!v>`WJ2tJUkM%(m)QI;iSQ+{0C+2Ko{4MV6g@hk|^3xVM zF|aW3%P*e7DTh4qoLY2NtF5Q(z$P%+lDYE$e8$gR9Q@e9Ib^foAO7&yF89Q&MpWF# z`h9)%+?7T(adA0O3>+K|R0uMJBOJBSZ+{)M9`ED0pgMb`8OE>e-~pl$HLqbzAenp8 zL9yX`OrCXoVYS4YQxEXZZ5%jF&)}2EAbxEJc954B2EASZQK#c%Ih|E#Uk2Hxp!>XZL@L;78v1IihPb2!IwsEb|xI#5?ad-WftnJ`0I~MsBT7~+bR^ccJY>LXCK+?d^+w1%o z>$m?saJ=lUtlGkC9`Eh2Qhz_y;AsBzz=x-XfhCf7!q3%8K#&6q4{~y;6b4nukc3!8 zHUkoJ9*)EjBThx-o-$x~Sk!_+_!4m1h#i~xG?0)KV1kcj#k>t370MZ&4a&3dm^mAK zZsIF(oos>8Kt3=iikR*Kl2%XLLrXRbXTphn7JQnA)1Yb~JgNTORg7GK^vGr#@VP8G zwGLT|qPcWSB@4u?o>2<&;0yaDR$&dOKgwJ{dLY2w(`Eu{#!4wB zp5f5Ow2B^mTpS+UieHc{{4iuMD$P@rY-3`iNZLvmWizSF!&^3m-bi(c5$vqojQ_$g zY1KL>cT3_8;DUlMQ!U(G)HnRlt{!GjSTZb+*=3?917mWZ75~3L# zXE+U!-8MXKx9js1!v?|-`9N+JQjifx*N-o!Gbc%7#%j1H?B9DRMH^R4_VSTBx9sXiy9~$t&MCbZB=8U8;|wv zM&7)y5U#RU*SU!ve7}Iyl<5(JwF8>`X#yXw{ZLo2ac$;jDE4DYc>l z4gyO3o4BI`9pGRfQF%sps+}PULar+oU=v2{yiN+}fi8#}7-Q+kZgv$ZB~hw-PYq-5 zp7u!cw!M3E+TMNaC2zYr#nLz&GRY}U_F%g4Xva8#8Bm4-2&Zea9k6k5`LlguUnhbB z9CH`Oiq|?;DT0i(ipX`sB^rT2^H0H^Fwzhh$vG(@agfa-EEJe`;L}fRHXbm+8L(Hc z6tq$Rrl3>?U>m>yNUIGP1c(Fx=1c!1CDLkV(Begr%makekQo4^vw&q_|J_}u4zR0W 
z75zIk)C#EN(7sEodT&^f*r@!SX*)+OU75Mu%0;JAF}J?=sWsl^)e@izC72TrWtbNU zMaaX&_pYsn7P=7qdrI;?80@jnenW0K>bO%jIBl~t&f00rIbZw61?OG#t&J|b2pDo&I(dv|=e z`=gtqDd{or7lS-m^ako?p-yeu#npoz(WRw&bF~5hcr7l&V%_P8j)#ViLr6qSKuQJx z2{jFsDx3A_)o;Kcf)PYGzFbKF)H2{;4FH0n2H)hQLAg6RuV_a9o*U=6@?}PW-)xLU z>>(nK?tI`3SRernKwyIq#GnK#kO6rhKn^$z&KPb6Z$QH~#>P*OAP=3OE99UAH5kD2 z(1NpQ1>K{9|0P)#dI%4l;AbPAi+{YQvfc|p{AD3~fRMwR^%|c60#E?x8G)iS#~+1D zQXD>mTxj3pG7aLt1GwDOr=|b@zeJ)lb2$M19fzLV*TEKe6!vVtYy=K|0Jgrs$FK>0 z!5?ISo7<017PDm0PBZ7V#s;HiRIaKG4L*-`@PQ|bI*02 zS#``~ANTkt4ER`1-HVL@0BU>I!rb_r%`cA*N9BFr{;>n#^XB$P*ZQDw@TT@m%1Ut? zzkk@A6yRHy(0XNV(VApZ8f06lOq-onZQ4a;MnlIK|E0+>&9Qwve1hTGvto&4fX0+l zh*#u~(T8a16U%HJBNHbsLdE`}f z4UefbI=}##WkVq$=QG6SWX*RTU(gwg=@SEDrP~a5ApO@cq8=kvx2ss}&g05y!=^3g zPTl$(I)=f4#b-buAi*U?q(_2+#)gB(V`M@mWF{t}p{Amvq(!l6ikl17q!l*fJe)>Z z&^WNrO<)+qv|!1!8FQA+nzzW#>c;ERMF9f4aOT?Y$cb$`Lk_jSL-=iEY$7ympl;@C z*QGo=yap3;`K1rxCF*0V$|`CU;Go`^8A~yghm@pLJ}NZ z5|Wst1SLEXiA+?Y6O-7)rB3vTca|$$=_*&d#$#@}qK22X}C*B%?#1@W4175I9JsOKxy!!OU% zW3J!A1nIR`GLL6Q7SN7m&sa-z#G}#-ATYRs1Wu|+K?boP*dPb`1HN-%#??%T5L{+E zwVHKgF@$l9TT?Jh@6FV)r?{OCY%pMOZ2D{V!pI@ooqgmQmJ#BHRMR}^EXIp|Uxz~3 zY)FjY!JHmRKPsfgXKT=d{|7_lU&C5^Y{xBC@s`|mwk4g$v?2&GO1;uGW3ACW_*gsO5YaUa{(O|c+wmnl7RIDb5 zBAa#N&;X?B26gzEpsW-bqf=XW_C%We|DGZr&MqkOx({n6Zm1Wv3bqd(x#5X~#%cvM z=<(42CSqi6>?J24W-gL76H+@TsHe*D08Dcuj!%U&N(DM?rOkF|iih(tcx%GeJ+w-L z0?o5OeiE$uf62zQf5_ z(w>0a*IIye5sJ`BzmA@>FPU076MEB48)BlR4+F}Wqk%9nQ$|qLi0`Hu@RNo_;{HIY z_Z!YJlffFcJY8~V870SFqRbPfX3-jfRG$nEX0bVQZpInlf|6Z=%Nxkq!j`{DJHDj_ z3uNSvUZ^h+8VC#x1&&67K&2p2B`8!28tn)MjRmiF3lTy~s_N^O7nGzglOaRxZi9+q z5x@0&#bx~c&;&D#gV-LOan!r{MjH(irWiAfImQBGiLt_1V{9<)V3&QM(pnw74jfdd z#f7caa#d*I`-&SmL`4auOg|6F5r7dynwGdE-o5%(ZYW2`>?WO~xuSL}@o1W*?zaA^ zQfQp8rt0?0DewyGo-@pVvy|mrQs7bXQX0h}zD%fpCrkNMb2=ZAbD+Wq_XP2%B*VZ3 z%1hProG!aXNLjIU>=iPTy*ns{L(EKO;B)$QQ`)i-e$TkxvB?dWO1tMD@{!jd3ij8i z@SS!^y>r3W6eoU-JG#G7a#J-|U? z*Y4A=nGN-*w9JQ^J*w>a@YmlkYiECCT-Cil*N=kNG26t)z?(=DtMuzU(`C02R4jSO z(c?A>-PAMJ6OrM_$cD$!f(oT9!7h9P*`IOO{S2$~kBgB813lowuC8RsSY3%*#SuN( zi|a=mQJ?b26t_}Il*r`cr}fbUOjdD8;Vv? 
zqD!-yQ@iJ(%$-8_`|1S@K%?MABfV51;bqg=EA2Gdt39CCDkQva`uYtV0H`D0bW&F( z;VoCaT?2@Bob+y$g!f!$?=MG#ebC$K!wLx>na)0Lr`bN~0exB_L4alEHcsFUxC`!q z```h12p++6j4Tk|JgIF%4UwW}8N>TK%p^}PX;+bWnW#DoAyS7l;k*U?J>77xKGBd& z$F&$!#2m3itV5tJ(Vk2XD9#yk#oRH^5a>FuA9z{D3UF%2Jso3iP;WX@lO z0nd2I{p3LX?~#}RsCI%E005H=41gaCjClbp=^?U zn57%VnbC*<*GlzKSR4&!$}k>-EB$R`E<+5*aP0)QIfI7rTbwV1^M*kJ8{}5P^CA6C z#I@I%1Uda9zbwW6Q8d_7o44}J40`IY;qO&XR8 z9X@7A7+BlOVrr~WmoTrV(EPrRk{x8jMuH`CyjWHda3Pf&nCk(O+ks||)r&1g)a6}u~K zrA~`8BfUA&F`mqcvQ#rmXKK&w<|M__&ALTPwucw8Q+(PNDIRhQh(IEs*ik8!L4v%5 zMn7|=M+7N?bJ?|eTPrv?MX4eC8m#X;gL9mK6cGWO*)7}MO>57S7QVEjFJp;60-94X zrf`g(Iq9{*m{xS-$fVVZQrm4i?v^u{j0?u^Fd@;GHtSq?LgR5S=4wyWsh+?4MoHG^ zn|Yl~%Qh((WLU5*o7next`0TfBKKQ}>|158gDI*NFP96QA#5<-6d!DzwVq`~qf@Y| zI?Y<^)*B;!-BhMX%XuBkzM7gm$kvJ>k1W7R!tUrQAz|9zhYv z;h^9fI-YP3F1SV(s-oED3wgMX$<)~a9hTLS@9*bjwL)_FqGnTnRbN$WOyX}%>WQnp zskGMC0{N-jy)Ff&AYE~pwMv1bqJe9INOS-zC5u*6--#m0vD2>X9oM-CKeL_RzyI*<#CpZwonoyhcP_Grb((fqzUk0J>ppytPWvD_W{}It{f1 zoUPN$n&BC}N`b)PkRn!!uF+zNguF~|L9<~y{Tm-cq?!>Vv4o&5;1~9TCAktDF0?OE zHBBsn2W5P&#R!5z(-a0IIP5qX3MCjrpEom9WM`GaXSgiLMTX7g9G~zUG56oFn*v*- zZEK%@2rHIX)F?}xW1oo`=urJ_3aP(UA5Qs%rJT2Bv1y-2g7ne^C>vsqRt(aRQYKfq zR-(+JGJgy=c7C~NsT<5M+gCC$sc}Lc^@g1rRuxinSFsGQG0z8_wtMYW))SvNQSySF zQ#;mq?1afrVDylS$96}Of!akB$Rndz8x2D2Q&(|Wxu~s!*+%qUWKSYLnbjhtRyr8D z8#)RB`KDb$C#x-9utQbUz9dVLa-Z!XyxwMnHgB^}i1eJp$-KV5EmzEfJ0G(q` zPb2`qTN4?W?gfcSBbYD;ao-JBoL%~FeLp~(%4Tx0AtDL)){?7y z*-82x37cXJ(oLLX(x~1gHmnWmt~=|pNQu$oe3L++0~!CZ0Iq7d8%1&#+Tk#^F-H%mrL9)x#JO|xPST$9F*X%Vi46iCT1&F;fGhOHLvulss zJZO-VN!PLCw;n93B$*$2(hauG24u4uqRn$9j zouvEiz;<>C5+ObJh&o(g{YdT4$KWCMJ-CfdR7jPBjID3V0EjfHBW4(Tx8d6UBX35S zRaVjhc%sWWD!sjBbYvvB7^a>5mT})m&DU+}Nn_F-kb+gnBWrdB%1o;lL4X{pV(;H9Atp5jpDT%Jf{|Hm z*NCPwryfNV)sy26n%3C`c* zCASdoF_M%3PR^ zEJOETG=XVb+g0J(C+c1Id_wmGm9P5uGpWp6V6IQ@oWzN8RF+A)!FyG_Yxb3s=M$`wG}>;i6k?4T*$IJDc{=AjUVg1G|MLY@)_mmPzS5p2cz?p zlUD;i(G6KO(fg*lY-}*X#&bYW#-RH*1&s-r=OrzLVL}lEo(H6o(N>D^Bb8CrT4m#P zOMOO#V>6SK5A0=}Fw)V939D0zhSxScWGAI$YagpNwAFqy(nr3|QSCQUflYl&QZ^)$ z3ar+dlehme_(Y%Ol%AaWjRc&}K`yTrv^t5`boL!aDt~@RiQDCh>pRuqH}|vE!ts<( zN&3SyB7HX<{;4q&(D)af-@-~V_IFv|X?PwhhdMT6Ehr_Y z**h(l9�fI`z+*nJkn6Iz@+GVb{Wvm~3SF@bMQ-TedUwtoOt+B5P zC7T{ogZ_Fw;{+T+A2s$~E@8e#m4fs->xoAUPu|C3xZ%+3sM$k?&Mbz0w@BMgON+)* zb)l@zFE(N?4bSCi66!)AEo1_XeFm{+&d`kTAbC-8H@7i9&u`|7<}(bWUNl=yYn0D| zWBdrVv6=;#A|PYGH4QyP&@idLZen2U@iFkod)A~WNme?~z$*IQjn0Is&v@L$r82;x z;B@}D=EPi?uqin!#-VOfTmw#dG=9ftJfGUC4ZJwC zqq)*o?vsv~fz^DWJ!*2v*WWU~M}p&eZ0uV7t(LKAHk}@8p^=GNO=#s z>!@E3wyX>Q&t0|tg$j?8zd!kX7gmfoVXY`>SVcr>_$E@aObalJ0iR9&P9O$1=0c1h z1%_yWiKsf5<<$uDJ>>84mYp?Z3>hL|c zqDB@3%@ej`@ZdOIJ?|JDr=~Nvzl8YK8lb@ixgvtR?Mj!#@-2ej) zr6%)QqNRvw&COgPZ3U7KE&PHP>FD-6xG*~JR2A}Mb_fvgWtFN|UKyI>G<%~OHl}1U zvEt#li$EVJngGvJD#@`_e#(GzQg|tppv!VKV*Dc^w^XJq@q8HESrU{FmI^vL846b@ zjq2eCa#*(F(Y0yIzN)R}3wdxiNADdN(oVL>*^7nWrX);E-r0C zMWYedA~4Jf!yWS5q~@2iup*~{N!9g8_XS4DakOiDwq4baDtJ|CwB5fJB#M`xvLE%h z$Q>!Uq%g$5KT5 zo^Lg6F22!eG^2^!Nb$@@rFnoarwHY+B^<*t-Fp1Z!nVci*1gT_|JmTUoU4w^HDQXK zp*>n8fKV748jE^CjxjFiL@#R-74LL{z1`^z=a;9ouo9mYg6#>sbk2%s90I8amlAn<%6X<6U>_hx`b7Pa0XmP)#{uYeWU1R^84z) za=IRKCT%)MB8B2uq+vRsA4su5PHUZqrS<+OiUF&@TtyB?)y6AkuKYNWLy;>ONDoY$P%Wd}-A0NfVL+jJO` zm3};e@5WEvmWZQO^P)G|jB63a^{6W~PD%A9!*}8#j*O5?wS9EBeJ;9nsa==BHn^oP z{ETEOlw%FPRMnoHqMBAapjUxWjfPa3RSc`o27U6Bhz+tf=CERY zBUCu=&DhCJtx?9zQ)7;KTBeA1X!@tj^Kp&)gjunzz!Vdt{h7!YSPS{?scU9EUunfW z8?>yZ*NP-+k8X2@#!6mfMysqNyH*NtfkgQ+Hzs^O1=29gn&sGcM7l|-b^rg3edS$I z(Hsc(J3U8?VMcrcf|sUZzZXF^drskQskeR^&H| zP?SrhMplhoH359A?jWwSOC0h_c^YXr9F@t#na_(iPxzQ|JYPoig3D>_^{gN1ZUUp< 
z{{+&7fup^GNdEYnFe)iQfzd4(wR_0bZ|D$X4a;J?sP?g^$;of!#IS?hC8G}ZNP(?x zJ6r{rW>8U#bi!*X0I>MDo24{Ml+==!=wE&LBu->9VSEf^#d=YL*E$KfGLA8OLef3u zk*;BOFe?ov(0fbUqbjKUE95~Om~wsSq;)5f&#Tva4Ez}I;P5qDU-IM>&_2CB0e6x7Gu0Q$fd1 zG#Kn#S<#Ybp_Xp6g#+-A@ze_=QmtG}!-XD(?R#3#q-HxPXsB6$)dQe}0<9DDg5(oh zuAFwW@?Mn{*F~!q8LW?xG)?9~hkaL;NktURPCD-%nx&mG+;3mCD4ekZlX<=ywggTT zM0pQzB~OnnYjdue=0TE?mW~&eP$Brwjr*117?I3Y(1`fP-;Nse86D)}zoV9YWSU0F zGy|_1$?c^BC5q9x7C48|xFnrs#lT8-QiYzSJM!IFOnc_hY(1LT z8yC}mZRWRezjt~fYEW*(LIvxB&&2orq)p?eb5lOOdpe7<;pIBNk8zA(R=aO9*Jv8! z`#hAJ><$V#HCwu0F>m%FyW&AX3!YwKI+*Lt*^L;S#i-kX-`*EIM#dqq zEl9MsB_aHzDK_N-6aQCjl-GsJ!LR5d2ri0Nb;o$cRr4O(N>)?@3l+)>7^>WGvePr1 z=foQNj=0p72;fP=tievh8FE2d9z&ur3#EW{YA*pE%+VhwN`I%H14JBbg!PV#(ll%I z<$lC~aBh)^rcAdavRnwRz^G0+MMsu(nmC()KNv!uX+<%RT?TzhSk|Pfx z-Kgtj9uy_={CuBt@N2vfjyUiKB=!2s5>>iB*&wYLv}P22RLUJ3+owcJjrGkJswWa; zfmG7eo=c^r(lN)2=LcC+tWEF$+`6*(DBaeL3sL2%8lwvk|n=<%{;50a^3tgv{DXQ*F{XWwxmBgF7 zOADpa@9SibI6xY_Zu4{wT!!)-1V$0n-c7hsU;V< zzF>~NbGRp+_iRE3^32^M9P(A?YZ?$_(x6M7Un&ugJzs25H$OFQd;%)iLyjS?GLI!N zuo}3GNwnlvhI5Qw-47vIxn%YCsx`sW=@w_rSl%gU@ybGH`B=}TjeD~{pf4)-#R6wF4lDukX=W*8y((#>+4ZgwO8K(g!PO}my zsX58l>PcgJ0N+5~DdXeRIC+!K)rm^`;gxo%$eamb+d#_^9f3KNI@oSK)TZcQcRPj0 zXZx>R-T&}><9gY+zJL9OT)k$!zKJ4_nOHc8w(=Zf-UTg$gU4sy0MJ7H9F@SXtH;&MmmD$JXcb9_&i+B-Wv{H4aynn7I3{ zS-@fZ@RA}^9I*OlP!D27Z=1R#vKmCDj0J>PWM%W|2)MN+YM<7Rh3q9Pin3VV%FnA( zzouhs)@d8F&3+(0c4!R0ud*9xeYbj(c|B{-T<3FJ)Do9#h&~5=)lgVc>SV~}+E52|Bqz52%wZ%kcQLO2bRZ2D#N3}0% zQi8Tl)c2n`OoC=Tsr79ojm%ez+OZpH@a_}iMnrUUyX3(8K^+Itfa5wUh;X)bco>Mn z=2n8^g8e9r!lVq8T^WeOxVvm@zj;SL`$$m6bPFIKi9NvP0D-%38FuX+WVh-)<`REt zn{Z5#K#B*qNpKY45CWTD;33|06CiuO20Nu2Z^l3!9E7+7&cf3kxZX0jEY_iZX8