hesamation committed on
Commit 9940f8d · 0 Parent(s):

first commit
.gitattributes ADDED
@@ -0,0 +1,40 @@
+ *.7z filter=lfs diff=lfs merge=lfs -text
+ *.arrow filter=lfs diff=lfs merge=lfs -text
+ *.bin filter=lfs diff=lfs merge=lfs -text
+ *.bz2 filter=lfs diff=lfs merge=lfs -text
+ *.ckpt filter=lfs diff=lfs merge=lfs -text
+ *.ftz filter=lfs diff=lfs merge=lfs -text
+ *.gz filter=lfs diff=lfs merge=lfs -text
+ *.h5 filter=lfs diff=lfs merge=lfs -text
+ *.joblib filter=lfs diff=lfs merge=lfs -text
+ *.jpeg filter=lfs diff=lfs merge=lfs -text
+ *.lfs.* filter=lfs diff=lfs merge=lfs -text
+ *.mlmodel filter=lfs diff=lfs merge=lfs -text
+ *.model filter=lfs diff=lfs merge=lfs -text
+ *.msgpack filter=lfs diff=lfs merge=lfs -text
+ *.npy filter=lfs diff=lfs merge=lfs -text
+ *.npz filter=lfs diff=lfs merge=lfs -text
+ *.onnx filter=lfs diff=lfs merge=lfs -text
+ *.ot filter=lfs diff=lfs merge=lfs -text
+ *.parquet filter=lfs diff=lfs merge=lfs -text
+ *.pb filter=lfs diff=lfs merge=lfs -text
+ *.pdf filter=lfs diff=lfs merge=lfs -text
+ *.pickle filter=lfs diff=lfs merge=lfs -text
+ *.pkl filter=lfs diff=lfs merge=lfs -text
+ *.png filter=lfs diff=lfs merge=lfs -text
+ *.pt filter=lfs diff=lfs merge=lfs -text
+ *.pth filter=lfs diff=lfs merge=lfs -text
+ *.rar filter=lfs diff=lfs merge=lfs -text
+ *.safetensors filter=lfs diff=lfs merge=lfs -text
+ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
+ *.tar.* filter=lfs diff=lfs merge=lfs -text
+ *.tar filter=lfs diff=lfs merge=lfs -text
+ *.tflite filter=lfs diff=lfs merge=lfs -text
+ *.tgz filter=lfs diff=lfs merge=lfs -text
+ *.wasm filter=lfs diff=lfs merge=lfs -text
+ *.wav filter=lfs diff=lfs merge=lfs -text
+ *.xz filter=lfs diff=lfs merge=lfs -text
+ *.zip filter=lfs diff=lfs merge=lfs -text
+ *.zst filter=lfs diff=lfs merge=lfs -text
+ *tfevents* filter=lfs diff=lfs merge=lfs -text
+ *.jpg filter=lfs diff=lfs merge=lfs -text
.gitignore ADDED
@@ -0,0 +1,5 @@
+ node_modules/
+ *.log
+ *.env
+ *.cache
+ python/**/__pycache__
README.md ADDED
@@ -0,0 +1,48 @@
+ ---
+ title: The Distill Template
+ emoji: 🌌
+ colorFrom: yellow
+ colorTo: purple
+ sdk: static
+ pinned: true
+ license: apache-2.0
+ header: mini
+ app_file: dist/index.html
+ thumbnail: https://huggingface.co/spaces/nanotron/distill-blog-template/resolve/main/thumbnail.png
+ short_description: Craft Beautiful Blogs
+ ---
+
+ Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
+
+ Instructions to install and run locally:
+
+ ```bash
+ npm install
+ npm run build
+ npm run dev
+
+ # If you want to change something, change it in src/....
+
+ # Once you are finished
+ npm run build
+ # And commit the dist folder
+ ```
+
+ ## Loading HTML fragments
+ There are two ways to load HTML fragments:
+ 1. Compile them into the HTML at build time
+ 2. Fetch and insert them at run time
+
+ ## When to use what
+ - Use compile-time fragments only for the parts that every user must see right after page load (e.g. the logo)
+ - Use run-time fragments for everything else, so that the final HTML stays a reasonable size (ideally < 1 MB)
+
+ ## How to add a new fragment
+ - Add it to the `src/fragments` folder (e.g. `src/fragments/banner.html`)
+ - For compile-time fragments, add {{{fragment-name}}} at the appropriate place in `src/index.html` (e.g. {{{fragment-banner}}})
+ - For run-time fragments, add <div id="fragment-name"></div> to `src/index.html` where you want the fragment inserted (e.g. <div id="fragment-banner"></div>)
+
+ ## How to know which fragments are available
+ - Run `npm run dev` and look at the console for the available fragments
assets/images/256px-PDF.png ADDED

Git LFS Details

  • SHA256: 48b7ab9362d78d22ca0d66b2943406759e85cffb86b585176990035d12ac2c7d
  • Pointer size: 129 Bytes
  • Size of remote file: 5.46 kB
assets/images/placeholder.png ADDED

Git LFS Details

  • SHA256: c121166b2de694f4bb71dca004c8f413899016751eed5daa1570ba0d5ad9faec
  • Pointer size: 130 Bytes
  • Size of remote file: 53.1 kB
package-lock.json ADDED
The diff for this file is too large to render. See raw diff
 
package.json ADDED
@@ -0,0 +1,41 @@
+ {
+   "dependencies": {
+     "d3": "^7.9.0",
+     "katex": "^0.16.11",
+     "lodash": "^4.17.21",
+     "papaparse": "^5.4.1",
+     "plotly.js-basic-dist-min": "^2.33.0"
+   },
+   "name": "blogpost",
+   "version": "1.0.0",
+   "description": "--- title: \"The Nanotron Gigablogpost\" emoji: 🍷 colorFrom: pink colorTo: red sdk: static pinned: false header: mini ---",
+   "main": "index.js",
+   "scripts": {
+     "dev": "webpack serve --open",
+     "build": "NODE_ENV=production webpack"
+   },
+   "author": "",
+   "license": "ISC",
+   "devDependencies": {
+     "@babel/preset-env": "^7.24.6",
+     "@swc/html": "^1.10.17",
+     "babel-loader": "^9.1.3",
+     "clean-webpack-plugin": "^4.0.0",
+     "compression-webpack-plugin": "^11.1.0",
+     "copy-webpack-plugin": "^12.0.2",
+     "css-loader": "^7.1.2",
+     "fs": "^0.0.1-security",
+     "handlebars": "^4.7.8",
+     "html-minimizer-webpack-plugin": "^5.0.0",
+     "html-webpack-change-assets-extension-plugin": "^1.3.1",
+     "html-webpack-plugin": "^5.6.0",
+     "image-minimizer-webpack-plugin": "^4.1.3",
+     "sharp": "^0.33.5",
+     "style-loader": "^4.0.0",
+     "svgo": "^3.3.2",
+     "webpack": "^5.91.0",
+     "webpack-bundle-analyzer": "^4.10.2",
+     "webpack-cli": "^5.1.4",
+     "webpack-dev-server": "^5.0.4"
+   }
+ }
python/memory/formulas/__init__.py ADDED
File without changes
python/memory/formulas/explorations.ipynb ADDED
@@ -0,0 +1,191 @@
+ {
+  "cells": [
+   {
+    "cell_type": "code",
+    "execution_count": 1,
+    "metadata": {},
+    "outputs": [],
+    "source": [
+     "from utils import activation_memory, param_grads_opt"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": 48,
+    "metadata": {},
+    "outputs": [],
+    "source": [
+     "def activation_memory(\n",
+     "    a,  # attention heads\n",
+     "    b,  # micro batch size\n",
+     "    h,  # hidden dimension size\n",
+     "    h_ff,  # feedforward dimension size (often h_ff = 4h)\n",
+     "    L,  # number of layers\n",
+     "    s,  # sequence length\n",
+     "    mixed=True,\n",
+     "    recomputation=\"none\",\n",
+     "    ff_activation=\"relu\"\n",
+     "):\n",
+     "    # https://arxiv.org/pdf/2205.05198\n",
+     "    if mixed:\n",
+     "        bytes_per_value = 2\n",
+     "    else:\n",
+     "        bytes_per_value = 4\n",
+     "\n",
+     "    one_layer_attention = s * b * h * (bytes_per_value * 5 + 1) + ((2 * bytes_per_value + 1) * a * s * s * b)  # eq (2)\n",
+     "\n",
+     "    if ff_activation == \"relu\":\n",
+     "        one_layer_feedforward = (s * b * h * bytes_per_value + (s * b * h_ff * bytes_per_value)  # inputs of 1st/2nd linear layers\n",
+     "                                 + s * b * h)  # dropout\n",
+     "    elif ff_activation == \"gelu\":\n",
+     "        one_layer_feedforward = (s * b * h * bytes_per_value + (s * b * h_ff * bytes_per_value)  # inputs of 1st/2nd linear layers\n",
+     "                                 + s * b * h_ff * bytes_per_value  # inputs of activation function (not really necessary for ReLU)\n",
+     "                                 + s * b * h)  # dropout\n",
+     "    elif ff_activation == \"swiglu\":\n",
+     "        one_layer_feedforward = (s * b * h * bytes_per_value + (s * b * h_ff * bytes_per_value)  # inputs of input/output linear layers\n",
+     "                                 + s * b * h_ff * bytes_per_value * 3  # inputs of activation function\n",
+     "                                 + s * b * h)  # dropout (note that dropout is lower-precision - boolean)\n",
+     "    else:\n",
+     "        raise ValueError()\n",
+     "\n",
+     "    layer_norm = s * b * h * bytes_per_value\n",
+     "\n",
+     "    if recomputation == \"none\":\n",
+     "        one_layer = one_layer_attention + one_layer_feedforward + 2 * layer_norm  # eq (2)\n",
+     "    elif recomputation == \"selective\":\n",
+     "        one_layer = s * b * h * 34  # eq (6)\n",
+     "    elif recomputation == \"full\":\n",
+     "        one_layer = s * b * h * 2\n",
+     "    else:\n",
+     "        raise ValueError()\n",
+     "\n",
+     "    input_dropout = s * b * h  # section 4.3\n",
+     "\n",
+     "    total = L * one_layer + input_dropout\n",
+     "\n",
+     "    return total\n"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": 51,
+    "metadata": {},
+    "outputs": [],
+    "source": [
+     "a = 16\n",
+     "b = 3\n",
+     "h = 1024\n",
+     "h_ff = 4 * h\n",
+     "L = 1\n",
+     "s = 7  # 128000\n",
+     "recomputation = \"none\"\n",
+     "mixed = True\n",
+     "ff_activation = \"swiglu\"\n"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": 52,
+    "metadata": {},
+    "outputs": [
+     {
+      "data": {
+       "text/plain": [
+        "1086960"
+       ]
+      },
+      "execution_count": 52,
+      "metadata": {},
+      "output_type": "execute_result"
+     }
+    ],
+    "source": [
+     "activation_memory(a=a, b=b, h=h, h_ff=h_ff, L=L, s=s, recomputation=recomputation, mixed=mixed, ff_activation=ff_activation)"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": 18,
+    "metadata": {},
+    "outputs": [],
+    "source": [
+     "from math import log\n",
+     "\n",
+     "def format_bytes(bytes):\n",
+     "    sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB']\n",
+     "    if bytes == 0:\n",
+     "        return '0 Bytes'\n",
+     "    i = int(log(bytes, 1024))\n",
+     "    print(i)\n",
+     "    p = 1024 ** i\n",
+     "    s = round(bytes / p, 2)\n",
+     "    return f\"{s} {sizes[i]}\"\n"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": 19,
+    "metadata": {},
+    "outputs": [
+     {
+      "name": "stdout",
+      "output_type": "stream",
+      "text": [
+       "4\n"
+      ]
+     },
+     {
+      "data": {
+       "text/plain": [
+        "'22.13 TB'"
+       ]
+      },
+      "execution_count": 19,
+      "metadata": {},
+      "output_type": "execute_result"
+     }
+    ],
+    "source": [
+     "format_bytes(activation_memory(a=a, b=b, h=h, h_ff=h_ff, L=L, s=s, recomputation=recomputation))"
+    ]
+   },
+   {
+    "cell_type": "code",
+    "execution_count": null,
+    "metadata": {},
+    "outputs": [],
+    "source": []
+   },
+   {
+    "cell_type": "code",
+    "execution_count": null,
+    "metadata": {},
+    "outputs": [],
+    "source": []
+   }
+  ],
+  "metadata": {
+   "kernelspec": {
+    "display_name": "jupyter",
+    "language": "python",
+    "name": "python3"
+   },
+   "language_info": {
+    "codemirror_mode": {
+     "name": "ipython",
+     "version": 3
+    },
+    "file_extension": ".py",
+    "mimetype": "text/x-python",
+    "name": "python",
+    "nbconvert_exporter": "python",
+    "pygments_lexer": "ipython3",
+    "version": "3.10.14"
+   }
+  },
+  "nbformat": 4,
+  "nbformat_minor": 2
+ }
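The notebook cells above implement eq. (2) of Korthikanti et al. (https://arxiv.org/pdf/2205.05198). As a worked reduction, for the mixed-precision GELU case with h_ff = 4h the code's terms collapse to the paper's familiar per-layer total:

$$
M_{\text{layer}}
= \underbrace{11\,sbh + 5\,as^2b}_{\text{attention}}
+ \underbrace{19\,sbh}_{\text{MLP (GELU),}\ h_{ff}=4h}
+ \underbrace{4\,sbh}_{\text{two layer norms}}
= sbh\left(34 + 5\,\frac{as}{h}\right)\ \text{bytes}
$$

This also explains the `selective` branch: selective recomputation discards the quadratic $5\,as^2b$ attention-matrix activations and keeps only the $34\,sbh$ term.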
python/memory/formulas/utils.py ADDED
@@ -0,0 +1,69 @@
+ def activation_memory(
+     a,  # attention heads
+     b,  # micro batch size
+     h,  # hidden dimension size
+     h_ff,  # feedforward dimension size (often h_ff = 4h)
+     L,  # number of layers
+     s,  # sequence length
+     mixed=True,
+     recomputation="none"
+ ):
+     # https://arxiv.org/pdf/2205.05198
+     if mixed:
+         bytes_per_value = 2
+     else:
+         bytes_per_value = 4
+
+     one_layer_attention = s * b * h * (bytes_per_value * 5 + 1) + ((2 * bytes_per_value + 1) * a * s * s * b)  # eq (2)
+     one_layer_feedforward_mlp = (s * b * h * bytes_per_value + (s * b * h_ff * bytes_per_value)  # inputs of 1st/2nd linear layers
+                                  + s * b * h_ff * bytes_per_value  # inputs of activation function (not really necessary for ReLU though)
+                                  + s * b * h)  # dropout
+     one_layer_feedforward_swiglu = (s * b * h * bytes_per_value + (s * b * h_ff * bytes_per_value)  # inputs of input/output linear layers
+                                     + s * b * h_ff * bytes_per_value * 3  # inputs of activation function
+                                     + s * b * h)  # dropout (note that dropout is lower-precision - boolean)
+
+     if recomputation == "none":
+         one_layer = one_layer_attention  # eq (2)
+     elif recomputation == "selective":
+         one_layer = s * b * h * 34  # eq (6)
+     elif recomputation == "full":
+         one_layer = s * b * h * 2
+     else:
+         raise ValueError()
+
+     input_dropout = 0  # s * b * h  # section 4.3
+
+     total = L * one_layer + input_dropout
+
+     return total
+
+
+ def param_grads_opt(
+     h,  # hidden dimension size
+     L,  # number of layers
+     s,  # sequence length
+     v,  # vocab size
+     k=8,  # bytes per parameter for the optimizer (Adam: 8 = 4 bytes momentum + 4 bytes variance)
+     mixed=True  # mixed precision training
+ ):
+     # https://michaelwornow.net/2024/01/18/counting-params-in-transformer
+     # note: this is without GQA or MQA
+
+     emb = h * (v + s)
+     one_layer = 12 * h**2 + 13 * h
+     other = 2 * h
+
+     n = emb + L * one_layer + other
+
+     # 3.1 https://arxiv.org/pdf/1910.02054
+     if mixed:
+         k += 4  # additional full-precision weights
+         bytes_per_parameter = 2
+     else:
+         bytes_per_parameter = 4
+
+     return bytes_per_parameter * n, bytes_per_parameter * n, k * n
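For a sense of scale, here is a minimal worked example of the counting formula in `param_grads_opt` above. The shapes (h=4096, L=32, s=4096, v=32000) are illustrative Llama-7B-like values, not taken from this repo:

```python
# Worked example for the counting formula in utils.py (illustrative shapes)
h, L, s, v = 4096, 32, 4096, 32000

# n = h*(v + s) + L*(12*h^2 + 13*h) + 2*h
n = h * (v + s) + L * (12 * h**2 + 13 * h) + 2 * h
print(f"parameters: {n / 1e9:.2f}B")  # ~6.59B

# Mixed-precision training state (ZeRO paper, section 3.1):
# 2-byte weights, 2-byte grads, and (8 + 4) bytes/param of optimizer state
weights_gib = 2 * n / 2**30
grads_gib = 2 * n / 2**30
opt_gib = (8 + 4) * n / 2**30
print(f"weights: {weights_gib:.1f} GiB, grads: {grads_gib:.1f} GiB, optimizer: {opt_gib:.1f} GiB")
```

With mixed precision the model states alone cost roughly 16 bytes per parameter, which is why the optimizer state dominates the total.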
python/memory/measurments/act-mem-2/README.md ADDED
@@ -0,0 +1,16 @@
+ # Activation Memory: Part 2
+
+ Code accompanying the deep-dive [blog post on activation memory](https://determined.ai/blog/act-mem-2).
+
+ - The main utility code is in `act_mem.py`.
+ - Basic transformer layers are implemented in `layers.py`.
+ - The scripts `{block,mlp}_script.py` demonstrate how replacing `GELU` with `ReLU` affects activation memory.
+ - `attn_script.py` shows the cost of activation memory in the attention layer.
+ - Tests of the code are in `test.py`.
+ - See `requirements.txt` for the versions the code was built against.
+
+ ## Contributors
+
+ - [Garrett Goon](https://github.com/garrett361)
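For orientation, here is a minimal usage sketch of the utilities described above, mirroring the linear-layer case in `test.py` (`SavedTensorContext` works on CPU, while `AllocatedMemContext` needs CUDA):

```python
import torch
import torch.nn as nn

import act_mem

# Measure the activation memory a single Linear layer saves for backward.
# The layer's own weights are ignored, so only the inputs should be recorded.
lin = nn.Linear(1024, 1024)
x = torch.randn(8, 1024, requires_grad=True)

with act_mem.SavedTensorContext(ignored_tensors=lin.parameters()) as saved:
    y = lin(x)

print(f"saved for backward: {saved.saved_tensor_mem} bytes")
assert saved.saved_tensor_mem == x.numel() * x.element_size()
```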
python/memory/measurments/act-mem-2/act_mem.py ADDED
@@ -0,0 +1,132 @@
+ from typing import Any, Iterable, Optional, Union
+
+ import torch
+
+
+ def B_to_GiB(bytes: Union[int, float]) -> float:
+     return bytes / 2**30
+
+
+ def get_tensor_bytes(tensor: torch.Tensor) -> int:
+     """
+     Returns the bytes of storage a given tensor takes up. If `tensor` is a view of a larger tensor,
+     this function only returns the bytes associated with the view.
+     """
+     tensor_bytes = tensor.numel() * tensor.element_size()
+     return tensor_bytes
+
+
+ class AllocatedMemContext:
+     """
+     Context manager which captures the allocated GPU memory at context exit and the change between
+     enter and exit.
+
+     Only includes the `allocated_bytes.all.`-prefixed keys from `memory_stats`, with all readings
+     in bytes.
+
+     Example:
+
+     ```python
+     with AllocatedMemContext() as mem:
+         out = model(inputs)
+     mem.delta  # dict of allocated-memory changes between enter and exit
+     ```
+     """
+
+     def __init__(self) -> None:
+         # Ensure CUDA libraries are loaded:
+         torch.cuda.current_blas_handle()
+
+         self.before: dict[str, int] = {}
+         self.after: dict[str, int] = {}
+         self.delta: dict[str, int] = {}
+
+         self._mem_key_prefix = "allocated_bytes.all."
+
+     def _get_mem_dict(self) -> dict[str, int]:
+         return {
+             k.replace(self._mem_key_prefix, ""): v
+             for k, v in torch.cuda.memory_stats().items()
+             if self._mem_key_prefix in k
+         }
+
+     def __enter__(self) -> "AllocatedMemContext":
+         self.before = self._get_mem_dict()
+         return self
+
+     def __exit__(self, *args: Any, **kwargs: Any) -> None:
+         self.after = self._get_mem_dict()
+         self.delta = {k: v - self.before[k] for k, v in self.after.items()}
+
+
+ class SavedTensorContext:
+     """
+     Context manager which captures all tensors which are registered as being saved for backwards
+     within the context window. Does not work with `meta`-device tensors.
+
+     All saved tensors are stored in the `saved_tensor_dict` attr, which is an instance of torch's
+     WeakTensorKeyDictionary with tensor/data_ptr key/value pairs. Some of these tensors may be
+     views of the same underlying storage. The total memory of all saved tensors in bytes,
+     accounting for redundant views, can be accessed through `saved_tensor_mem`.
+
+     Use:
+     ```
+     model = ...
+     with SavedTensorContext(ignored_tensors=model.parameters()) as saved:
+         # Do some computation with `model` and capture saved tensors which are not model weights
+         ...
+     saved.saved_tensor_dict  # WeakTensorKeyDictionary of all saved tensors.
+     saved.saved_tensor_mem   # bytes from all saved tensors (activation memory).
+     ```
+     """
+
+     def __init__(
+         self,
+         ignored_tensors: Optional[Iterable[torch.Tensor]] = None,
+     ) -> None:
+         # Track ignored tensors by their storage's data_ptr. Important to use the storage's
+         # data_ptr, not just the data_ptr of the tensor itself.
+         self._ignored_data_ptrs = (
+             set()
+             if ignored_tensors is None
+             else {t.untyped_storage().data_ptr() for t in ignored_tensors}
+         )
+
+         # Use WeakTensorKeyDictionary instances to save non-trivial tensor references, since these
+         # won't keep the tensor alive if the only references to the tensor are within this object.
+         self.saved_tensor_dict = torch.utils.weak.WeakTensorKeyDictionary()
+
+         def pack_hook(saved_tensor: torch.Tensor) -> torch.Tensor:
+             data_ptr = saved_tensor.untyped_storage().data_ptr()
+             if data_ptr not in self._ignored_data_ptrs:
+                 self.saved_tensor_dict[saved_tensor] = data_ptr
+             return saved_tensor
+
+         def unpack_hook(saved_tensor: torch.Tensor) -> torch.Tensor:
+             return saved_tensor
+
+         self._saved_tensors_hook = torch.autograd.graph.saved_tensors_hooks(pack_hook, unpack_hook)
+
+     def __enter__(self) -> "SavedTensorContext":
+         self._saved_tensors_hook.__enter__()
+         return self
+
+     def __exit__(self, *args: Any, **kwargs: Any) -> None:
+         self._saved_tensors_hook.__exit__(*args, **kwargs)
+
+     @property
+     def saved_tensor_mem(self) -> int:
+         """
+         The memory in bytes of all saved tensors, accounting for views into the same storage.
+         """
+         accounted_for = self._ignored_data_ptrs.copy()
+         total_bytes = 0
+         for t in self.saved_tensor_dict:
+             data_ptr = t.untyped_storage().data_ptr()
+             if data_ptr not in accounted_for:
+                 print(f"Tensor ptr: {t.untyped_storage().data_ptr()}, "
+                       f"shape: {t.shape}, "
+                       f"dtype: {t.dtype}, "
+                       f"device: {t.device}"
+                       )
+                 total_bytes += t.untyped_storage().nbytes()
+                 accounted_for.add(data_ptr)
+         return total_bytes
python/memory/measurments/act-mem-2/attn_script.py ADDED
@@ -0,0 +1,32 @@
+ import torch
+
+ import act_mem
+ import layers
+
+ if __name__ == "__main__":
+     batch_size, seq_len, d_model, n_heads = 1, 128, 1024, 32
+     print(f"Batch size: {batch_size}, sequence length: {seq_len}, d_model: {d_model}, n_heads: {n_heads}")
+     dtype = torch.bfloat16
+     inputs = torch.randn(
+         batch_size,
+         seq_len,
+         d_model,
+         device="cuda",
+         requires_grad=True,
+         dtype=dtype,
+     )
+
+     attn = layers.Attention(
+         d_model=d_model,
+         n_heads=n_heads,
+         device="cuda",
+         dtype=dtype,
+     )
+     with act_mem.AllocatedMemContext() as mem, act_mem.SavedTensorContext(
+         ignored_tensors=attn.parameters()
+     ) as saved:
+         out = attn(inputs)
+     stm = saved.saved_tensor_mem
+     print(f'{mem.delta["current"]=}')
+     print(f"{stm=}")
+     print(f"{stm/out.numel()=}")
python/memory/measurments/act-mem-2/block_script.py ADDED
@@ -0,0 +1,44 @@
+ """
+ Prints out the ratio of activation memory for a transformer Block when using ReLU vs GELU.
+ """
+
+ import torch
+ import torch.nn as nn
+
+ import act_mem
+ import layers
+
+ if __name__ == "__main__":
+     batch_size, seq_len, d_model, n_heads = 2, 4096, 1024, 2
+     dtype = torch.bfloat16
+     inputs = torch.randn(
+         batch_size,
+         seq_len,
+         d_model,
+         device="cuda",
+         requires_grad=True,
+         dtype=dtype,
+     )
+
+     act_fn_dict = {"ReLU": nn.ReLU(), "GELU": nn.GELU()}
+     # Append outputs to a list to keep tensors alive
+     outputs = []
+     mem_bytes = []
+
+     for name, act_fn in act_fn_dict.items():
+         block = layers.Block(
+             d_model=d_model,
+             act_fn=act_fn,
+             n_heads=n_heads,
+             device="cuda",
+             dtype=dtype,
+         )
+         with act_mem.AllocatedMemContext() as mem, act_mem.SavedTensorContext(
+             ignored_tensors=block.parameters()
+         ) as saved:
+             out = block(inputs)
+         outputs.append(out)
+         print(f"{name} block bytes: {saved.saved_tensor_mem}")
+         mem_bytes.append(saved.saved_tensor_mem)
+
+     print(f"ReLU/GELU block act mem ratio: {mem_bytes[0]/mem_bytes[1]}")
python/memory/measurments/act-mem-2/layers.py ADDED
@@ -0,0 +1,190 @@
+ from typing import Optional, Union
+
+ import einops
+ import torch
+ import torch.nn as nn
+ import torch.nn.functional as F
+
+
+ class Attention(nn.Module):
+     """
+     Minimal multi-head attention layer.
+     """
+
+     def __init__(
+         self,
+         d_model: int,
+         n_heads: int,
+         device: Optional[Union[str, torch.device]] = None,
+         dtype: Optional[torch.dtype] = None,
+     ):
+         super().__init__()
+         self.d_model = d_model
+         self.n_heads = n_heads
+         factory_kwargs = {"device": device, "dtype": dtype}
+
+         self.d_head, remainder = divmod(self.d_model, self.n_heads)
+         assert not remainder, f"{n_heads=} must divide {d_model=} evenly"
+
+         self.lin_qkv = nn.Linear(
+             self.d_model,
+             3 * self.d_model,
+             **factory_kwargs,
+         )
+
+         self.lin_out = nn.Linear(self.d_model, self.d_model, **factory_kwargs)
+
+     def forward(
+         self,
+         inputs: torch.Tensor,
+     ) -> torch.Tensor:
+         bsz, seq_len, _ = inputs.size()
+
+         # Create the queries, keys, values
+         qkv = einops.rearrange(
+             self.lin_qkv(inputs),
+             "b s (three n_h d_h) -> three b s n_h d_h",
+             b=bsz,
+             s=seq_len,
+             three=3,
+             n_h=self.n_heads,
+             d_h=self.d_head,
+         )
+         q, k, v = qkv
+
+         bsz, seq_len, n_heads, d_head = q.shape
+
+         shape_kwargs = dict(b=bsz, n_h=n_heads, s=seq_len, d_h=d_head)
+         q = einops.rearrange(q, "b s n_h d_h -> b n_h s d_h", **shape_kwargs)
+         k = einops.rearrange(k, "b s n_h d_h -> b n_h s d_h", **shape_kwargs)
+         v = einops.rearrange(v, "b s n_h d_h -> b n_h s d_h", **shape_kwargs)
+
+         # Multi-head self-attention
+         attn_output = F.scaled_dot_product_attention(q, k, v, is_causal=True)
+         attn_output = einops.rearrange(
+             attn_output,
+             "b n_h s d_h -> b s (n_h d_h)",
+             b=bsz,
+             n_h=n_heads,
+             s=seq_len,
+             d_h=d_head,
+         )
+
+         # Final projection
+         out = self.lin_out(attn_output)
+
+         return out
+
+
+ class MLP(nn.Module):
+     """
+     Basic MLP layer with optional Dropout.
+     """
+
+     def __init__(
+         self,
+         d_model: int,
+         act_fn: nn.Module,
+         dropout_prob: Optional[float] = None,
+         device: Optional[Union[str, torch.device]] = None,
+         dtype: Optional[torch.dtype] = None,
+     ) -> None:
+         super().__init__()
+         print(f"Shapes: d_model: {d_model}, act_fn: {act_fn}, dropout_prob: {dropout_prob}, device: {device}, dtype: {dtype}")
+         self.d_model = d_model
+         self.act_fn = act_fn
+         self.dropout_prob = dropout_prob
+         factory_kwargs = {"device": device, "dtype": dtype}
+
+         self.lin_0 = nn.Linear(self.d_model, 4 * self.d_model, **factory_kwargs)
+         self.lin_1 = nn.Linear(4 * self.d_model, self.d_model, **factory_kwargs)
+         self.dropout = nn.Dropout(self.dropout_prob) if self.dropout_prob else None
+
+     def forward(self, inputs: torch.Tensor) -> torch.Tensor:
+         x = self.lin_0(inputs)
+         x = self.act_fn(x)
+         x = self.lin_1(x)
+         if self.dropout is not None:
+             x = self.dropout(x)
+         return x
+
+
+ class SwiGLUMLP(nn.Module):
+     """
+     Llama 3 SwiGLU MLP layer with optional Dropout.
+     """
+
+     def __init__(
+         self,
+         d_model: int,
+         intermediate_size: int,
+         act_fn: nn.Module,
+         dropout_prob: Optional[float] = None,
+         device: Optional[Union[str, torch.device]] = None,
+         dtype: Optional[torch.dtype] = None,
+     ) -> None:
+         super().__init__()
+         print(f"Shapes: d_model: {d_model}, intermediate_size: {intermediate_size}, act_fn: {act_fn}, dropout_prob: {dropout_prob}, device: {device}, dtype: {dtype}")
+         self.d_model = d_model
+         self.intermediate_size = intermediate_size
+         self.act_fn = act_fn
+         self.dropout_prob = dropout_prob
+         factory_kwargs = {"device": device, "dtype": dtype}
+
+         self.gate_proj = nn.Linear(self.d_model, self.intermediate_size, **factory_kwargs)
+         self.up_proj = nn.Linear(self.d_model, self.intermediate_size, **factory_kwargs)
+         self.down_proj = nn.Linear(self.intermediate_size, self.d_model, **factory_kwargs)
+         self.dropout = nn.Dropout(self.dropout_prob) if self.dropout_prob else None
+
+     def forward(self, inputs: torch.Tensor) -> torch.Tensor:
+         x = self.down_proj(self.act_fn(self.gate_proj(inputs)) * self.up_proj(inputs))
+         if self.dropout is not None:
+             x = self.dropout(x)
+         return x
+
+
+ class Block(nn.Module):
+     """
+     Basic transformer block.
+
+     Schematic:
+     ┌──────┐
+     │inputs│
+     └┬─┬───┘
+      │┌▽───────────┐
+      ││norm_0, attn│
+      │└┬───────────┘
+     ┌▽─▽──┐
+     │ add │
+     └┬─┬──┘
+      │┌▽──────────┐
+      ││norm_1, mlp│
+      │└┬──────────┘
+     ┌▽─▽──┐
+     │ add │
+     └┬────┘
+     ┌▽──────┐
+     │outputs│
+     └───────┘
+     """
+
+     def __init__(
+         self,
+         d_model: int,
+         n_heads: int,
+         act_fn: nn.Module,
+         dropout_prob: Optional[float] = None,
+         dtype: Optional[torch.dtype] = None,
+         device: Optional[Union[str, torch.device]] = None,
+     ):
+         super().__init__()
+         factory_kwargs = {"device": device, "dtype": dtype}
+         self.attn = Attention(d_model=d_model, n_heads=n_heads, **factory_kwargs)
+         self.mlp = MLP(d_model=d_model, act_fn=act_fn, dropout_prob=dropout_prob, **factory_kwargs)
+         self.norm_0 = nn.LayerNorm(d_model, **factory_kwargs)
+         self.norm_1 = nn.LayerNorm(d_model, **factory_kwargs)
+
+     def forward(self, inputs: torch.Tensor) -> torch.Tensor:
+         outputs = self.attn(self.norm_0(inputs)) + inputs
+         outputs = self.mlp(self.norm_1(outputs)) + outputs
+         return outputs
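As a quick sanity check of the `Block` above, a minimal sketch (CPU, small illustrative sizes) verifying that the pre-norm residual block preserves the input shape:

```python
import torch
import torch.nn as nn

import layers

# Small illustrative sizes; the Block applies pre-norm attention and MLP,
# each followed by a residual add, so the output shape must match the input.
block = layers.Block(d_model=64, n_heads=4, act_fn=nn.GELU())
x = torch.randn(2, 16, 64)

out = block(x)
assert out.shape == x.shape
```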
python/memory/measurments/act-mem-2/mlp_script.py ADDED
@@ -0,0 +1,57 @@
+ """
+ Prints out the ratio of activation memory for the MLP layer when using ReLU vs GELU.
+ """
+
+ import torch
+ import torch.nn as nn
+
+ import act_mem
+ import layers
+
+ if __name__ == "__main__":
+     batch_size, seq_len, d_model, dropout_prob = 1, 128, 1024, 0.1
+     print(f"Batch size: {batch_size}, sequence length: {seq_len}, d_model: {d_model}, dropout_prob: {dropout_prob}")
+     dtype = torch.bfloat16
+     inputs = torch.randn(
+         batch_size,
+         seq_len,
+         d_model,
+         device="cuda",
+         requires_grad=True,
+         dtype=dtype,
+     )
+
+     act_fn_dict = {"ReLU": nn.ReLU(), "GELU": nn.GELU(), "silu": nn.SiLU()}
+     # Append outputs to a list to keep tensors alive
+     outputs = []
+     mem_bytes = []
+
+     for name, act_fn in act_fn_dict.items():
+         if name == "silu":
+             mlp = layers.SwiGLUMLP(
+                 d_model=d_model,
+                 intermediate_size=4 * d_model,
+                 act_fn=act_fn,
+                 dropout_prob=dropout_prob,
+                 device="cuda",
+                 dtype=dtype,
+             )
+         else:
+             mlp = layers.MLP(
+                 d_model=d_model,
+                 act_fn=act_fn,
+                 dropout_prob=dropout_prob,
+                 device="cuda",
+                 dtype=dtype,
+             )
+         with act_mem.AllocatedMemContext() as mem, act_mem.SavedTensorContext(
+             ignored_tensors=mlp.parameters()
+         ) as saved:
+             out = mlp(inputs)
+         outputs.append(out)
+         stm = saved.saved_tensor_mem
+         assert mem.delta["current"] == stm
+         print(f"{name} bytes: {act_mem.B_to_GiB(stm)}")
+         mem_bytes.append(stm)
+
+     print(f"ReLU/GELU act mem ratio: {mem_bytes[0]/mem_bytes[1]}")
python/memory/measurments/act-mem-2/requirements.txt ADDED
@@ -0,0 +1,14 @@
+ einops==0.8.0
+ filelock==3.14.0
+ fsspec==2024.5.0
+ iniconfig==2.0.0
+ Jinja2==3.1.4
+ MarkupSafe==2.1.5
+ mpmath==1.3.0
+ networkx==3.3
+ packaging==24.0
+ pluggy==1.5.0
+ pytest==8.2.1
+ sympy==1.12.1
+ torch==2.3.0
+ typing_extensions==4.12.0
python/memory/measurments/act-mem-2/test.py ADDED
@@ -0,0 +1,203 @@
+ from typing import Optional
+
+ import pytest
+ import torch
+ import torch.nn as nn
+
+ import act_mem
+ import layers
+
+ BATCH_SIZES = (1, 2)
+ D_MODELS = (128, 256)
+ SEQ_LENS = (64, 128)
+ N_HEADS = (2, 4)
+
+
+ DEVICES = ["cpu"]
+ if torch.cuda.is_available():
+     DEVICES.append("cuda")
+
+
+ ZERO_MEM_ACT_FNS = [
+     nn.ReLU(),
+     nn.Sigmoid(),
+     nn.Tanh(),
+     nn.LeakyReLU(inplace=True),
+ ]
+ ALL_ACT_FNS = ZERO_MEM_ACT_FNS + [
+     nn.ELU(),
+     nn.GELU(),
+     nn.Hardshrink(),
+     nn.Hardsigmoid(),
+     nn.Hardswish(),
+     nn.Hardtanh(),
+     nn.LeakyReLU(),
+     nn.SELU(),
+     nn.SiLU(),
+ ]
+
+
+ class TestSavedTensorContext:
+     @pytest.mark.parametrize("device", DEVICES)
+     @pytest.mark.parametrize("d_model", D_MODELS)
+     @pytest.mark.parametrize("batch_size", BATCH_SIZES)
+     def test_linear(self, device: str, d_model: int, batch_size: int) -> None:
+         """
+         Test a simple linear layer. The inputs should be saved for backwards.
+         """
+         inputs = torch.randn(batch_size, d_model, requires_grad=True, device=device)
+         lin = nn.Linear(d_model, d_model, device=device)
+         with act_mem.SavedTensorContext(ignored_tensors=lin.parameters()) as saved:
+             _ = lin(inputs)
+         assert saved.saved_tensor_mem == inputs.numel() * inputs.element_size()
+
+     @pytest.mark.parametrize("device", DEVICES)
+     @pytest.mark.parametrize("d_model", D_MODELS)
+     @pytest.mark.parametrize("batch_size", BATCH_SIZES)
+     def test_linear_amp(self, device: str, d_model: int, batch_size: int) -> None:
+         """
+         Test a linear layer with AMP. The saved tensors should now be a low-precision version of
+         the inputs and the low-precision version of the weights.
+         """
+         inputs = torch.randn(batch_size, d_model, requires_grad=True, device=device)
+         lin = nn.Linear(d_model, d_model, device=device)
+         dtype = torch.bfloat16
+         with torch.autocast(device_type=device, dtype=dtype):
+             with act_mem.SavedTensorContext(ignored_tensors=lin.parameters()) as saved:
+                 out = lin(inputs)
+         assert (
+             saved.saved_tensor_mem
+             == out.numel() * out.element_size() + lin.weight.numel() * dtype.itemsize
+         )
+
+     @pytest.mark.parametrize("act_fn", ALL_ACT_FNS)
+     @pytest.mark.parametrize("dropout_prob", (None, 0.5))
+     @pytest.mark.parametrize("device", DEVICES)
+     @pytest.mark.parametrize("d_model", D_MODELS)
+     @pytest.mark.parametrize("batch_size", BATCH_SIZES)
+     @pytest.mark.parametrize("seq_len", SEQ_LENS)
+     def test_mlp(
+         self,
+         act_fn: nn.Module,
+         dropout_prob: Optional[float],
+         device: str,
+         d_model: int,
+         batch_size: int,
+         seq_len: int,
+     ) -> None:
+         """
+         For the transformer MLP layer, the initial inputs and the inputs to the final linear layer
+         (which are four times as large) must always be saved. If the derivative of the activation
+         function cannot be expressed in terms of the activation function's *outputs*, then the
+         activation inputs must also be saved (which are again four times as large as the MLP's
+         inputs). The MLP activation memory can therefore be nearly halved by the choice of
+         activation function.
+         """
+         inputs = torch.randn(
+             batch_size, seq_len, d_model, requires_grad=True, device=device
+         )
+         expansion_factor = 4
+         mlp = layers.MLP(
+             d_model=d_model, act_fn=act_fn, dropout_prob=dropout_prob, device=device
+         )
+         with act_mem.SavedTensorContext(ignored_tensors=mlp.parameters()) as saved:
+             _ = mlp(inputs)
+
+         # Compare measured memory against expected
+         first_lin_input_mem = act_mem.get_tensor_bytes(inputs)
+         second_lin_input_mem = expansion_factor * first_lin_input_mem
+         # Only some activations require additional activation memory
+         activation_input_mem = 0 if act_fn in ZERO_MEM_ACT_FNS else second_lin_input_mem
+         dropout_act_mem = (
+             0 if not dropout_prob else inputs.numel() * (4 if device == "cpu" else 1)
+         )
+
+         expected_mem = (
+             first_lin_input_mem
+             + second_lin_input_mem
+             + activation_input_mem
+             + dropout_act_mem
+         )
+         assert saved.saved_tensor_mem == expected_mem
+
+     @pytest.mark.parametrize("act_fn", ALL_ACT_FNS)
+     @pytest.mark.parametrize("dropout_prob", (None, 0.5))
+     @pytest.mark.parametrize("device", DEVICES)
+     @pytest.mark.parametrize("d_model", D_MODELS)
+     @pytest.mark.parametrize("batch_size", BATCH_SIZES)
+     @pytest.mark.parametrize("seq_len", SEQ_LENS)
+     def test_mlp_amp(
+         self,
+         act_fn: nn.Module,
+         dropout_prob: Optional[float],
+         device: str,
+         d_model: int,
+         batch_size: int,
+         seq_len: int,
+     ) -> None:
+         """
+         Similar story with AMP. The only changes come from the modified dtypes and needing to also
+         save references to the low-precision weights in the Linear layers.
+         """
+         inputs = torch.randn(
+             batch_size, seq_len, d_model, requires_grad=True, device=device
+         )
+         expansion_factor = 4
+         mlp = layers.MLP(
+             d_model=d_model, act_fn=act_fn, dropout_prob=dropout_prob, device=device
+         )
+         dtype = torch.bfloat16
+         with torch.autocast(device_type=device, dtype=dtype):
+             with act_mem.SavedTensorContext(ignored_tensors=mlp.parameters()) as saved:
+                 _ = mlp(inputs)
+
+         # Compare measured memory against expected
+         amp_weight_mem = 2 * expansion_factor * d_model**2 * dtype.itemsize
+         first_lin_input_mem = inputs.numel() * dtype.itemsize
+         second_lin_input_mem = expansion_factor * inputs.numel() * dtype.itemsize
+         # Only some activations require additional activation memory
+         activation_input_mem = 0 if act_fn in ZERO_MEM_ACT_FNS else second_lin_input_mem
+         dropout_act_mem = (
+             0
+             if not dropout_prob
+             else inputs.numel() * (dtype.itemsize if device == "cpu" else 1)
+         )
+
+         expected_mem = (
+             amp_weight_mem
+             + first_lin_input_mem
+             + second_lin_input_mem
+             + activation_input_mem
+             + dropout_act_mem
+         )
+         assert (
+             saved.saved_tensor_mem == expected_mem
+         ), f"Failed on {act_fn=}, {dropout_prob=}"
+
+
+ @pytest.mark.skipif(not torch.cuda.is_available(), reason="cuda not available")
+ class TestCUDAMemReadings:
+     @pytest.mark.parametrize("d_model", D_MODELS)
+     @pytest.mark.parametrize("batch_size", BATCH_SIZES)
+     @pytest.mark.parametrize("seq_len", SEQ_LENS)
+     @pytest.mark.parametrize("act_fn", ALL_ACT_FNS)
+     def test_mlp(
+         self, d_model: int, batch_size: int, seq_len: int, act_fn: nn.Module
+     ) -> None:
+         """
+         Track saved tensors and allocated memory and verify they agree.
+         """
+         inputs = torch.randn(batch_size, seq_len, d_model, device="cuda")
+         mlp = layers.MLP(d_model=d_model, act_fn=act_fn, device="cuda")
+
+         with act_mem.AllocatedMemContext() as mem, act_mem.SavedTensorContext(
+             ignored_tensors=mlp.parameters()
+         ) as saved:
+             outputs = mlp(inputs)
+
+         # AllocatedMemContext captures the outputs, but not the inputs, while SavedTensorContext
+         # captures the inputs and not the outputs. Nevertheless, the readings agree because the
+         # inputs and outputs are tensors of the same size and `dtype`.
+         assert mem.delta["current"] == saved.saved_tensor_mem
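The docstrings above hinge on whether an activation's derivative is recoverable from its *output*. A small sketch of the ReLU case, where the output alone suffices, so autograd need not keep the input around:

```python
import torch

x = torch.randn(5, requires_grad=True)

# d/dx relu(x) = 1[x > 0], and since relu(x) > 0 exactly where x > 0,
# the derivative can be reconstructed from the output alone.
out = torch.relu(x)
grad_from_output = (out > 0).to(x.dtype)

out.sum().backward()
assert torch.equal(x.grad, grad_from_output)
```

GELU has no such identity: its derivative depends on the pre-activation value, so the activation inputs must also be saved, which is exactly the extra `second_lin_input_mem` term in the tests above.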
src/bibliography.bib ADDED
@@ -0,0 +1,24 @@
+ @article{example2023,
+   title={Example Paper Title},
+   author={Example, Author and Another, Author},
+   journal={Journal of Examples},
+   volume={1},
+   number={1},
+   pages={1--10},
+   year={2023},
+   publisher={Example Publisher}
+ }
+
+ @online{distill,
+   title={Distill},
+   url={https://distill.pub},
+   year={2016},
+   publisher={Distill Working Group}
+ }
+
+ @book{textbook2022,
+   title={An Example Textbook},
+   author={Author, Example},
+   year={2022},
+   publisher={Example Academic Press}
+ }
src/distill.js ADDED
The diff for this file is too large to render. See raw diff
 
src/fragmentLoader.js ADDED
@@ -0,0 +1,114 @@
+ async function loadFragments() {
+     // Find all elements with ids starting with 'fragment-'
+     const fragmentElements = Array.from(document.querySelectorAll('[id^="fragment-"]'));
+
+     class FetchQueue {
+         constructor(maxConcurrent = 3) {
+             this.queue = [];
+             this.maxConcurrent = maxConcurrent;
+             this.activeFetches = 0;
+             this.maxRetries = 3; // Maximum number of retry attempts
+             this.baseDelay = 1000; // Base delay in milliseconds (1 second)
+         }
+
+         async sleep(ms) {
+             return new Promise(resolve => setTimeout(resolve, ms));
+         }
+
+         async fetchWithRetry(fragmentPath, retryCount = 0) {
+             try {
+                 const response = await fetch(fragmentPath);
+                 if (!response.ok) {
+                     throw new Error(`HTTP error! status: ${response.status}`);
+                 }
+                 return await response.text();
+             } catch (error) {
+                 if (retryCount < this.maxRetries) {
+                     // Exponential backoff: 1s, 2s, 4s
+                     const delay = this.baseDelay * Math.pow(2, retryCount);
+                     console.warn(`Retry ${retryCount + 1}/${this.maxRetries} for ${fragmentPath} after ${delay}ms`);
+                     await this.sleep(delay);
+                     return this.fetchWithRetry(fragmentPath, retryCount + 1);
+                 }
+                 throw error;
+             }
+         }
+
+         async addFetch(element) {
+             const fragmentName = element.id.replace('fragment-', '');
+             const fragmentPath = `fragments/${fragmentName}.html`;
+
+             return new Promise(async (resolve, reject) => {
+                 try {
+                     const fetchPromise = (async () => {
+                         try {
+                             const html = await this.fetchWithRetry(fragmentPath);
+
+                             // Process the fragment
+                             const temp = document.createElement('div');
+                             temp.innerHTML = html;
+                             element.innerHTML = temp.innerHTML;
+
+                             // Handle scripts
+                             const scripts = temp.getElementsByTagName('script');
+                             Array.from(scripts).forEach(oldScript => {
+                                 const newScript = document.createElement('script');
+                                 Array.from(oldScript.attributes).forEach(attr => {
+                                     newScript.setAttribute(attr.name, attr.value);
+                                 });
+                                 newScript.textContent = oldScript.textContent;
+                                 oldScript.parentNode.removeChild(oldScript);
+                                 document.body.appendChild(newScript);
+                             });
+
+                             this.activeFetches--;
+                             fetchPromise.settled = true; // mark as done so the queue can drop it
+                             resolve();
+                         } catch (error) {
+                             console.error(`Failed to load fragment ${fragmentPath} after ${this.maxRetries} retries:`, error);
+                             this.activeFetches--;
+                             fetchPromise.settled = true; // mark as done so the queue can drop it
+                             reject(error);
+                         }
+                     })();
+
+                     this.queue.push(fetchPromise);
+                     this.activeFetches++;
+                 } catch (error) {
+                     reject(error);
+                 }
+             });
+         }
+
+         async processNext(element) {
+             if (this.activeFetches < this.maxConcurrent && element) {
+                 await this.addFetch(element);
+             }
+         }
+     }
+
+     // Initialize queue
+     const fetchQueue = new FetchQueue(3);
+     let currentIndex = 0;
+     const elements = fragmentElements; // collected above
+
+     // Initial loading of first 3 elements
+     while (currentIndex < elements.length && currentIndex < 3) {
+         await fetchQueue.processNext(elements[currentIndex]);
+         currentIndex++;
+     }
+
+     // Process remaining elements as fetches complete
+     while (currentIndex < elements.length) {
+         // Wait for any fetch to complete
+         await Promise.race(fetchQueue.queue);
+         // Drop completed fetches from the queue (native promises do not expose their
+         // state, so each fetch marks itself with a `settled` flag when it finishes)
+         fetchQueue.queue = fetchQueue.queue.filter(p => !p.settled);
+         // Add next element to queue
+         await fetchQueue.processNext(elements[currentIndex]);
+         currentIndex++;
+     }
+
+     // Wait for remaining fetches to complete
+     await Promise.all(fetchQueue.queue);
+ }
+
+ export { loadFragments }
src/fragments/banner.html ADDED
@@ -0,0 +1 @@
+ <div> <div id="88bd2bde-2a9d-42b6-bea3-2a6d06b034ea" class="plotly-graph-div" style="height:400px; width:1200px;"></div> <script type="text/javascript"> window.PLOTLYENV=window.PLOTLYENV || {}; if (document.getElementById("88bd2bde-2a9d-42b6-bea3-2a6d06b034ea")) { Plotly.newPlot(...) } </script> </div>
+ [Inline Plotly scatter data omitted: per-point categories ("smol dot", "ok-ish dot", "a dot", "biiig dot") with base64-encoded x/y, marker-size, and marker-color arrays and a three-stop colorscale; the blob is truncated in this diff.]
QDu+GFcwzwJAuuSNDdef+T+vGUl9XZPsPxqOXI4fk\u002fk\u002fEt3q5iAlAkCaUelOu48GQKFruUniL+o\u002fOHPEHrGc\u002fj81roPeI5vzP4CXfoUegPo\u002fFl4eqEOb8j\u002fyYx1BWBnjP0mh5fVJevI\u002fmt84a6b6B0DmaDZRgOfxP7yQ+ZKK\u002fQJAhR6q4jXUA0AwUHFh9ZrmP6bEkeD24PI\u002fpfqGG0ro6D9SrmZZvfn5P617cj6t+Pk\u002fAtKPombCAEAIz7j+22kHQIjwJicF0v8\u002f9FIlLAGaBkA4ae+Vo+XsP5kRN6ETqQFAQ+ZMMmqR8D\u002fpc+KGkxHzP0mupFe20eQ\u002fRycll1mH6D8bcc7al3PsPzoCPtsuFgNAUPC20QQjAUCfG2ekqeYDQMPopcVK4QFAavX9OsN88z\u002fOPq1Xqfj+P\u002fuZAfDKRgRAfI7EhOMVBUD4\u002f07cyQy4P\u002fuaEjay3gVAGtOVviZU9T9Skr0xq6HiP2gHXiBiFwJAtJdZLE+34j81FmF63Z3oPykCwpQxWfQ\u002fEIQ6gkeB+D+AMmtJGi32P7WGN9J1rPY\u002fGcDF2Drb5z8igcQg9pPwPyhaBeicsrY\u002fwM37H1fQoz9cHNBkuLHIP+Q4u+aKBfs\u002fN5SBADuYB0DN\u002fBCTwZcGQES7vYWJXMY\u002fVtFO6L0g+T\u002fmxu9uy+baP12ufoz5n+k\u002frKQNF67tBkCCo75Ksm\u002f+P3p0cBefIOI\u002fQKq1f9q3ij8jHp9hryT6P2+VWVrCDvk\u002f+FsJwpvt+j\u002fOzCYOeoYDQCaqMtD+LPg\u002fJrPNvMZk0z9wCfpDsbiqP6qCqyYCbAJAaCutMuUb9D9cLTnOZb8HQDg+gHvAsNk\u002fjF0bhK0Y1z+yZCGPqfIFQPRsXUxxo8w\u002fRriHyWe4+T+QLhULnafOP64IXtao\u002fQFASuS4VtNf2T+Q3HPLLNUGQDh1Dgu8Rek\u002fHkeoCKPU0T8IQhqMit\u002fdP4cO8zGUdeo\u002f+clG2KP1AkCslyTMFWP\u002fP32GaiCo7ABA0H5Q8uGBsz9GyJDmg9YBQCCPlLZDogRAVki1\u002fTXFAUC8oS+JjQb0PyDi+08GybU\u002fipMGiOd9BkAgZizYBgj4P+y78YN0HdQ\u002fg\u002f8UgZy\u002fAkAVLYKorQnrP16zwP1WWANAvTDFI4nB+j8QrgzrNt6pP5kD0z\u002feNAdAbPge8aX6AUD5O6mjSdnrP8h9Wcuz9Mk\u002fLIV4NetsA0Ds0wRETFr\u002fP0555dkm\u002fAdA\u002fPtRWHT28j\u002f6bPRHyP4CQLB+lKbkdeo\u002fvfpAOHly8z9NoZMPtj7pP8oXWpPrbf0\u002fz6M6CLQ3AkDuSDgupw0FQLNtC\u002fcUbeI\u002fSLoRyYIn2z8rmTuT6hMBQAlENslpRek\u002fPM2USDkR4T86FStCNKwHQKq88OSwaPY\u002fA+e1HcyNAUDAhKPPzqXeP\u002foNZOIssPs\u002fg+Zj1RFPA0AAdIQK522dP\u002fJOKJKKNfI\u002fYLIkqq4KA0BeBsTEPYn3PzTg\u002f+87PsI\u002fFpdHapie8z8uv3FZ8Q8EQEr9\u002frd\u002f+fY\u002fkJz\u002f9xquBkBpFwBsgtzqPwjV2j7ei9E\u002ffyNZyKYk\u002fz+y4nNxi0YGQN0eeVwVRAFAyyLKWh6o6j8esYZ5hDUBQJWaq\u002flDTO8\u002fhHEqIgKd\u002fz+OHcs6Sp74P\u002fEoJXwjKgNAOsV1hUXi8D9J9PSp1er9PydnLA8O4AZACPMIgnQEzD9b6zAb023qPw3pYE5o0fo\u002fVtNm\u002ffuu1T9yPp2fAr\u002fkP09hxV5pD\u002fw\u002fBnImapkf3T\u002f4LIEgbn7APygmZ\u002fCNu88\u002fmI7faU1A0T\u002fmupa79zXxP5GkSZ9FMPE\u002fQFE+uLiZqT9\u002f2dbLXX72P8oZF2c5Lfg\u002feLsMehR5B0CSjvD007UCQNmd1FisI+Y\u002f8m5qYyTu9j9KWWwqZD7aPwzh7jbxegZAcMNvLry6pD98f6w0Fp0CQK56fBZo7\u002fA\u002fOJ4B0UPOyj9MdghIojDqPwiA2KmYbLc\u002f9PHUk15+3D8uxGCJUjoEQE1JNnuEzeQ\u002f+h17pQDn5T9o+Ikb\u002fdTwPynhfnTnm\u002f4\u002fxwYRUcAp6T+u3v12g8PyP3JiXGiQgv8\u002fGOo\u002fxnVi4z+UqqfijHAEQMzNCL56gv0\u002fShUSt7LmAkDN5MfgXPP1P3N5aEoeYPI\u002fZCashnM2+j9OTBnRqsHhP782zjXuMgZAQoomp\u002fxC9z+gEqhOFisFQLBsDwOvRO8\u002fHmTPoaZS9j+sNWdC7AvMPwsO0L+ix+Y\u002fMVF5sL4A5j+CnBF9Q2f9PzPgSzI6hgZAFrS5UvdC0z+U+2DouW3qP1IOZfE0lO8\u002f9FpG5gIS9T\u002fLsS7iLMz4P1peCFO4wQJAOLZuHf6o2j\u002fsNYwTLSEDQPjA30y\u002fFQRAtVOFZEAG6D9YLls6YbrwPyM7oRBXsOI\u002fMlddUCKcA0D4nHf+iebkP+\u002fAQhmt+\u002f0\u002fY41NdjuUAkDcKRCr5Vr7P4gjP4CYv\u002fU\u002fDiA6xnouAUADgVH8mg3sP2QyTLegi\u002fQ\u002fXAWrj4pW\u002fT9beN3LcjwHQCszw3hjVANAQKfKhlmeqj+FHx2URL\u002flP55Mnu7b9fE\u002f2eqQqlkP8z+iLCRTK8LUP+0M6Hf5YABARCxLaXoe8D+EOdTVgEz5P7wKgJa3\u002fwFAcHBGqWGy8D8i4Y9e4SD\u002fP63jGAbHseA\u002fCviL2Sw30D+khCIrLqsDQGBlwLQdQwRAWFXE3YabAECq72ZYCtvlPxSQjSeXO\u002fo\u002fEEZZEInb8z8HO4ZR1EzsP2j0r\u002fEuowVAggrmnJow1j8IZ2gUas38PyaZ\u002fbKxIQFAWh2VLtyP4D\u002fWl31g8lvzP3wnt7\u002fQePk\u002foiet2Y6y1j8LxqFBAjP\u002fPz5FUoX73wJAUr5qqjiVAUBKb5R43af\u002fP5qBTZbguARAgZCvVAW39T8mvYLxJy\u002f4P8hzIOTqzeg\u002fI\u002ffr81iR9T\u002fSfVPvAjrrPyh
JOprDhP8\u002fdsolZwIL9T9omss5bbX7P1oQj1FJVvU\u002fjSTnPjk3\u002fj9l6cZYsMTpP3Rp8FK7egRA0l8u2Vcf3z99KxuGsSDqP0fl3OaDdAFAzuB1\u002feUMBkCOVnwZvgftP7LD92e0Ods\u002fHulOdAKlA0Akmu9SeSLwPwC3q53ldME\u002faK3P5wF+AUAspXt4xzL8PzR0KxEgMPQ\u002fEOnVj1TfAkB9WTlMeTblP1xK5GkCFPw\u002fiCRctYE8A0Dq+zywrlftP0CCKEgUgdY\u002fynt13lPj5j8X0B3X+QP0PwpPSjtzV\u002f4\u002fAF2zRIHtkT+SoCw5TgYAQGYyiUpOD\u002fU\u002fTLCxrbOABkCmlLv8qhLcP2CezFgBEAVAIKW3NV+GAkDgVaiqNwfXP0KLh8LBzvw\u002f87WfwG6pB0CcVXKTnMPzP+blzMYJRARA\u002fZJxBmDP6T+QsiBIf7aoP1y1gTxv\u002f\u002fE\u002fiouTYkOZAEDR2XLfTfvmP60psePbuwJARgypgY1fAEBwucPgLuIAQH49bZjbzwBALecwHu\u002f+BkD093bca1vaP3Q2rYG+Mto\u002fKDuV2kWJ3j9COoWSEbsAQPhEdxHmZrc\u002fej4QF71Q8T9yrx183WEFQIq7+I5Qj\u002fo\u002fxJmZYqnm1T\u002fUAVD74jDfP\u002fRUaSjRQ\u002fA\u002foeL9w4FR4T9waOCchiH8P9yC2Tz2SQdAqFCvs\u002fKyAUBPoDpMEdvlPwAtf\u002fOeLN8\u002fMqEGusm4AkDIaOIln1jKPxI3j1Tr\u002fOE\u002fKzASUGRGAEB\u002fdTTfozIBQPD4EklwJgRAxAE8m2a4AkBWKndw37P2P2p9ffzHBvQ\u002fdg8g7HB63D\u002ftvJc3H1H8P+J3mo3ANwRAT5MUl3\u002feAUAmcEUrIvPxP8rrFYUoZ98\u002f0zAMR1OJ\u002fT+CPWZRIjXsP+hHML9YPMU\u002fxK5SKoZrBkCWahlpkN3kP\u002fIfpXo4d\u002fA\u002f0EOUVbjZB0ApWEMiyiEBQGWsC2FRhgVAOOaDmen72j8W7rd\u002fUTAHQCIB68ebPfA\u002fcWv47kEg4z8YKyDH+WgCQKjllCbQmbM\u002fV9QlCQu7BkBAAJ+s4BQBQGJL+EvHjgRAYqgl\u002f96fAkAr5J1taEr3PzAHwmpZv\u002fA\u002f9L7Tu0Uczz+9Vf6h6z3hPzcMkKLz2ARACGw1Mxl\u002fxT+UxnlotAj5P5Dj2GnbJso\u002fSk4gOj0yBkD4UJLHEn8GQFzq40Y3pvg\u002fBBCl1pghBUAE6XAEyWj8P3xKlFEJIMs\u002fCUOD5Z2k+j9aj6Dfx3HhPxBXLjdYias\u002fD7JdzbwV4T8+PsttGwQHQCD\u002fTb8NCdE\u002fNbRQAzrtBUAsWSyrK7HRP0B79I8oNY0\u002fdFb4B0PKxT8Cx6nZrd7dP75VB2h8u\u002f4\u002fHmwpK9Pp+D\u002f0S7xenYgDQDrD6a1bXeg\u002fbbcRR34r9T\u002fTSbkebCnyPwUmQObrE+g\u002fXv5T4U5u9j964epGVXvnP6H1O7DRrQRA2\u002ftfX8VB5D9094tpfITgP\u002ft14u6zwO4\u002feJnMO\u002f0N7T81G8OpMkfhP4xrBTCPTNI\u002fyg79\u002f8gc9z9TXlNcxDEDQA=="},"y":{"dtype":"f8","bdata":"cH9iISbfuD+Gjzada6LYP1SVraWrBOQ\u002fECAcF5Ozoz+Il+rvJGPBP5jdCsyiguw\u002f\u002fpXtDf1J7T8toa9MU0HlP9Bl09IPQMo\u002frJaFQHoB2T8EfztEDL7fP1B4biaYqKs\u002f8Y5ciKeu4T+BmmzIr\u002fnjP\u002f17i9rnHuQ\u002fYJ6kIeG45D9YM\u002fkgeKfpPwJEWuewVdE\u002fk9sZfvw37z9UTeu+\u002fKfPP1TKGwjRM+I\u002fdNlJRqr67j+oT7Pb+erNP7UrdQP5j+4\u002fqHphX9b82z\u002fe7m4XazLaP4uHWj6bSeE\u002fWN0LYH5O5j\u002f7h6NNjVTjP2LNky4\u002f0OI\u002fAOSoFsH+4T9MEdoEV7HdP4zdo5o8acs\u002fWLXLEtb71D\u002fcUdd\u002fiInoPyIYSEPvH+E\u002fzl+Vsd\u002fs3z\u002faVgqX193UP+e3xz2mauo\u002fmlqZDd533D\u002ffLu3FKQzkP+AQ74rtHq0\u002fyErpWFMi2D9cQrY7AbnvPzfEmPqu6+4\u002f8AFGDuCL0T9MwLmT2CDbPwIH4KACqdc\u002f+5uufM7d6j84+1xqANPLP+i2Mfi7o7Y\u002fm2Kn6Jz07j9809mlCPjOP54\u002fIDVCieI\u002fmDDJiWvT0T8UW1KYRN7FP+CjraUE67Y\u002ftKYkr2Oq7z+CdXpZ4ZDQP9pyrPmXj9k\u002fZCVaCHVI5j+UuIKpHrPNP8FJh4XB+eE\u002fWFrH33rawz91vyrPiQPnP060T1Ztd+o\u002fkD2WzmWY7z92xccJ6OzuPxjlL+h\u002fILg\u002f3jfF3XJA7j\u002fQBAairMjGP\u002fyppg9LtuI\u002f1gtecfMu2z9grdrfO++3P1rp\u002fL4E3NE\u002ficU\u002f9sDl4j\u002fI\u002fkfy5NPLP9ASF3TbpLE\u002f1xnsJqN26j\u002fh\u002f8fVBCPoPzzzFtLmWu4\u002fONxzPlIAvz9EE3j3ml3KP4ioPoq+lLY\u002fHGUtBbx2wz+8TDIpyQjPP+TvEU\u002f13Mo\u002fztcZP8v71z8kebOR93nfP\u002f4MxL09I9M\u002fwPGgtHS67T9Aud3x86m7P\u002fENg7ueveU\u002fftxPg45h2j\u002fnKhdSL\u002fnkP\u002fDIcMkyI8s\u002flJYcUnB67z\u002fAEmujlo6GPzYIGwfLgdg\u002fuESpPDAQ7z+A6X0SLZN2P07yR25fO+4\u002f57ZHr7Q26j8hTqrmioLpP2RGC86\u002fJOs\u002fEKJAXARZ4z\u002f0Nrxn7mrRPwTnRDGi8so\u002fSks1\u002fVHp4z9ZHnJOCvPoPzCvYHSL18g\u002fEm37s7wI7z+TOF2Qor\u002frP9MKm1HRPuA\u002fmoI6\u002fCAh0D9yupQQ4v7aP7hdpAPzcN0\u002fzBLTXgEI2j9Qk6ity4S0P\u002fBAy
cg7P+s\u002fUK3jxFxWuj+UVccMwdDcP+DqHB6Cfpg\u002fEKy5CQCyuT9yU2MzVTToP9myjRnB0uM\u002fyLFVl1Wm0D\u002fgDcPiu9GfPxH9fO8zruo\u002frggXDEvr6j8g67riwCebP1I1ud+piO8\u002fWIKX21Uf4j9GmSLnQ7vdP5geXK1iKdU\u002friR6MrW70j8ifdiFQTDiP6QWLSZuMN0\u002fdA7Ugoff6D\u002fS5KvSFlrdP+FylSZtMuI\u002ftlJUCo2P5z\u002f8ezEULwbuP2BayNz4qZ4\u002fp1UXea4g7z+dk5g0dnXhP7CwkMi+Yus\u002fXsyhqhia1j\u002fSSWnRyxzcP3A2uJ\u002f9b9I\u002f4MVYpjxs4D9rpsVMgz\u002fvP7ue6UoXWug\u002fjYWHKkVm5D9QBeQ5OTHMP+0UktAT4eM\u002fGtxIwRGJ2j\u002f+eAKX0d7iPyHVIt4\u002fuOs\u002fu+HucEwi7z\u002fQsI13+L7YPwUsDru6keI\u002fBNfetb8+7j+JinLERcvgPyppFoPWz9o\u002fwMnh6ho+0z8xiddjfj\u002fqP\u002fiHRnJzTrA\u002fyGrZCaSQ4T++qptXy7XnP8CbBlsaJN0\u002fUyDnGQrN7z8gD4gxk1GZP96CTrjbouc\u002feLc17B5m5T\u002fg0BSUYFnjP9QZzqkKycY\u002fRTi6X3cK4D+M7mg+qWLQP9SiS3EVr8A\u002fDfe+V\u002fDx5z9OsA8satfhP24zYk5BNtc\u002fuQHk1sOA6T+YC1MIh9+1P3Gkh8mZDOQ\u002fiBC\u002fI30nzT8PwGvS2aXvP8q13tQj\u002f+o\u002fQf7MoTNq5z8wd6Ve8IrLPxMD+kAaoeU\u002fEKsRujmfpD8IjAKNKeLcP3bbgmh9ct0\u002f7CP0SutO3T+6L0FewzraP94r23k4Gug\u002fkA7Zs4D01z966TgQoJXZP\u002fQOOWuAt8o\u002flH2MHOm3xj985pPXuQrqPz1bPZ6jGeU\u002fuAJJtvpv2D8UgiSPNo\u002fhP9h3D6rGqL8\u002femiHLsqG1T+QjyuG7kiuPyhROUq46s0\u002fxideE6GQ6z+MMh\u002fXaTHUP9+Ar+yC6Os\u002fABDP+TiDVD9wpfLBg36oP6snBDazge0\u002fYbDcu0Kx5z\u002fEX6O3epLuP6ch2UUe1uI\u002fmBJDPCd\u002f5j8hdewsyxTrPyAx+0UR2t0\u002f8Hi0XGUlqD90A1XI\u002f47uPzjoENg4Suc\u002f2+FEXcL97T8nsWUbyNzoP1\u002fSgreNIeY\u002fAYhGXhAY4T9wBJim1hKlPzS3xEW8se8\u002fh3K5iGei5D8hiNiqc0XrPzyVssiuVdY\u002fC+uxdA0Q5T\u002foygoEQK+4P5jKznpljME\u002f8C6L61U4vz+IXlE9P8nZP1hSbOM\u002fneI\u002ffPNQ66Pt4D8aejNXdtHTP3uClad6kOE\u002fyLfTs6Gt0T9nmWXJJzzoPyhKT1wBlNI\u002fwI\u002fNFdX2qD84nvHFGUPkPylxyUVyYOI\u002fDRZKQz045z+sqZ7+7WjcPwCMdWKOssA\u002fhLJXtHdM6D\u002flKuNZTybqP4ShawGGAdQ\u002fBGRNkQ\u002fd5T+8ud+JkoTGP0SPwP6ZBe8\u002ffldNWju+0D8wuXC7pXe2P4ANy+iR73Y\u002fOPEeEL3J0D9ovzKSle+xP3zhxpYR2Ng\u002fqHSQD4rrtD\u002fIdod774TKP60+bRLAou4\u002fHJPFTgLH1j\u002fo36zGcti5P5J6QiV9IO4\u002f8I7UIBAHsT\u002f3oXyXEFvjP3Yi4lCrS9s\u002fPXF4AAwn5D834YLRLefsPwAtfN5KvL8\u002fxg2AXmhx0D\u002fwtaT1GGTaP9fqpbQjA+M\u002fKTe+UfOO6T+6VsN4r4nVP9gYYrSI5dc\u002fdD9XVU0LyD+WyLbUe6jlP6pC7bx7gOM\u002fNnyTie3s0j9llOohdgXmPwzHPhqb7dc\u002fymccnhfW4j+WPguxVNfQPzTJENPY1u8\u002f7EjLTNid6T8Gmn1jCk\u002fXP7yUknDeat4\u002fvLU+7Vw5zz9g0nw9KHmkP1jNvtYGJ9s\u002fhPL5Xydz0T8tq8zuPyHsP9WorCa\u002fXek\u002f+yp9nS+D4z+L5nsCTCzqP1x+ul+5S9U\u002fGOGvX4ss2D\u002fNlsUH86zlP6GEdPrT9+g\u002fxzc2vxjB6j\u002f3TZXHGvfiP\u002fqS2rRvlNU\u002fS2pHvDZw6z+7GMLh3gjnP6a9x2Pkpus\u002f1JmpT1pEyT82h2kTdEjvPwRhR9aBC+8\u002ft+6jIIBI5D+1bAwI8K\u002fqP8JAPZFRT+c\u002fF9c7tfGS4j94Y9BCflrHP\u002fAu4cshTtY\u002fQCfDNwoyzj+ECan+0k7TPz7jqM8VBus\u002fsry0Btuj2T+xEei0XG7hP2+p5pw8We8\u002fCvTCJcvQ2T\u002fwfouU627bPzbL0rxeSNE\u002fSvZDmHWd2T\u002fOPaySqGDsP\u002fDMavHKVaI\u002fotuclkDR4z+7658CHo7oPzoYtbNUdu0\u002fLYe\u002fKfc45D8tN2BsZTXnP7Qt+yVxA+4\u002fktIsLr3r0D9LPsWs7NLgPynH+wIJDek\u002fEz4WIunp5D\u002fMJsCb3wPaP+BdtHmEka8\u002f1KKh5c4w4T9Qz1Pi9Q7NPyCSkwTGiKI\u002fAHd1k59Jmj\u002fETynUKPfQP8yuZX4PL8Q\u002fO34TJFJl5T\u002f493MDCMzlP\u002fP8xwg5ue4\u002f4kwd07cs7z8R4i9OWOPlP2zQQhWUPMk\u002fgZDjica65j\u002f5buYTMw\u002fqP1wWrHambsg\u002fBmmP26wL6z8IOCL6pqHAP+l99vABZ+E\u002fpn0TJ4f95j+o\u002fE+7ary7P+48GjvJ2e0\u002f7KoB3R317D+4Fk13HA7eP+KPVIJYB9Y\u002frMwFaa0v1j8YJEwtU1G8P+ANgKuJzJc\u002fhNeUC3cA7z9ouBLOqhHIPwwWxX87bMs\u002ftNqvNXZTzD8T+0lIctjtP+VSqo8GF+s\u002fLCmwuSJS2j+4BaK5THXNP2AMmPbWMaM\u002fEMWQhPFLsD+GsvylVHTfP3Avp7NfE+s\u002fuGlXKH5B1z9CH+Z8AZXpPyQlnXKBTdY\u002fknVPV
pdZ7T8GCn73ApjmP76+hgRAHOc\u002fQFh\u002fVeUMlj968sO6rhHoP2okUF9bEOc\u002faF4i93G42T9sjO9tATvjP0j30CRKfr8\u002fmItrKbjVtT9EMx5fNGLYP1MCmbF8beI\u002fEN9P0nKp7j\u002fokXOePEPrPzQLIcE\u002fysY\u002fAdPXAz8B6T+oUQMEaQDjP8SwQhBw6c4\u002fHK826V3M1j8g2A3piu7HP7z+tsYFKOY\u002fcMY32CBdyj9Q1f\u002fP8cTPP7pT3dtmxe8\u002fuOkUhe4Xwj9IZ79sJ5jcP6agvaiiUeA\u002fQJTXOuicxT8oI53C2N\u002fBP6onWAhfWdY\u002firqTNAz57z\u002fA1W6genWbP9kF6+qQQek\u002fqzmLJeAk5T+gt63ohemjP4BDryTkCag\u002fQJbkO5iFmT\u002fgUkSQHtWaPxxFMGEpEMs\u002fFiyHyWiE2T+Mmq6o4sHZP1cp8LhVZOw\u002fgc9gLcDT4j9Z2o0L2E3kPyZciQJCg9Y\u002fuMj3k8Kgsz+2hlIUbdvWP+RQp+OPPcY\u002fTt6vELP27T9WrCHNd1rrP5rAm9VuYtQ\u002fsori2n2L1D9y54Y7rX7eP2JaN3aTGe8\u002fyDFd\u002fdAszj8QdeVhZ7W5P0gFtI0k988\u002feo85vECE7j9inVyB81LuP3aVZMxledw\u002fIITWrUyl7T+vCO68XrziP3itbaIKA8Y\u002f3XgAe1vg6T8NQQDyv\u002fvoP9dJLYOM4+s\u002fw\u002f\u002fi7cYB4j9cD0m9c8nOP\u002feO9umqvOE\u002fZv8cZmZK5z9zeJFnt2PtP9vWRWTGEeM\u002fqMiVW22o2D8tvsAd\u002f4TkPzx385td19M\u002f4OqVv3zL7j8K6vYrATnrPzVfIJsis+w\u002f0T3W94ln5j+mwAcnzd3cP9hFs4pCO7w\u002fgik4z5Aq0j+4uMxSGuHAP6vFk0IUzug\u002fnHA74II2zD+oajhg2ZDfP0ire\u002fEy\u002fe4\u002fYIfRmK4Psj8KtKBD77XSP5AcJF5CFbU\u002fXDgDiW7l0T8YgkZUOWDKP75viylnQNc\u002fmODKQLSItT\u002f1GidFidTuPxNjBwjXN+k\u002feJoeeJs7vT+CHWu7SCLTPyCuqVjs\u002f5A\u002f3snCbF3+0z+AMzads2vRP\u002fpBsnkcsdY\u002fdG0oeU+AzD8ubss5DivdP83wQNRTQuw\u002f0HMxgeyxqT\u002fAVCzJ\u002fgC6Px\u002fF0Hf\u002fh+s\u002fDCqSMDpm1T+iU4Lx4gffP4ZdjcLVmdM\u002fuMyyKr1DyT\u002fIEwq60TXoP2aog8ykL9A\u002fzYZh8G1K6D\u002fpO4sG7CvrP9RIFLkqYdE\u002fcGClag11zT8AiLuKcY3ZP3htdc47Kss\u002fnqaBMP2C1T+qTzG670nrPx5wmwWI9do\u002fQDGvQ0Vjij+n6KEqWC\u002fnPw=="},"type":"scatter"}], {"template":{"data":{"histogram2dcontour":[{"type":"histogram2dcontour","colorbar":{"outlinewidth":0,"ticks":""},"colorscale":[[0.0,"#0d0887"],[0.1111111111111111,"#46039f"],[0.2222222222222222,"#7201a8"],[0.3333333333333333,"#9c179e"],[0.4444444444444444,"#bd3786"],[0.5555555555555556,"#d8576b"],[0.6666666666666666,"#ed7953"],[0.7777777777777778,"#fb9f3a"],[0.8888888888888888,"#fdca26"],[1.0,"#f0f921"]]}],"choropleth":[{"type":"choropleth","colorbar":{"outlinewidth":0,"ticks":""}}],"histogram2d":[{"type":"histogram2d","colorbar":{"outlinewidth":0,"ticks":""},"colorscale":[[0.0,"#0d0887"],[0.1111111111111111,"#46039f"],[0.2222222222222222,"#7201a8"],[0.3333333333333333,"#9c179e"],[0.4444444444444444,"#bd3786"],[0.5555555555555556,"#d8576b"],[0.6666666666666666,"#ed7953"],[0.7777777777777778,"#fb9f3a"],[0.8888888888888888,"#fdca26"],[1.0,"#f0f921"]]}],"heatmap":[{"type":"heatmap","colorbar":{"outlinewidth":0,"ticks":""},"colorscale":[[0.0,"#0d0887"],[0.1111111111111111,"#46039f"],[0.2222222222222222,"#7201a8"],[0.3333333333333333,"#9c179e"],[0.4444444444444444,"#bd3786"],[0.5555555555555556,"#d8576b"],[0.6666666666666666,"#ed7953"],[0.7777777777777778,"#fb9f3a"],[0.8888888888888888,"#fdca26"],[1.0,"#f0f921"]]}],"contourcarpet":[{"type":"contourcarpet","colorbar":{"outlinewidth":0,"ticks":""}}],"contour":[{"type":"contour","colorbar":{"outlinewidth":0,"ticks":""},"colorscale":[[0.0,"#0d0887"],[0.1111111111111111,"#46039f"],[0.2222222222222222,"#7201a8"],[0.3333333333333333,"#9c179e"],[0.4444444444444444,"#bd3786"],[0.5555555555555556,"#d8576b"],[0.6666666666666666,"#ed7953"],[0.7777777777777778,"#fb9f3a"],[0.8888888888888888,"#fdca26"],[1.0,"#f0f921"]]}],"surface":[{"type":"surface","colorbar":{"outlinewidth":0,"ticks":""},"colorscale":[[0.0,"#0d0887"],[0.1111111111111111,"#46039f"],[0.2222222222222222,"#7201a8"],[0.3333333333333333,"#9c179e
"],[0.4444444444444444,"#bd3786"],[0.5555555555555556,"#d8576b"],[0.6666666666666666,"#ed7953"],[0.7777777777777778,"#fb9f3a"],[0.8888888888888888,"#fdca26"],[1.0,"#f0f921"]]}],"mesh3d":[{"type":"mesh3d","colorbar":{"outlinewidth":0,"ticks":""}}],"scatter":[{"fillpattern":{"fillmode":"overlay","size":10,"solidity":0.2},"type":"scatter"}],"parcoords":[{"type":"parcoords","line":{"colorbar":{"outlinewidth":0,"ticks":""}}}],"scatterpolargl":[{"type":"scatterpolargl","marker":{"colorbar":{"outlinewidth":0,"ticks":""}}}],"bar":[{"error_x":{"color":"#2a3f5f"},"error_y":{"color":"#2a3f5f"},"marker":{"line":{"color":"#E5ECF6","width":0.5},"pattern":{"fillmode":"overlay","size":10,"solidity":0.2}},"type":"bar"}],"scattergeo":[{"type":"scattergeo","marker":{"colorbar":{"outlinewidth":0,"ticks":""}}}],"scatterpolar":[{"type":"scatterpolar","marker":{"colorbar":{"outlinewidth":0,"ticks":""}}}],"histogram":[{"marker":{"pattern":{"fillmode":"overlay","size":10,"solidity":0.2}},"type":"histogram"}],"scattergl":[{"type":"scattergl","marker":{"colorbar":{"outlinewidth":0,"ticks":""}}}],"scatter3d":[{"type":"scatter3d","line":{"colorbar":{"outlinewidth":0,"ticks":""}},"marker":{"colorbar":{"outlinewidth":0,"ticks":""}}}],"scattermap":[{"type":"scattermap","marker":{"colorbar":{"outlinewidth":0,"ticks":""}}}],"scattermapbox":[{"type":"scattermapbox","marker":{"colorbar":{"outlinewidth":0,"ticks":""}}}],"scatterternary":[{"type":"scatterternary","marker":{"colorbar":{"outlinewidth":0,"ticks":""}}}],"scattercarpet":[{"type":"scattercarpet","marker":{"colorbar":{"outlinewidth":0,"ticks":""}}}],"carpet":[{"aaxis":{"endlinecolor":"#2a3f5f","gridcolor":"white","linecolor":"white","minorgridcolor":"white","startlinecolor":"#2a3f5f"},"baxis":{"endlinecolor":"#2a3f5f","gridcolor":"white","linecolor":"white","minorgridcolor":"white","startlinecolor":"#2a3f5f"},"type":"carpet"}],"table":[{"cells":{"fill":{"color":"#EBF0F8"},"line":{"color":"white"}},"header":{"fill":{"color":"#C8D4E3"},"line":{"color":"white"}},"type":"table"}],"barpolar":[{"marker":{"line":{"color":"#E5ECF6","width":0.5},"pattern":{"fillmode":"overlay","size":10,"solidity":0.2}},"type":"barpolar"}],"pie":[{"automargin":true,"type":"pie"}]},"layout":{"autotypenumbers":"strict","colorway":["#636efa","#EF553B","#00cc96","#ab63fa","#FFA15A","#19d3f3","#FF6692","#B6E880","#FF97FF","#FECB52"],"font":{"color":"#2a3f5f"},"hovermode":"closest","hoverlabel":{"align":"left"},"paper_bgcolor":"white","plot_bgcolor":"#E5ECF6","polar":{"bgcolor":"#E5ECF6","angularaxis":{"gridcolor":"white","linecolor":"white","ticks":""},"radialaxis":{"gridcolor":"white","linecolor":"white","ticks":""}},"ternary":{"bgcolor":"#E5ECF6","aaxis":{"gridcolor":"white","linecolor":"white","ticks":""},"baxis":{"gridcolor":"white","linecolor":"white","ticks":""},"caxis":{"gridcolor":"white","linecolor":"white","ticks":""}},"coloraxis":{"colorbar":{"outlinewidth":0,"ticks":""}},"colorscale":{"sequential":[[0.0,"#0d0887"],[0.1111111111111111,"#46039f"],[0.2222222222222222,"#7201a8"],[0.3333333333333333,"#9c179e"],[0.4444444444444444,"#bd3786"],[0.5555555555555556,"#d8576b"],[0.6666666666666666,"#ed7953"],[0.7777777777777778,"#fb9f3a"],[0.8888888888888888,"#fdca26"],[1.0,"#f0f921"]],"sequentialminus":[[0.0,"#0d0887"],[0.1111111111111111,"#46039f"],[0.2222222222222222,"#7201a8"],[0.3333333333333333,"#9c179e"],[0.4444444444444444,"#bd3786"],[0.5555555555555556,"#d8576b"],[0.6666666666666666,"#ed7953"],[0.7777777777777778,"#fb9f3a"],[0.8888888888888888,"#fdca26"],[1.0,"#f0f921"]],"diverging":[[0,"#
8e0152"],[0.1,"#c51b7d"],[0.2,"#de77ae"],[0.3,"#f1b6da"],[0.4,"#fde0ef"],[0.5,"#f7f7f7"],[0.6,"#e6f5d0"],[0.7,"#b8e186"],[0.8,"#7fbc41"],[0.9,"#4d9221"],[1,"#276419"]]},"xaxis":{"gridcolor":"white","linecolor":"white","ticks":"","title":{"standoff":15},"zerolinecolor":"white","automargin":true,"zerolinewidth":2},"yaxis":{"gridcolor":"white","linecolor":"white","ticks":"","title":{"standoff":15},"zerolinecolor":"white","automargin":true,"zerolinewidth":2},"scene":{"xaxis":{"backgroundcolor":"#E5ECF6","gridcolor":"white","linecolor":"white","showbackground":true,"ticks":"","zerolinecolor":"white","gridwidth":2},"yaxis":{"backgroundcolor":"#E5ECF6","gridcolor":"white","linecolor":"white","showbackground":true,"ticks":"","zerolinecolor":"white","gridwidth":2},"zaxis":{"backgroundcolor":"#E5ECF6","gridcolor":"white","linecolor":"white","showbackground":true,"ticks":"","zerolinecolor":"white","gridwidth":2}},"shapedefaults":{"line":{"color":"#2a3f5f"}},"annotationdefaults":{"arrowcolor":"#2a3f5f","arrowhead":0,"arrowwidth":1},"geo":{"bgcolor":"white","landcolor":"#E5ECF6","subunitcolor":"white","showland":true,"showlakes":true,"lakecolor":"white"},"title":{"x":0.05},"mapbox":{"style":"light"}}},"margin":{"l":0,"r":0,"t":0,"b":0},"xaxis":{"showgrid":false,"zeroline":false,"showticklabels":false,"range":[0,3]},"yaxis":{"showgrid":false,"zeroline":false,"showticklabels":false,"scaleanchor":"x","scaleratio":1,"range":[0,1]},"width":1200,"height":400,"paper_bgcolor":"white","plot_bgcolor":"white","showlegend":false}, {"displayModeBar": false, "responsive": true, "scrollZoom": false} ) }; </script> </div>
src/index.html ADDED
@@ -0,0 +1,454 @@
1
+ <!DOCTYPE html>
2
+ <html>
3
+
4
+ <head>
5
+ <script src="distill.bundle.js" type="module" fetchpriority="high" blocking></script>
6
+ <script src="main.bundle.js" type="module" fetchpriority="low" defer></script>
7
+ <script src="https://cdn.plot.ly/plotly-3.0.0.min.js" charset="utf-8"></script>
8
+ <meta name="viewport" content="width=device-width, initial-scale=1">
9
+ <meta charset="utf-8">
10
+ <base target="_blank">
11
+ <title>A Primer on LLM Embeddings: Understanding the Semantic Backbone of AI</title>
12
+ <link rel="stylesheet" href="style.css">
13
+ </head>
14
+
15
+ <body>
16
+ <d-front-matter>
17
+ <script id='distill-front-matter' type="text/json">{
18
+ "title": "A Primer on LLM Embeddings: Understanding the Semantic Backbone of AI",
19
+ "description": "The role of embeddings in traditional NLP, and their application to LLMs.",
20
+ "published": "Mar 18, 2025",
21
+ "affiliation": {
22
+ "name": "University of Bologna"
23
+ },
24
+ "authors": [
25
+ {
26
+ "author":"Hesam Sheikh Hassani",
27
+ "authorURL":"https://huggingface.co/hesamation"
28
+ }
29
+ ],
30
+ "katex": {
31
+ "delimiters": [
32
+ {"left": "$$", "right": "$$", "display": false}
33
+ ]
34
+ }
35
+ }
36
+ </script>
37
+ </d-front-matter>
38
+ <d-title>
39
+ <h1 class="l-page" style="text-align: center;">A Primer on LLM Embeddings:<br>Understanding the Semantic Backbone of AI</h1>
40
+ <div id="title-plot" class="main-plot-container l-screen" style="overflow-x: hidden; width: 100%; text-align: center;">
41
+ <div style="display: flex; justify-content: center; position: relative;">
42
+ {{{fragment-banner}}}
43
+ </div>
44
+ <p style="text-align: cekter; font-style: italic; margin-top: 10px; max-width: 900px; margin-left: auto; margin-right: auto;">It's nice to have a cute interactive banner!</p>
45
+
46
+ </div>
47
+ </d-title>
48
+ <d-byline></d-byline>
49
+ <d-article>
50
+ <d-contents>
51
+ </d-contents>
52
+
53
+ <p>Welcome to the Distill Blog Template! This framework is based on the <a href="https://distill.pub/">distill.pub</a> design and has been adapted to make it easy for you to create beautiful, interactive technical blogs. Whether you're writing about machine learning, data science, physics, or any technical topic, this template provides the structure and components you need.</p>
54
+
55
+ <aside>Reading time: 10-15 minutes.</aside>
56
+
57
+ <p>This template includes support for mathematics, interactive visualizations, citations, footnotes, and more. In this guide, we'll walk you through the structure of the template and how to customize it for your own blog.</p>
58
+
59
+ <aside>If you have questions or remarks open a discussion on the <a href="https://huggingface.co/spaces/lvwerra/distill-blog-template/discussions?status=open&type=discussion">Community tab</a>!</aside>
60
+
61
+ <p>The template is built with the following features in mind:</p>
62
+
63
+ <p><strong>1. Beautiful typography and layout</strong>: The template follows Distill's clean, readable design principles with responsive layouts that work well on desktop and mobile.</p>
64
+
65
+ <p><strong>2. Support for interactive components</strong>: You can include interactive visualizations, like the interactive banner at the top of this page and the Plotly examples later in this guide.</p>
66
+
67
+ <h2>Getting Started with the Template</h2>
68
+
69
+ <p>Let's walk through how to use this template to create your own blog:</p>
70
+
71
+ <h3>Installation and Setup</h3>
72
+
73
+ <p>First, duplicate the repository and clone it to your local machine and install the dependencies:</p>
74
+
75
+
76
+ <d-code block language="bash">
77
+ git clone https://huggingface.co/spaces/lvwerra/distill-blog-template
78
+ cd distill-blog-template
79
+ npm install
80
+ </d-code>
81
+
82
+ <p>To run the development server:</p>
83
+
84
+ <d-code block language="bash">
85
+ npm run dev
86
+ </d-code>
87
+
88
+ <p>This will start a local server at <code>http://localhost:8080</code> where you can preview your blog as you edit it.</p>
89
+
90
+ <h3>Editing the Front Matter</h3>
91
+
92
+ <p>The front matter contains metadata about your blog post, such as the title, description, and author information. Edit the <code>&lt;d-front-matter&gt;</code> section at the top of <code>src/index.html</code>:</p>
93
+
94
+ <d-code block language="html">&lt;d-front-matter&gt;
95
+ &lt;script id='distill-front-matter' type="text/json"&gt;{
96
+ "title": "Your Blog Title",
97
+ "description": "A brief description of your blog",
98
+ "published": "Month Day, Year",
99
+ "affiliation": {"name": "Your Organization"},
100
+ "authors": [
101
+ {
102
+ "author":"Your Name",
103
+ "authorURL":"https://your-website.com"
104
+ }
105
+ ],
106
+ "katex": {
107
+ "delimiters": [
108
+ {"left": "$$", "right": "$$", "display": false}
109
+ ]
110
+ }
111
+ }
112
+ &lt;/script&gt;
113
+ &lt;/d-front-matter&gt;</d-code>
114
+
115
+ <p>Also update the corresponding entries in <code>src/distill.js</code> in the <code>bylineTemplate</code>.</p>
116
+
117
+ <h3>Writing Your Content</h3>
118
+
119
+ <p>Your main content goes within the <code>&lt;d-article&gt;</code> tags. You can use standard HTML tags with some special Distill components:</p>
120
+
121
+ <h4>Mathematical Equations</h4>
122
+
123
+ <p>You can include inline math using <code>&lt;d-math&gt;x^2 + y^2 = z^2&lt;/d-math&gt;</code> which renders as <d-math>x^2 + y^2 = z^2</d-math>.</p>
124
+
125
+ <p>For block equations, use <code>&lt;d-math block&gt;E = mc^2&lt;/d-math&gt;</code>:</p>
126
+
127
+ <d-math block>E = mc^2</d-math>
128
+
129
+ <h4>Code</h4>
130
+
131
+ <p>It's great to include code samples in your blog post. Inline, you can do it via the <code>&lt;code&gt;</code> tags. For code blocks you can use the <code>&lt;d-code block language="clike"&gt;...&lt;/d-code&gt;</code> tags. Take, for example, the following block:</p>
132
+
133
+ <d-code block language="html">
134
+ &lt;d-code block language="python"&gt;
135
+ def print_hello_world():
136
+ print("hello world")
137
+ &lt;/d-code&gt;
138
+ </d-code>
139
+
140
+ <p>It will render as:</p>
141
+
142
+ <d-code block language="python">
143
+ def print_hello_world():
144
+ print("hello world")
145
+ </d-code>
146
+
147
+ <p>Finally, if you want to include code from GitHub, you can use <a href="https://emgithub.com">emgithub.com</a> and, for example, create a collapsible widget like this:</p>
148
+
149
+ <d-code block language="html">
150
+ &lt;details style="background: #f6f8fa; border: 1px solid #d0d7de; border-radius: 6px; margin: 1em 0;"&gt;
151
+ &lt;summary style="padding: 12px; cursor: pointer; user-select: none; background: #f3f4f6; border-bottom: 1px solid #d0d7de;"&gt;
152
+ 👉 Naive DP implementation with overlap in Picotron (Click to expand)
153
+ &lt;/summary&gt;
154
+ &lt;div class="code-embed-container" style="margin: 0; border-radius: 0; overflow-x: scroll; width: max-content; min-width: 100%; font-size: 8px;"&gt;&lt;/div&gt;
155
+ &lt;script
156
+ src="https://emgithub.com/embed-v2.js?target=https%3A%2F%2Fgithub.com%2Fhuggingface%2Fpicotron%2Fblob%2F0035cce0e04afd6192763b11efe50010d8ad0f71%2Fpicotron%2Fdata_parallel%2Fdata_parallel.py%23L10-L60&style=github&type=code&showBorder=off&showLineNumbers=on&showFileMeta=on&showCopy=on&showFullPath=on"&gt;
157
+ &lt;/script&gt;
158
159
+ &lt;/details&gt;
160
+ </d-code>
161
+
162
+ <p>Which will display as follows:</p>
163
+
164
+ <details style="background: #f6f8fa; border: 1px solid #d0d7de; border-radius: 6px; margin: 1em 0;">
165
+ <summary style="padding: 12px; cursor: pointer; user-select: none; background: #f3f4f6; border-bottom: 1px solid #d0d7de;">
166
+ 👉 Naive DP implementation with overlap in Picotron (Click to expand)
167
+ </summary>
168
+ <div class="code-embed-container" style="margin: 0; border-radius: 0; overflow-x: scroll; width: max-content; min-width: 100%; font-size: 8px;"></div>
169
+ <script
170
+ src="https://emgithub.com/embed-v2.js?target=https%3A%2F%2Fgithub.com%2Fhuggingface%2Fpicotron%2Fblob%2F0035cce0e04afd6192763b11efe50010d8ad0f71%2Fpicotron%2Fdata_parallel%2Fdata_parallel.py%23L10-L60&style=github&type=code&showBorder=off&showLineNumbers=on&showFileMeta=on&showCopy=on&showFullPath=on">
171
+ </script>
172
173
+ </details>
174
+
175
+
176
+ <h4>Asides</h4>
177
+
178
+ <p>You can include side notes using the <code>&lt;aside&gt;...&lt;/aside&gt;</code> tag:</p>
179
+
180
+ <aside>This is a side note that will appear in the margin.</aside>
181
+
182
+ <h4>Figures and Images</h4>
183
+
184
+ <p>Include figures with captions:</p>
185
+
186
+ <d-code block language="html">
187
+ &lt;figure&gt;
188
+ &lt;img src="/assets/images/placeholder.png" alt="A placeholder image"&gt;
189
+ &lt;figcaption&gt;Caption for your figure&lt;/figcaption&gt;
190
+ &lt;/figure&gt;
191
+ </d-code>
192
+
193
+ <p>This will show the following:</p>
194
+
195
+ <figure>
196
+ <img src="/assets/images/placeholder.png" alt="A placeholder image">
197
+ <figcaption>Caption for your figure</figcaption>
198
+ </figure>
199
+
200
+ <h4>Tables</h4>
201
+
202
+ <p>You can easily add tables with the default HTML formatting:</p>
203
+
204
+ <d-code block language="html">
205
+ &lt;table&gt;
206
+ &lt;thead&gt;
207
+ &lt;tr&gt;
208
+ &lt;th&gt;&lt;strong&gt;Model&lt;/strong&gt;&lt;/th&gt;
209
+ &lt;th&gt;&lt;strong&gt;Accuracy&lt;/strong&gt;&lt;/th&gt;
210
+ &lt;th&gt;&lt;strong&gt;Speed&lt;/strong&gt;&lt;/th&gt;
211
+ &lt;/tr&gt;
212
+ &lt;/thead&gt;
213
+ &lt;tbody&gt;
214
+ &lt;tr&gt;
215
+ &lt;td&gt;Model A&lt;/td&gt;
216
+ &lt;td&gt;95%&lt;/td&gt;
217
+ &lt;td&gt;Fast&lt;/td&gt;
218
+ &lt;/tr&gt;
219
+ &lt;tr&gt;
220
+ &lt;td&gt;Model B&lt;/td&gt;
221
+ &lt;td&gt;98%&lt;/td&gt;
222
+ &lt;td&gt;Medium&lt;/td&gt;
223
+ &lt;/tr&gt;
224
+ &lt;tr&gt;
225
+ &lt;td&gt;Model C&lt;/td&gt;
226
+ &lt;td&gt;99%&lt;/td&gt;
227
+ &lt;td&gt;Slow&lt;/td&gt;
228
+ &lt;/tr&gt;
229
+ &lt;/tbody&gt;
230
+ &lt;/table&gt;
231
+ </d-code>
232
+
233
+ <p>This will render as a table as expected:</p>
234
+
235
+ <table>
236
+ <thead>
237
+ <tr>
238
+ <th><strong>Model</strong></th>
239
+ <th><strong>Accuracy</strong></th>
240
+ <th><strong>Speed</strong></th>
241
+ </tr>
242
+ </thead>
243
+ <tbody>
244
+ <tr>
245
+ <td>Model A</td>
246
+ <td>95%</td>
247
+ <td>Fast</td>
248
+ </tr>
249
+ <tr>
250
+ <td>Model B</td>
251
+ <td>98%</td>
252
+ <td>Medium</td>
253
+ </tr>
254
+ <tr>
255
+ <td>Model C</td>
256
+ <td>99%</td>
257
+ <td>Slow</td>
258
+ </tr>
259
+ </tbody>
260
+ </table>
261
+
262
+ <h4>Citations</h4>
263
+
264
+ <p>Citations can be included using <code>&lt;d-cite bibtex-key="example2023"&gt;&lt;/d-cite&gt;</code> tags and a separate bibliography file:</p>
265
+
266
+ <p>As shown in the paper<d-cite bibtex-key="example2023"></d-cite>, this approach has several advantages.</p>
267
+
268
+ <h3>Building and Deploying</h3>
269
+
270
+ <p>When you're ready to build your blog for production:</p>
271
+
272
+ <d-code block language="bash">npm run build</d-code>
273
+
274
+ <p>This will create optimized files in the <code>dist/</code> directory. You can then commit and push the changes back to the space and see the result.</p>
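+
+ <p>A minimal publish sequence could look like this (a sketch; the commit message is a placeholder):</p>
+
+ <d-code block language="bash">
+ npm run build
+ git add dist/
+ git commit -m "update built blog"
+ git push
+ </d-code>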
275
+
276
+ <h2>Interactive Components</h2>
277
+
278
+ <p>You can, for example, include interactive Plotly visualizations in your blog by creating HTML fragments:</p>
279
+
280
+ <p>To generate these charts, run the included Python script:</p>
281
+
282
+ <d-code block language="bash">
283
+ pip install numpy plotly pandas
284
+ python plotly_example.py
285
+ </d-code>
286
+
287
+ <p>This creates HTML fragments in the <code>src/fragments/</code> directory that you can include in your blog via a placeholder matching the fragment's file name, as described below.</p>
288
+
289
+ <h3>Using HTML Fragments</h3>
290
+
291
+ <p>This template supports HTML fragments, which are useful for including complex visualizations or interactive components:</p>
292
+
293
+ <ol>
294
+ <li>Create a new HTML file in the <code>src/fragments/</code> directory, e.g., <code>src/fragments/my-visualization.html</code></li>
295
+ <li>Include it in your main document using <code>\{{{fragment-my-visualization}}}</code></li>
296
+ </ol>
297
+
298
+ <p>Fragments can contain HTML, CSS, and JavaScript. They're loaded asynchronously, which helps keep your main document lightweight.</p>
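+
+ <p>A minimal hypothetical fragment, e.g. <code>src/fragments/my-visualization.html</code>, could look like:</p>
+
+ <d-code block language="html">
+ &lt;div class="my-viz"&gt;
+   &lt;p&gt;Hello from a fragment!&lt;/p&gt;
+ &lt;/div&gt;
+ &lt;script&gt;
+   console.log("my-visualization fragment loaded");
+ &lt;/script&gt;
+ </d-code>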
299
+
300
+
301
+ <h2>Advanced Customization</h2>
302
+
303
+ <h3>Styling</h3>
304
+
305
+ <p>You can customize the appearance of your blog by editing <code>src/style.css</code>. The template uses CSS variables for colors, fonts, and spacing, making it easy to adjust the overall look and feel.</p>
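+
+ <p>For example, the accent color is defined as <code>--distill-blue</code> in the <code>:root</code> block of <code>src/style.css</code>, so a single override restyles every control that uses it:</p>
+
+ <d-code block language="css">
+ :root {
+   --distill-blue: #ff6600; /* replaces the default #007BFF accent */
+ }
+ </d-code>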
306
+
307
+ <h3>Adding JavaScript</h3>
308
+
309
+ <p>For interactive components, add your JavaScript code to <code>src/index.js</code> or create new modules and import them. The template uses webpack to bundle JavaScript files.</p>
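+
+ <p>For instance, a hypothetical module <code>src/myWidget.js</code> could export an init function that you import and call from <code>src/index.js</code> inside the <code>DOMContentLoaded</code> handler:</p>
+
+ <d-code block language="javascript">
+ // src/myWidget.js (hypothetical example)
+ export function initMyWidget() {
+   const el = document.getElementById("my-widget");
+   if (el) el.textContent = "Widget ready!";
+ }
+ </d-code>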
310
+
311
+ <h3>Bibliography</h3>
312
+
313
+ <p>Edit <code>src/bibliography.bib</code> to include your citations in BibTeX format.</p>
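+
+ <p>A minimal entry matching the <code>example2023</code> key cited earlier could look like this (all fields are illustrative placeholders):</p>
+
+ <d-code block language="clike">
+ @misc{example2023,
+   title={An Example Paper},
+   author={Doe, Jane and Smith, John},
+   year={2023},
+   url={https://example.com/paper}
+ }
+ </d-code>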
314
+
315
+ <h2>Best Practices</h2>
316
+
317
+ <ul>
318
+ <li>Keep your content focused and concise</li>
319
+ <li>Use visualizations to explain complex concepts</li>
320
+ <li>Break down long articles into clear sections with descriptive headings</li>
321
+ <li>Use mathematics sparingly and always explain the notation</li>
322
+ <li>Optimize images and interactive components for fast loading</li>
323
+ </ul>
324
+
325
+ <h2>Conclusion</h2>
326
+
327
+ <p>This template gives you everything you need to create beautiful, interactive technical blogs. By combining clear writing with interactive visualizations and mathematical notation, you can effectively communicate complex ideas to your audience.</p>
328
+
329
+ <p>For more examples and inspiration, visit <a href="https://distill.pub/">distill.pub</a>.</p>
330
+
331
+
332
+
333
+ </d-article>
334
+
335
+ <d-appendix>
336
+ <d-bibliography src="bibliography.bib"></d-bibliography>
337
+ <style>
338
+ d-appendix .citation {
339
+ font-size: 11px;
340
+ line-height: 15px;
343
+ border: 1px solid rgba(0, 0, 0, 0.1);
344
+ background: rgba(0, 0, 0, 0.02);
345
+ padding: 10px 18px;
346
+ border-radius: 3px;
347
+ color: rgba(150, 150, 150, 1);
348
+ overflow: hidden;
349
+ margin-top: -12px;
350
+ white-space: pre-wrap;
351
+ word-wrap: break-word;
352
+ }
353
+ </style>
354
+
355
+ <h3 id="citation">Citation</h3>
356
+ <p>For attribution in academic contexts, please cite this work as</p>
357
+ <pre class="citation short">"The Distill Blog Template", 2025.</pre>
359
+ <p>BibTeX citation</p>
360
+ <pre class="citation long">@misc{distill_blog_template,
361
+ title={The Distill Blog Template},
362
+ author={Some Authors and others},
363
+ year={2025},
364
+ }</pre>
365
+ </d-appendix>
366
+ <script>
367
+ function toggleTOC() {
368
+ const content = document.querySelector('.toc-content');
369
+ const icon = document.querySelector('.toggle-icon');
370
+
371
+ content.classList.toggle('collapsed');
372
+ icon.classList.toggle('collapsed');
373
+ }
374
+ </script>
375
+
376
+ <script>
377
+ const article = document.querySelector('d-article');
378
+ const toc = document.querySelector('d-contents');
379
+ if (toc) {
380
+ const headings = article.querySelectorAll('h2, h3, h4');
381
+ // let ToC = `<nav role="navigation" class="l-text figcaption"><h3>Table of contents</h3>`;
382
+ let ToC = `<nav role="navigation" class="l-text figcaption"><div class="toc-header" onclick="toggleTOC()">
383
+ <span class="toc-title">Table of Contents</span>
384
+ <span class="toggle-icon">▼</span>
385
+ </div><div class="toc-content">`;
386
+ let prevLevel = 0;
387
+
388
+ for (const el of headings) {
389
+ // should element be included in TOC?
390
+ const isInTitle = el.parentElement.tagName == 'D-TITLE';
391
+ const isException = el.getAttribute('no-toc');
392
+ if (isInTitle || isException) continue;
393
+ el.setAttribute('id', el.textContent.toLowerCase().replaceAll(" ", "_"))
394
+ const link = '<a target="_self" href="' + '#' + el.getAttribute('id') + '">' + el.textContent + '</a>';
395
+
396
+ const level = el.tagName === 'H2' ? 0 : (el.tagName === 'H3' ? 1 : 2);
397
+ while (prevLevel < level) {
398
+ ToC += '<ul>'
399
+ prevLevel++;
400
+ }
401
+ while (prevLevel > level) {
402
+ ToC += '</ul>'
403
+ prevLevel--;
404
+ }
405
+ if (level === 0)
406
+ ToC += '<div>' + link + '</div>';
407
+ else
408
+ // else if (level === 1)
409
+ ToC += '<li>' + link + '</li>';
410
+ }
411
+
412
+ while (prevLevel > 0) {
413
+ ToC += '</ul>'
414
+ prevLevel--;
415
+ }
416
+ ToC += '</div></nav>';
417
+ toc.innerHTML = ToC;
418
+ toc.setAttribute('prerendered', 'true');
419
+ const toc_links = document.querySelectorAll('d-contents > nav div a');
420
+
421
+ window.addEventListener('scroll', (_event) => {
422
+ if (typeof (headings) != 'undefined' && headings != null && typeof (toc_links) != 'undefined' && toc_links != null) {
423
+ find_active: {
424
+ for (let i = headings.length - 1; i >= 0; i--) {
425
+ const heading = headings[i];
426
+ // Skip headings that shouldn't be in TOC
427
+ if (heading.parentElement.tagName == 'D-TITLE' || heading.getAttribute('no-toc')) {
428
+ continue;
429
+ }
430
+
431
+ if (heading.getBoundingClientRect().top - 50 <= 0) {
432
+ // Find matching TOC link by href
433
+ const headingId = heading.getAttribute('id');
434
+ const activeLink = Array.from(toc_links).find(link =>
435
+ link.getAttribute('href') === '#' + headingId
436
+ );
437
+
438
+ if (activeLink && !activeLink.classList.contains("active")) {
439
+ toc_links.forEach(link => link.classList.remove("active"));
440
+ activeLink.classList.add('active');
441
+ }
442
+ break find_active;
443
+ }
444
+ }
445
+ toc_links.forEach(link => link.classList.remove("active"));
446
+ }
447
+ }
448
+ });
449
+ }
450
+ </script>
451
+
452
+ </body>
453
+
454
+ </html>
src/index.js ADDED
@@ -0,0 +1,10 @@
+ // import { plotClusters } from './clusters'
+ import { loadFragments } from './fragmentLoader'
+ import { syncHFSpacesURLHash } from './syncHFSpacesURLHash'
+
+ document.addEventListener("DOMContentLoaded", () => {
+     console.log("DOMContentLoaded");
+     loadFragments();
+     // init_memory_plot(); // disabled: no memory plot is defined or imported in this template
+     syncHFSpacesURLHash();
+ }, { once: true });
src/plotly_example.py ADDED
@@ -0,0 +1,82 @@
+ import plotly.graph_objects as go
+ import numpy as np
+ import pandas as pd
+
+ data = {
+     "x": 3*np.random.rand(512),
+     "y": np.random.rand(512),
+     "z": np.random.random(512),
+ }
+
+ df = pd.DataFrame(data)
+
+ def get_label(z):
+     if z < 0.25:
+         return "smol dot"
+     elif z < 0.5:
+         return "ok-ish dot"
+     elif z < 0.75:
+         return "a dot"
+     else:
+         return "biiig dot"
+
+ df["label"] = df["z"].apply(get_label)
+
+ df["z"] = (df["z"]+1)*5
+
+ fig = go.Figure()
+
+ fig.add_trace(go.Scatter(
+     x=df['x'],
+     y=df['y'],
+     mode='markers',
+     marker=dict(
+         size=df['z'],
+         color=df['z'],
+         colorscale=[
+             [0, 'rgb(78, 165, 183)'],    # Light blue
+             [0.5, 'rgb(206, 192, 250)'], # Purple
+             [1, 'rgb(232, 137, 171)']    # Pink
+         ],
+         opacity=0.9,
+     ),
+     customdata=df[["label"]],
+     hovertemplate="Dot category: %{customdata[0]}",
+     hoverlabel=dict(namelength=0),
+     showlegend=False
+ ))
+
+ fig.update_layout(
+     width=1200,
+     height=400,
+     paper_bgcolor='white',
+     plot_bgcolor='white',
+     showlegend=False,
+     margin=dict(l=0, r=0, t=0, b=0),
+     xaxis=dict(
+         showgrid=False,
+         zeroline=False,
+         showticklabels=False,
+         range=[0, 3]
+     ),
+     yaxis=dict(
+         showgrid=False,
+         zeroline=False,
+         showticklabels=False,
+         scaleanchor="x",
+         scaleratio=1,
+         range=[0, 1]
+     )
+ )
+
+ fig.show()
+
+ fig.write_html("fragments/banner.html",
+     include_plotlyjs=False,
+     full_html=False,
+     config={
+         'displayModeBar': False,
+         'responsive': True,
+         'scrollZoom': False,
+     })
src/style.css ADDED
@@ -0,0 +1,599 @@
1
+ /* style.css */
2
+ /* Define colors */
3
+ :root {
4
+ --distill-gray: rgb(107, 114, 128);
5
+ --distill-gray-light: rgb(185, 185, 185);
6
+ --distill-gray-lighter: rgb(228, 228, 228);
7
+ --distill-gray-lightest: rgb(245, 245, 245);
8
+ --distill-blue: #007BFF;
9
+ }
10
+
11
+ /* Container for the controls */
12
+ [id^="plot-"] {
13
+ display: flex;
14
+ flex-direction: column;
15
+ align-items: center;
16
+ gap: 15px; /* Adjust the gap between controls as needed */
17
+ }
18
+ [id^="plot-"] figure {
19
+ margin-bottom: 0px;
20
+ margin-top: 0px;
21
+ padding: 0px;
22
+ }
23
+ .plotly_caption {
24
+ font-style: italic;
25
+ margin-top: 10px;
26
+ }
27
+
28
+ .plotly_controls {
29
+ display: flex;
30
+ flex-wrap: wrap;
31
+ flex-direction: row;
32
+ justify-content: center;
33
+ align-items: flex-start;
34
+ gap: 30px;
35
+ }
36
+
37
+
38
+ .plotly_input_container {
39
+ display: flex;
40
+ align-items: center;
41
+ flex-direction: column;
42
+ gap: 10px;
43
+ }
44
+
45
+ /* Style for the select dropdown */
46
+ .plotly_input_container > select {
47
+ padding: 2px 4px;
48
+ /* border: 1px solid #ccc; */
49
+ line-height: 1.5em;
50
+ text-align: center;
51
+ border-radius: 4px;
52
+ font-size: 12px;
53
+ background-color: var(--distill-gray-lightest);
54
+ outline: none;
55
+ }
56
+
57
+ /* Style for the range input */
58
+
59
+ .plotly_slider {
60
+ display: flex;
61
+ align-items: center;
62
+ gap: 10px;
63
+ }
64
+
65
+ .plotly_slider > input[type="range"] {
66
+ -webkit-appearance: none;
67
+ height: 2px;
68
+ background: var(--distill-gray-light);
69
+ border-radius: 5px;
70
+ outline: none;
71
+ }
72
+
73
+ .plotly_slider > span {
74
+ font-size: 14px;
75
+ line-height: 1.6em;
76
+ min-width: 16px;
77
+ }
78
+
79
+ .plotly_slider > input[type="range"]::-webkit-slider-thumb {
80
+ -webkit-appearance: none;
81
+ appearance: none;
82
+ width: 18px;
83
+ height: 18px;
84
+ border-radius: 50%;
85
+ background: var(--distill-blue);
86
+ cursor: pointer;
87
+ }
88
+
89
+ .plotly_slider > input[type="range"]::-moz-range-thumb {
90
+ width: 18px;
91
+ height: 18px;
92
+ border-radius: 50%;
93
+ background: var(--distill-blue);
94
+ cursor: pointer;
95
+ }
96
+
97
+ /* Style for the labels */
98
+ .plotly_input_container > label {
99
+ font-size: 14px;
100
+ font-weight: bold;
101
+ }
102
+
103
+ .main-plot-container {
104
+ margin-top: 21px;
105
+ margin-bottom: 35px;
106
+ }
107
+
108
+ .main-plot-container > figure {
109
+ display: block !important;
110
+ /* Let this be handled by graph-container */
111
+ margin-bottom: 0px;
112
+ margin-top: 0px;
113
+ }
114
+ .main-plot-container > div {
115
+ display: none !important;
116
+ }
117
+
118
+
119
+ @media (min-width: 768px) {
120
+ .main-plot-container > figure {
121
+ display: none !important;
122
+ }
123
+ .main-plot-container > div {
124
+ display: flex !important;
125
+ }
126
+ }
127
+
128
+ d-byline .byline {
129
+ grid-template-columns: 1fr;
130
+ grid-column: text;
131
+ font-size: 0.9rem;
132
+ line-height: 1.8em;
133
+ }
134
+
135
+ @media (min-width: 768px) {
136
+ d-byline .byline {
137
+ grid-template-columns: 5fr 1fr 1fr;
138
+ }
139
+ }
140
+
141
+ #title-plot {
142
+ margin-top: 0px;
143
+ margin-bottom: 0px;
144
+ }
145
+
146
+ d-contents > nav a.active {
147
+ text-decoration: underline;
148
+ }
149
+
150
+ @media (max-width: 1199px) {
151
+ d-contents {
152
+ display: none;
153
+ background: white;
154
+ justify-self: start;
155
+ align-self: start;
156
+ padding-bottom: 0.5em;
157
+ margin-bottom: 1em;
158
+ padding-left: 0.25em;
159
+ border-bottom: 1px solid rgba(0, 0, 0, 0.1);
160
+ border-bottom-width: 1px;
161
+ border-bottom-style: solid;
162
+ border-bottom-color: rgba(0, 0, 0, 0.1);
163
+ overflow-y: scroll;
164
+ height: calc(100vh - 40px);
165
+ scrollbar-width: none;
166
+ z-index: -100;
167
+ }
168
+ }
169
+
170
+ d-contents a:hover {
171
+ border-bottom: none;
172
+ }
173
+
174
+ .toc-title {
175
+ font-weight: bold;
176
+ font-size: 1.2em;
177
+ color: #333;
178
+ }
179
+
180
+ .toggle-icon {
181
+ transition: transform 0.3s;
182
+ }
183
+
184
+ .toggle-icon.collapsed {
185
+ transform: rotate(90deg);
186
+ }
187
+
188
+ .toc-content {
189
+ margin-top: 15px;
190
+ overflow: hidden;
191
+ /* max-height: 1000px; */
192
+ transition: max-height 0.3s ease-out;
193
+ }
194
+
195
+ .toc-content.collapsed {
196
+ max-height: 0;
197
+ margin-top: 0;
198
+ }
199
+
200
+ @media (min-width: 1200px) {
201
+ d-article {
202
+ /* Ensure d-article does not prevent sticky positioning */
203
+ overflow: visible;
204
+ }
205
+
206
+ d-contents {
207
+ align-self: start;
208
+ background: white;
209
+ grid-column-start: 1 !important;
210
+ grid-column-end: 4 !important;
211
+ grid-row: auto / span 6;
212
+ justify-self: end;
213
+ margin-top: 0em;
214
+ padding-right: 3em;
215
+ padding-left: 2em;
216
+ /* border-right: 1px solid rgba(0, 0, 0, 0.1);
217
+ border-right-width: 1px;
218
+ border-right-style: solid;
219
+ border-right-color: rgba(0, 0, 0, 0.1); */
220
+ position: -webkit-sticky; /* For Safari */
221
+ position: sticky;
222
+ top: 10px; /* Adjust this value if needed */
223
+ overflow-y: auto;
224
+ height: calc(100vh - 40px);
225
+ scrollbar-width: none;
226
+ transition: max-height 0.3s ease-out;
227
+ z-index: -100;
228
+ }
229
+ }
230
+
231
+ d-contents nav h3 {
232
+ margin-top: 0;
233
+ margin-bottom: 1em;
234
+ }
235
+
236
+ d-contents nav div div {
237
+ color: rgba(0, 0, 0, 0.8);
238
+ font-weight: bold;
239
+ }
240
+
241
+ d-contents nav a {
242
+ color: rgba(0, 0, 0, 0.8);
243
+ border-bottom: none;
244
+ text-decoration: none;
245
+ }
246
+
247
+ d-contents li {
248
+ list-style-type: none;
249
+ }
250
+
251
+ d-contents ul, d-article d-contents ul {
252
+ padding-left: 1em;
253
+ }
254
+
255
+ d-contents nav ul li {
256
+ margin-bottom: .25em;
257
+ }
258
+
259
+ d-contents nav a:hover {
260
+ text-decoration: underline solid rgba(0, 0, 0, 0.6);
261
+ }
262
+
263
+ d-contents nav ul {
264
+ margin-top: 0;
265
+ margin-bottom: 6px;
266
+ }
267
+
268
+
269
+ d-contents nav > div {
270
+ display: block;
271
+ outline: none;
272
+ margin-bottom: 0.5em;
273
+ }
274
+
275
+ d-contents nav > div > a {
276
+ font-size: 13px;
277
+ font-weight: 600;
278
+ }
279
+
280
+ d-article aside {
281
+ margin-bottom: 1em;
282
+ }
283
+
284
+ d-article img {
285
+ max-width: 100%;
286
+ }
287
+
288
+ @media (min-width: 768px) {
289
+ d-article aside {
290
+ margin-bottom: 0;
291
+ }
292
+ }
293
+
294
+ d-contents nav > div > a:hover,
295
+ d-contents nav > ul > li > a:hover {
296
+ text-decoration: none;
297
+ }
298
+
299
+ .note-box {
300
+ background-color: #f6f8fa;
301
+ border-left: 4px solid #444444;
302
+ padding: 1rem;
303
+ margin: 1rem 0; /* Keep this modest margin */
304
+ border-radius: 6px;
305
+ /* Add this to ensure the box only takes up needed space */
306
+ display: inline-block;
307
+ width: 100%;
308
+ }
309
+
310
+ .note-box-title {
311
+ margin: 0;
312
+ color: #444444;
313
+ font-weight: 600;
314
+ font-size: 1em;
315
+ }
316
+
317
+ .note-box-content {
318
+ margin-top: 0.5rem;
319
+ margin-bottom: 0; /* Ensure no bottom margin */
320
+ color: #24292f;
321
+ font-size: 0.9em;
322
+ line-height: 1.5em;
323
+ }
324
+
325
+ /* For dark mode support */
326
+ @media (prefers-color-scheme: dark) {
327
+ .note-box {
328
+ background-color: #1c1c1c;
329
+ border-left-color: #888888;
330
+ }
331
+ .note-box-title {
332
+ color: #888888;
333
+ }
334
+ .note-box-content {
335
+ color: #d4d4d4;
336
+ }
337
+ }
338
+
339
+ d-article {
340
+ font-size: 1.0em;
341
+ }
342
+
343
+ .figure-legend {
344
+ font-size: 0.9em;
345
+ font-style: italic;
346
+ color: var(--distill-gray);
347
+ line-height: 1.5em;
348
+ }
349
+
350
+ d-code {
351
+ font-size: 12px;
352
+ }
353
+
354
+ .large-image-background {
355
+ width: 100vw;
356
+ padding-top: 10px;
357
+ padding-bottom: 10px;
358
+ margin-left: calc(-50vw + 50%);
359
+ margin-right: calc(-50vw + 50%);
360
+ background: white;
361
+   height: fit-content; /* This will make it match the image height */
+   display: flex;
+   justify-content: center; /* This will center your image */
+ }
+
+ .large-image-background-transparent {
+   /* width: 100vw; */
+   padding-top: 10px;
+   padding-bottom: 10px;
+   /* margin-left: calc(-50vw + 50%); */
+   margin-left: -100px;
+   margin-right: -100px;
+   /* margin-right: calc(-50vw + 50%); */
+   /* background: white; */
+   height: fit-content; /* This will make it match the image height */
+   display: flex;
+   justify-content: center; /* This will center your image */
+ }
+
+ .boxed-image {
+   padding: 0.5rem;
+   background: white;
+   border-radius: 12px;
+   border: 1px solid #e5e7eb;
+   box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
+ }
+
+ d-article li {
+   margin-bottom: 0;
+ }
+
+ d-article ul ul {
+   margin-bottom: 0;
+ }
+
+ d-article ol ol {
+   margin-bottom: 0;
+ }
+
+ d-article hr {
+   grid-column: text;
+ }
+
+ /* Memory visualization */
+ #graph-all {
+   min-width: 500px;
+   margin-right: 10px;
+   margin-bottom: 2rem;
+   padding: 0.5rem;
+   background: #f9fafb;
+   border-radius: 12px;
+   border: 1px solid #e5e7eb;
+   box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
+ }
+
+ /* Main container styles */
+ #controls {
+   max-width: 1200px;
+   /* margin: 2rem auto; */
+   margin-bottom: 2rem;
+   margin-left: 10px;
+   padding: 0.6rem;
+   background: #f9fafb;
+   border-radius: 12px;
+   border: 1px solid #e5e7eb;
+   box-shadow: 0 4px 6px rgba(0, 0, 0, 0.1);
+ }
+
+ /* Grid layout */
+ #controls {
+   display: grid;
+   grid-template-columns: 1fr 1fr;
+   /* gap: 2rem; */
+ }
+
+ /* Cell styles */
+ .cell {
+   margin-bottom: 0.2rem;
+ }
+
+ /* Label styles */
+ label {
+   display: block;
+   /* margin-bottom: 0.5rem; */
+   font-size: 0.8rem;
+   font-weight: 500;
+   color: #374151;
+ }
+
+ /* Input container for range + number combination */
+ .input-container {
+   display: flex;
+   gap: 1rem;
+   align-items: center;
+ }
+
+ /* Range input styling */
+ input[type="range"] {
+   flex: 1;
+   height: 6px;
+   background: #e5e7eb;
+   border-radius: 3px;
+   appearance: none;
+   outline: none;
+ }
+
+ input[type="range"]::-webkit-slider-thumb {
+   appearance: none;
+   width: 16px;
+   height: 16px;
+   background: #3b82f6;
+   border-radius: 50%;
+   cursor: pointer;
+   transition: background 0.15s ease;
+ }
+
+ input[type="range"]::-webkit-slider-thumb:hover {
+   background: #2563eb;
+ }
+
+ /* Number input styling */
+ input[type="number"] {
+   width: 80px;
+   padding: 0.5rem;
+   border: 1px solid #e5e7eb;
+   border-radius: 6px;
+   font-size: 0.9rem;
+   color: #374151;
+ }
+
+ /* Select styling */
+ select {
+   width: 100%;
+   padding: 0.5rem;
+   border: 1px solid #e5e7eb;
+   border-radius: 6px;
+   background: white;
+   font-size: 0.9rem;
+   color: #374151;
+   cursor: pointer;
+ }
+
+ /* Checkbox styling */
+ input[type="checkbox"] {
+   width: 1.2rem;
+   height: 1.2rem;
+   margin-right: 0.5rem;
+   border: 2px solid #e5e7eb;
+   border-radius: 4px;
+   cursor: pointer;
+ }
+
+ /* Column specific styles */
+ .column-1 {
+   padding-right: 0.5rem;
+ }
+
+ .column-2 {
+   padding-left: 0.5rem;
+ }
+
+ /* Checkbox container */
+ .checkbox-container {
+   display: flex;
+   align-items: center;
+   margin-bottom: 1rem;
+ }
+
+ /* Memory visualization styles */
+ .memory-block {
+   background: #fff;
+   border-radius: 8px;
+   padding: 1rem;
+   margin-bottom: 1rem;
+   box-shadow: 0 2px 4px rgba(0, 0, 0, 0.05);
+ }
+
+ .memory-title {
+   font-size: 1.1rem;
+   font-weight: 500;
+   color: #374151;
+   margin-bottom: 0.5rem;
+ }
+
+ .memory-value {
+   font-size: 1.5rem;
+   font-weight: 600;
+   color: #3b82f6;
+ }
+
+ /* Responsive adjustments */
+ @media (max-width: 768px) {
+   #controls {
+     grid-template-columns: 1fr;
+     padding: 1rem;
+   }
+
+   .column-1, .column-2 {
+     padding: 0;
+   }
+ }
+
+ /* Hover states and transitions */
+ input:hover, select:hover {
+   border-color: #3b82f6;
+ }
+
+ input:focus, select:focus {
+   border-color: #2563eb;
+   outline: none;
+   box-shadow: 0 0 0 2px rgba(59, 130, 246, 0.1);
+ }
+
+ /* Add smooth transitions */
+ input, select, button {
+   transition: all 0.15s ease;
+ }
+
+ /* Preset dropdown special styling */
+ select[name="presets"] {
+   background-color: #f3f4f6;
+   font-weight: 500;
+ }
+
+ /* Memory graph enhancements */
+ .activation-memory {
+   background: #dbeafe;
+   padding: 1rem;
+   border-radius: 8px;
+   margin-bottom: 1rem;
+ }
+
+ .gradient-memory {
+   background: #ede9fe;
+   padding: 1rem;
+   border-radius: 8px;
+ }
+
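The classes above assume a specific shape of markup: a wrapper that bleeds past the text column around a white, bordered figure, plus a two-column `#controls` panel of labelled inputs. A minimal sketch of that markup, built from JS for self-containment — the ids, labels, and image path are hypothetical placeholders that only mirror the selectors in style.css:

```js
// Hypothetical markup targeted by the styles above; names are placeholders.
document.body.insertAdjacentHTML(
  "beforeend",
  `<div class="large-image-background-transparent">
     <img class="boxed-image" src="assets/example.png" alt="Example figure">
   </div>
   <div id="controls">
     <div class="column-1">
       <div class="cell">
         <label for="batch-size">Batch size</label>
         <div class="input-container">
           <input type="range" id="batch-size" min="1" max="128" value="32">
           <input type="number" min="1" max="128" value="32">
         </div>
       </div>
     </div>
     <div class="column-2">
       <div class="checkbox-container">
         <input type="checkbox" id="recompute">
         <label for="recompute">Recompute</label>
       </div>
     </div>
   </div>`
);
```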
src/syncHFSpacesURLHash.js ADDED
@@ -0,0 +1,124 @@
+ const queryArg = "section";
+
+ function syncHFSpacesURLHash() {
+   // Handle explicit section requests (don't update the hash automatically on load)
+   const hasExplicitRequest = handleExplicitSectionRequest();
+
+   // Set up hash change monitoring
+   updateHashBasedOnHashChange();
+
+   // Always set up scroll monitoring to update the hash during scrolling
+   setupScrollMonitoring();
+
+   // If there is no explicit request, we don't update the hash on initial load;
+   // the hash only starts updating once the user scrolls
+ }
+
+ function handleExplicitSectionRequest() {
+   // Check for a section parameter in the URL
+   const urlParams = new URLSearchParams(window.location.search);
+   const sectionId = urlParams.get(queryArg);
+
+   // If we have an explicit section request
+   if (sectionId) {
+     const targetElement = document.getElementById(sectionId);
+     if (targetElement) {
+       // Slight delay to ensure the browser doesn't try to do its own scrolling first
+       setTimeout(() => {
+         targetElement.scrollIntoView();
+         history.replaceState(null, null, `#${sectionId}`);
+       }, 100);
+     }
+     return true;
+   }
+
+   // No explicit section parameter found
+   return false;
+ }
+
+ function setupScrollMonitoring() {
+   // Variables to manage throttling
+   let isScrolling = false;
+   let lastKnownScrollPosition = 0;
+   let initialScroll = true;
+
+   // Add the scroll event listener
+   window.addEventListener('scroll', function() {
+     lastKnownScrollPosition = window.scrollY;
+
+     if (!isScrolling) {
+       window.requestAnimationFrame(function() {
+         // Skip the first scroll event, which might be the browser's automatic
+         // scroll to a hash on page load
+         if (initialScroll) {
+           initialScroll = false;
+         } else {
+           updateHashBasedOnScroll(lastKnownScrollPosition);
+         }
+         isScrolling = false;
+       });
+     }
+
+     isScrolling = true;
+   });
+ }
+
+ // Update the URL hash based on the scroll position
+ function updateHashBasedOnScroll(scrollPosition) {
+   const closestHeading = findClosestHeading(scrollPosition);
+
+   // Update the URL hash if we found a closest element
+   if (closestHeading && closestHeading.id) {
+     // Only update if the hash is different, to avoid unnecessary operations
+     if (window.location.hash !== `#${closestHeading.id}`) {
+       silentlyUpdateHash(closestHeading.id);
+       postMessageToHFSpaces(closestHeading.id);
+     }
+   }
+ }
+
+ // Find the heading closest to the current scroll position
+ function findClosestHeading(scrollPosition) {
+   // Get only heading elements with IDs that we want to track
+   const headingsWithIds = Array.from(document.querySelectorAll('h1[id], h2[id], h3[id], h4[id], h5[id], h6[id]'));
+
+   // Skip if there are no headings with IDs
+   if (headingsWithIds.length === 0) return null;
+
+   // Find the element closest to the middle of the viewport
+   let closestHeading = null;
+   let closestDistance = Infinity;
+   const viewportMiddle = scrollPosition + window.innerHeight / 2;
+
+   // Iterate through all headings to find the closest one
+   headingsWithIds.forEach(heading => {
+     const headingTop = heading.getBoundingClientRect().top + scrollPosition;
+     const distance = Math.abs(headingTop - viewportMiddle);
+
+     if (distance < closestDistance) {
+       closestDistance = distance;
+       closestHeading = heading;
+     }
+   });
+
+   return closestHeading;
+ }
+
+ // Update the hash without triggering scroll or other side effects
+ function silentlyUpdateHash(id) {
+   history.replaceState(null, null, `#${id}`);
+ }
+
+ function updateHashBasedOnHashChange() {
+   window.addEventListener('hashchange', () => {
+     const elementId = window.location.hash.slice(1);
+     postMessageToHFSpaces(elementId);
+   });
+ }
+
+ function postMessageToHFSpaces(elementId) {
+   const parentOrigin = "https://huggingface.co";
+   window.parent.postMessage({ queryString: `${queryArg}=${elementId}` }, parentOrigin);
+ }
+
+ export { syncHFSpacesURLHash };
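For the sync to take effect, `syncHFSpacesURLHash` has to be called once from the page's entry script. A minimal usage sketch — the webpack config below lists `src/index.js` as an entry point, but this particular wiring is an assumption, not part of this commit:

```js
// Hypothetical wiring in src/index.js: run the hash sync once the DOM is ready.
import { syncHFSpacesURLHash } from "./syncHFSpacesURLHash.js";

document.addEventListener("DOMContentLoaded", () => {
  syncHFSpacesURLHash();
});
```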
thumbnail.png ADDED

Git LFS Details

  • SHA256: 015ba6cf403de23d44502d54326e509f6d215a03c285b3031387b09e7f2ce22d
  • Pointer size: 131 Bytes
  • Size of remote file: 246 kB
webpack.config.js ADDED
@@ -0,0 +1,153 @@
+ const path = require("path");
+ const { CleanWebpackPlugin } = require("clean-webpack-plugin");
+ const CopyPlugin = require("copy-webpack-plugin");
+ const BundleAnalyzerPlugin = require("webpack-bundle-analyzer").BundleAnalyzerPlugin;
+ const Handlebars = require("handlebars");
+ const fs = require("fs");
+ const ImageMinimizerPlugin = require("image-minimizer-webpack-plugin");
+ const HtmlMinimizerPlugin = require("html-minimizer-webpack-plugin");
+
+ const FRAGMENTS_PATH = "src/fragments";
+
+ // Load the fragments from the fragments directory and cache them
+ const loadFragmentsMap = (() => {
+   let cachedFragments = null;
+   return async () => {
+     if (cachedFragments === null) {
+       cachedFragments = {};
+       const walkDir = async (dir, basePath = '') => {
+         const files = fs.readdirSync(dir);
+         await Promise.all(files.map(async file => {
+           const filePath = path.join(dir, file);
+           const relativePath = path.join(basePath, file);
+           if (fs.statSync(filePath).isDirectory()) {
+             await walkDir(filePath, relativePath);
+           } else {
+             // Remove the .html extension before creating the dotted path
+             const nameWithoutExt = relativePath.replace(/\.html$/, '');
+             const dottedPath = 'fragment-' + nameWithoutExt.replace(/\\/g, '-').replace(/\//g, '-').replace(/\./g, '-');
+             const content = fs.readFileSync(filePath, "utf8");
+             // Minify the HTML content using swcMinifyFragment
+             const minifiedRes = await HtmlMinimizerPlugin.swcMinifyFragment({ "tmp.html": content });
+             if (minifiedRes.errors) {
+               console.error(minifiedRes.errors);
+             }
+             const minifiedContent = minifiedRes.code;
+             cachedFragments[dottedPath] = minifiedContent;
+           }
+         }));
+       };
+       await walkDir(FRAGMENTS_PATH);
+     }
+     return cachedFragments;
+   };
+ })();
+
+ const transformHandlebars = async (data, absoluteFilename) => {
+   const fragments = await loadFragmentsMap();
+   console.log(`Available fragments: ${Object.keys(fragments).join(', ')}`);
+   // Compile the template and substitute the fragment map into it
+   const template = Handlebars.compile(data.toString('utf8'));
+   const html = template(fragments);
+   return html;
+ };
+
+ module.exports = {
+   entry: {
+     distill: "./src/distill.js",
+     main: "./src/index.js",
+   },
+   output: {
+     filename: "[name].bundle.js", // The output file
+     path: path.resolve(__dirname, "dist"), // Output directory
+   },
+   module: {
+     rules: [
+       { test: /\.css$/, use: ["style-loader", "css-loader"] },
+       {
+         test: /\.(js|mjs)$/,
+         exclude: /node_modules/,
+         use: {
+           loader: "babel-loader",
+           options: {
+             presets: ["@babel/preset-env"],
+           },
+         },
+       },
+     ],
+   },
+   plugins: [
+     new CleanWebpackPlugin(),
+     new CopyPlugin({
+       patterns: [
+         {
+           from: "assets",
+           to: "assets",
+         },
+         { from: "src/fragments/*", to: "fragments/[name].html" },
+         { from: "src/style.css", to: "style.css" },
+         { from: "src/bibliography.bib", to: "bibliography.bib" },
+         {
+           from: "src/index.html",
+           to: "index.html",
+           transform: transformHandlebars,
+         },
+       ],
+     }),
+   ],
+   devtool: process.env.NODE_ENV === 'production' ? 'source-map' : 'eval-source-map',
+   devServer: {
+     static: "./dist", // Serve files from the 'dist' directory
+     open: process.env.NODE_ENV !== 'production', // Automatically open the browser unless in production
+     hot: process.env.NODE_ENV !== 'production', // Enable hot module replacement unless in production
+   },
+   mode: process.env.NODE_ENV === 'production' ? 'production' : 'development',
+   optimization: {
+     minimizer: [
+       // Hynek: Ideally we would convert all images to webp and just use webp, but I don't have time
+       // to write a script which would also modify the html to reflect the new extensions.
+       new ImageMinimizerPlugin({
+         minimizer: [
+           {
+             implementation: ImageMinimizerPlugin.sharpMinify,
+             options: {
+               encodeOptions: {
+                 jpeg: { quality: 80 }, // For JPG
+                 png: { quality: 80 },  // For PNG
+                 webp: { quality: 80 }, // For WebP
+               },
+             },
+           },
+           {
+             implementation: ImageMinimizerPlugin.svgoMinify,
+             options: {
+               encodeOptions: {
+                 multipass: true,
+                 plugins: [
+                   'preset-default',
+                 ],
+               },
+             },
+           },
+         ],
+       }),
+       // Hynek: Ideally we don't run this twice but we
+       new HtmlMinimizerPlugin({
+         test: /fragments\/.*\.html$/i,
+         minify: HtmlMinimizerPlugin.swcMinifyFragment,
+       }),
+     ],
+   },
+ };
+
+ console.log(process.env.NODE_ENV);
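The fragment map built by `loadFragmentsMap` turns every file under `src/fragments/` into a Handlebars variable: for example, a file at `src/fragments/banner/logo.html` would get the flattened key `fragment-banner-logo`, which `src/index.html` can reference at build time. A standalone sketch of that substitution — the file name and placeholder are hypothetical, and raw HTML needs the triple-stash form so Handlebars doesn't escape it:

```js
// Sketch of the compile-time fragment substitution performed by transformHandlebars.
const Handlebars = require("handlebars");

// What loadFragmentsMap would produce for a hypothetical src/fragments/banner/logo.html
const fragments = { "fragment-banner-logo": "<svg><!-- inlined logo --></svg>" };

// src/index.html would embed it with a triple-stash placeholder
const template = Handlebars.compile("<header>{{{fragment-banner-logo}}}</header>");
console.log(template(fragments));
// -> <header><svg><!-- inlined logo --></svg></header>
```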