diff --git a/examples/benchmarks/TRA/README.md b/examples/benchmarks/TRA/README.md
new file mode 100644
index 0000000000..070527ddb4
--- /dev/null
+++ b/examples/benchmarks/TRA/README.md
@@ -0,0 +1,81 @@
+# Learning Multiple Stock Trading Patterns with Temporal Routing Adaptor and Optimal Transport
+
+This code provides a PyTorch implementation for TRA (Temporal Routing Adaptor), as described in the paper [Learning Multiple Stock Trading Patterns with Temporal Routing Adaptor and Optimal Transport](http://arxiv.org/abs/2106.12950).
+
+* TRA (Temporal Routing Adaptor) is a lightweight module that consists of a set of independent predictors for learning multiple patterns as well as a router to dispatch samples to different predictors.
+* We also design a learning algorithm based on Optimal Transport (OT) to obtain the optimal sample to predictor assignment and effectively optimize the router with such assignment through an auxiliary loss term.
+
+
+# Running TRA
+
+## Requirements
+- Install `Qlib` main branch
+
+## Running
+
+The running scripts used for the paper are provided in `run.sh`.
+
+There are two ways to run the model:
+
+* Running from scripts with default parameters
+  You can run the model directly with Qlib's `qrun` command:
+ ```
+ qrun configs/config_alstm.yaml
+ ```
+
+* Running from code with self-defined parameters
+  Setting different parameters is also allowed; see the code in `example.py`:
+ ```
+ python example.py --config_file configs/config_alstm.yaml
+ ```
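+
+As an illustration, the snippet below sketches what such a self-defined run could look like: load the yaml config, initialize Qlib, and build the dataset and model from it. This is only a minimal sketch that assumes the usual Qlib benchmark config layout (`qlib_init` and `task` sections); it does not reproduce `example.py` exactly.
+
+```
+import argparse
+
+import qlib
+import yaml
+from qlib.utils import init_instance_by_config
+
+parser = argparse.ArgumentParser()
+parser.add_argument("--config_file", default="configs/config_alstm.yaml")
+args = parser.parse_args()
+
+with open(args.config_file) as f:
+    config = yaml.safe_load(f)
+
+# point Qlib at your data as configured in the yaml (provider_uri, region, ...)
+qlib.init(**config.get("qlib_init", {}))
+
+# build the dataset and model from the config, then train and predict
+dataset = init_instance_by_config(config["task"]["dataset"])
+model = init_instance_by_config(config["task"]["model"])
+model.fit(dataset)
+pred = model.predict(dataset)
+```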
+
+Here we train TRA on top of a pretrained backbone model, so the `*_init.yaml` configs should be run before TRA's scripts.
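+
+For instance, the ALSTM+TRA result is obtained by first pretraining the backbone and then training TRA on top of it (see also the preparation notes in `Reports.ipynb`):
+
+```
+qrun configs/config_alstm_tra_init.yaml
+qrun configs/config_alstm_tra.yaml
+```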
+
+# Results
+
+## Outputs
+
+After running the scripts, you can find the result files under `./output`:
+
+`info.json` - config settings and result metrics.
+
+`log.csv` - running logs.
+
+`model.bin` - the model parameter dictionary.
+
+`pred.pkl` - the prediction scores and output for inference.
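+
+For reference, `pred.pkl` can be inspected with pandas, as done in `Reports.ipynb`; the exact path below depends on the output directory of your run:
+
+```
+import pandas as pd
+
+pred = pd.read_pickle("./output/pred.pkl")  # adjust to your run's output path
+print(pred.head())
+```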
+
+## Our Results
+
+Metric abbreviations: IC (information coefficient), ICIR (IC information ratio), AR (annualized return), AV (annualized volatility), SR (Sharpe ratio), MDD (maximum drawdown).
+
+| Methods | MSE| MAE| IC | ICIR | AR | AV | SR | MDD |
+|-------------------|-------------------|---------------------|--------------------|--------------------|--------------------|--------------------|--------------------|--------------------|
+|Linear|0.163|0.327|0.020|0.132|-3.2%|16.8%|-0.191|32.1%|
+|LightGBM|0.160(0.000)|0.323(0.000)|0.041|0.292|7.8%|15.5%|0.503|25.7%|
+|MLP|0.160(0.002)|0.323(0.003)|0.037|0.273|3.7%|15.3%|0.264|26.2%|
+|SFM|0.159(0.001) |0.321(0.001) |0.047 |0.381 |7.1% |14.3% |0.497 |22.9%|
+|ALSTM|0.158(0.001) |0.320(0.001) |0.053 |0.419 |12.3% |13.7% |0.897 |20.2%|
+|Trans.|0.158(0.001) |0.322(0.001) |0.051 |0.400 |14.5% |14.2% |1.028 |22.5%|
+|ALSTM+TS|0.160(0.002) |0.321(0.002) |0.039 |0.291 |6.7% |14.6% |0.480|22.3%|
+|Trans.+TS|0.160(0.004) |0.324(0.005) |0.037 |0.278 |10.4% |14.7% |0.722 |23.7%|
+|ALSTM+TRA(Ours)|0.157(0.000) |0.318(0.000) |0.059 |0.460 |12.4% |14.0% |0.885 |20.4%|
+|Trans.+TRA(Ours)|0.157(0.000) |0.320(0.000) |0.056 |0.442 |16.1% |14.2% |1.133 |23.1%|
+
+A more detailed demo of the experiment results in the paper can be found in `Reports.ipynb`.
+
+# Common Issues
+
+For help or issues using TRA, please submit a GitHub issue.
+
+If the loss becomes `NaN`, check the `epsilon` parameter of the Sinkhorn algorithm; adjusting `epsilon` according to the scale of the input is important.
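+
+For illustration, below is a minimal sketch of the entropy-regularized Sinkhorn normalization used for Optimal Transport; it is not necessarily identical to the implementation in this repository. It shows why `epsilon` matters: `exp(Q / epsilon)` overflows or underflows when `epsilon` is small relative to the scale of `Q`, which then propagates `NaN` into the loss.
+
+```
+import torch
+
+def sinkhorn(Q, n_iters=3, epsilon=0.01):
+    # Q: [n_samples, n_predictors] score matrix (e.g. negative prediction loss);
+    # epsilon is the entropy-regularization temperature.
+    Q = torch.exp(Q / epsilon)  # a too-small epsilon w.r.t. |Q| overflows here -> inf/NaN
+    for _ in range(n_iters):
+        Q = Q / Q.sum(dim=0, keepdim=True)  # normalize over predictors
+        Q = Q / Q.sum(dim=1, keepdim=True)  # normalize over samples
+    return Q
+```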
+
+# Citation
+If you find this repository useful in your research, please cite:
+```
+@inproceedings{HengxuKDD2021,
+ author = {Hengxu Lin and Dong Zhou and Weiqing Liu and Jiang Bian},
+ title = {Learning Multiple Stock Trading Patterns with Temporal Routing Adaptor and Optimal Transport},
+ booktitle = {Proceedings of the 27th ACM SIGKDD Conference on Knowledge Discovery \& Data Mining},
+ series = {KDD '21},
+ year = {2021},
+ publisher = {ACM},
+}
+```
diff --git a/examples/benchmarks/TRA/Reports.ipynb b/examples/benchmarks/TRA/Reports.ipynb
new file mode 100644
index 0000000000..ee172d97e1
--- /dev/null
+++ b/examples/benchmarks/TRA/Reports.ipynb
@@ -0,0 +1,796 @@
+{
+ "cells": [
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Overview\n",
+ "\n",
+ "This notebook contains all experiment results exhibited in our paper."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 1,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "%matplotlib inline\n",
+ "import glob\n",
+ "import numpy as np\n",
+ "import pandas as pd\n",
+ "import json\n",
+ "import numpy as np\n",
+ "import pandas as pd\n",
+ "import seaborn as sns\n",
+ "import matplotlib.pyplot as plt\n",
+ "import matplotlib\n",
+ "sns.set(style='white')\n",
+ "matplotlib.rcParams['pdf.fonttype'] = 42\n",
+ "matplotlib.rcParams['ps.fonttype'] = 42\n",
+ "\n",
+ "from tqdm.auto import tqdm\n",
+ "from joblib import Parallel, delayed\n",
+ "\n",
+ "def func(x, N=80):\n",
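+    "    # per-day evaluation: rank-normalize scores and labels, then compute MSE/MAE/IC and the top-N minus bottom-N return spread\n",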
+ " ret = x.ret.copy()\n",
+ " x = x.rank(pct=True)\n",
+ " x['ret'] = ret\n",
+ " diff = x.score.sub(x.label)\n",
+ " r = x.nlargest(N, columns='score').ret.mean()\n",
+ " r -= x.nsmallest(N, columns='score').ret.mean()\n",
+ " return pd.Series({\n",
+ " 'MSE': diff.pow(2).mean(), \n",
+ " 'MAE': diff.abs().mean(), \n",
+ " 'IC': x.score.corr(x.label),\n",
+ " 'R': r\n",
+ " })\n",
+ " \n",
+ "ret = pd.read_pickle(\"data/ret.pkl\").clip(-0.1, 0.1)\n",
+ "def backtest(fname, **kwargs):\n",
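+    "    # load predictions for the test period, attach realized returns, evaluate each day in parallel, then aggregate into portfolio metrics\n",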
+ " pred = pd.read_pickle(fname).loc['2018-09-21':'2020-06-30'] # test period\n",
+ " pred['ret'] = ret\n",
+ " dates = pred.index.unique(level=0)\n",
+ " res = Parallel(n_jobs=-1)(delayed(func)(pred.loc[d], **kwargs) for d in dates)\n",
+ " res = {\n",
+ " dates[i]: res[i]\n",
+ " for i in range(len(dates))\n",
+ " }\n",
+ " res = pd.DataFrame(res).T\n",
+ " r = res['R'].copy()\n",
+ " r.index = pd.to_datetime(r.index)\n",
+ " r = r.reindex(pd.date_range(r.index[0], r.index[-1])).fillna(0) # paper use 365 days\n",
+ " return {\n",
+ " 'MSE': res['MSE'].mean(),\n",
+ " 'MAE': res['MAE'].mean(),\n",
+ " 'IC': res['IC'].mean(),\n",
+ " 'ICIR': res['IC'].mean()/res['IC'].std(),\n",
+ " 'AR': r.mean()*365,\n",
+ " 'AV': r.std()*365**0.5,\n",
+ " 'SR': r.mean()/r.std()*365**0.5,\n",
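+    "        # MDD: max drawdown of the cumulative daily long-short return\n",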
+ " 'MDD': (r.cumsum().cummax() - r.cumsum()).max()\n",
+ " }, r\n",
+ "\n",
+ "def fmt(x, p=3, scale=1, std=False):\n",
+ " _fmt = '{:.%df}'%p\n",
+ " string = _fmt.format((x.mean() if not isinstance(x, (float, np.floating)) else x) * scale)\n",
+ " if std and len(x) > 1:\n",
+ " string += ' ('+_fmt.format(x.std()*scale)+')'\n",
+ " return string\n",
+ "\n",
+ "def backtest_multi(files, **kwargs):\n",
+ " res = []\n",
+ " pnl = []\n",
+ " for fname in files:\n",
+ " metric, r = backtest(fname, **kwargs)\n",
+ " res.append(metric)\n",
+ " pnl.append(r)\n",
+ " res = pd.DataFrame(res)\n",
+ " pnl = pd.concat(pnl, axis=1)\n",
+ " return {\n",
+ " 'MSE': fmt(res['MSE'], std=True),\n",
+ " 'MAE': fmt(res['MAE'], std=True),\n",
+ " 'IC': fmt(res['IC']),\n",
+ " 'ICIR': fmt(res['ICIR']),\n",
+ " 'AR': fmt(res['AR'], scale=100, p=1)+'%',\n",
+ " 'VR': fmt(res['AV'], scale=100, p=1)+'%',\n",
+ " 'SR': fmt(res['SR']),\n",
+ " 'MDD': fmt(res['MDD'], scale=100, p=1)+'%'\n",
+ " }, pnl"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# Preparation\n",
+ "\n",
+ "\n",
+ "You could prepare the source data as below for the backtest code:\n",
+ "1. Linear: see Qlib examples\n",
+ "2. LightGBM: see Qlib examples\n",
+ "3. MLP: see Qlib examples\n",
+ "4. SFM: see Qlib examples\n",
+ "5. ALSTM: `qrun` configs/config_alstm.yaml\n",
+ "6. Transformer: `qrun` configs/config_transformer.yaml\n",
+ "7. ALSTM+TRA: `qrun` configs/config_alstm_tra_init.yaml && `qrun` configs/config_alstm_tra.yaml\n",
+    "8. Transformer+TRA: `qrun` configs/config_transformer_tra_init.yaml && `qrun` configs/config_transformer_tra.yaml"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 2,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "exps = {\n",
+ " 'Linear': ['output/Linear/pred.pkl'],\n",
+ " 'LightGBM': ['output/GBDT/lr0.05_leaves128/pred.pkl'],\n",
+ " 'MLP': glob.glob('output/search/MLP/hs128_bs512_do0.3_lr0.001_seed*/pred.pkl'),\n",
+ " 'SFM': glob.glob('output/search/SFM/hs32_bs512_do0.5_lr0.001_seed*/pred.pkl'),\n",
+ " 'ALSTM': glob.glob('output/search/LSTM_Attn/hs256_bs1024_do0.1_lr0.0002_seed*/pred.pkl'),\n",
+ " 'Trans.': glob.glob('output/search/Transformer/head4_hs64_bs1024_do0.1_lr0.0002_seed*/pred.pkl'),\n",
+ " 'ALSTM+TS':glob.glob('output/LSTM_Attn_TS/hs256_bs1024_do0.1_lr0.0002_seed*/pred.pkl'),\n",
+ " 'Trans.+TS':glob.glob('output/Transformer_TS/head4_hs64_bs1024_do0.1_lr0.0002_seed*/pred.pkl'),\n",
+ " 'ALSTM+TRA(Ours)': glob.glob('output/search/finetune/LSTM_Attn_tra/K10_traHs16_traSrcLR_TPE_traLamb2.0_hs256_bs1024_do0.1_lr0.0001_seed*/pred.pkl'),\n",
+ " 'Trans.+TRA(Ours)': glob.glob('output/search/finetune/Transformer_tra/K3_traHs16_traSrcLR_TPE_traLamb1.0_head4_hs64_bs512_do0.1_lr0.0005_seed*/pred.pkl')\n",
+ "}"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 3,
+ "metadata": {
+ "scrolled": true
+ },
+ "outputs": [
+ {
+ "data": {
+ "application/vnd.jupyter.widget-view+json": {
+ "model_id": "0acd535e05944e539fd001009ed0748d",
+ "version_major": 2,
+ "version_minor": 0
+ },
+ "text/plain": [
+ " 0%| | 0/10 [00:00, ?it/s]"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "res = {\n",
+ " name: backtest_multi(exps[name])\n",
+ " for name in tqdm(exps)\n",
+ "}\n",
+ "report = pd.DataFrame({\n",
+ " k: v[0]\n",
+ " for k, v in res.items()\n",
+ "}).T"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 4,
+ "metadata": {
+ "scrolled": true
+ },
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ " MSE MAE IC ICIR AR VR \\\n",
+ "Linear 0.163 0.327 0.020 0.132 -3.2% 16.8% \n",
+ "LightGBM 0.160 0.323 0.041 0.292 7.8% 15.5% \n",
+ "MLP 0.160 (0.002) 0.323 (0.003) 0.037 0.273 3.7% 15.3% \n",
+ "SFM 0.159 (0.001) 0.321 (0.001) 0.047 0.381 7.1% 14.3% \n",
+ "ALSTM 0.158 (0.001) 0.320 (0.001) 0.053 0.419 12.3% 13.7% \n",
+ "Trans. 0.158 (0.001) 0.322 (0.001) 0.051 0.400 14.5% 14.2% \n",
+ "ALSTM+TS 0.160 (0.002) 0.321 (0.002) 0.039 0.291 6.7% 14.6% \n",
+ "Trans.+TS 0.160 (0.004) 0.324 (0.005) 0.037 0.278 10.4% 14.7% \n",
+ "ALSTM+TRA(Ours) 0.157 (0.000) 0.318 (0.000) 0.059 0.460 12.4% 14.0% \n",
+ "Trans.+TRA(Ours) 0.157 (0.000) 0.320 (0.000) 0.056 0.442 16.1% 14.2% \n",
+ "\n",
+ " SR MDD \n",
+ "Linear -0.191 32.1% \n",
+ "LightGBM 0.503 25.7% \n",
+ "MLP 0.264 26.2% \n",
+ "SFM 0.497 22.9% \n",
+ "ALSTM 0.897 20.2% \n",
+ "Trans. 1.028 22.5% \n",
+ "ALSTM+TS 0.480 22.3% \n",
+ "Trans.+TS 0.722 23.7% \n",
+ "ALSTM+TRA(Ours) 0.885 20.4% \n",
+ "Trans.+TRA(Ours) 1.133 23.1% "
+ ]
+ },
+ "execution_count": 4,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "report\n",
+ "# print(report.to_latex())"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# RQ1\n",
+ "\n",
+ "Case study"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 5,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAewAAADMCAYAAACx8ZDiAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAABZ7ElEQVR4nO2dd3hUVfrHP9PSE9IbNSCEriAgKh2BnwqCriAiiCsqdlBXZXGXoqiwFkRdV0WsgCICIkEpIlXpHRICoQfSezKTTGbm/P4YMiQkgUyYmjmf5+Fhcst7vvPeufe9p71HIYQQSCQSiUQicWmUzhYgkUgkEonk2siALZFIJBKJGyADtkQikUgkboAM2BKJRCKRuAEyYEskEolE4gaonS3A1pSWlnLkyBEiIiJQqVTOliOR2Ayj0UhWVhYdO3bEx8fH2XKcgry/JQ2Za93jDS5gHzlyhIceesjZMiQSu7Fo0SK6devmbBlOQd7fEk+gtnvc4QH79OnTTJkyhfz8fIKDg5kzZw4tWrSocsyyZcv4+uuvUSqVmEwmRo4cycMPP1wn+xEREYD5C0dHR9tavkTiNNLT03nooYcsv3FPRN7fkobMte5xhwfs6dOnM2bMGIYPH87KlSuZNm0a3377bZVjhgwZwn333YdCoaC4uJhhw4bRo0cP2rZte037Fc1k0dHRNGnSxC7fQSJxJp7cFCzvb4knUNs97tBBZzk5OSQmJjJ06FAAhg4dSmJiIrm5uVWOCwgIQKFQAOY+q/LycsvfEonE/ViyZAnDhw9nwIABzJs3z9lyJBK3xKEBOy0tjaioKMvbg0qlIjIykrS0tGrHbtiwgbvvvpv+/fvz2GOPER8f70ipEonERqxZs4adO3fy008/kZCQwNKlS8nMzHS2LIkjyMmBvn1h/37z/2fOOFuRW+Oyg84GDhzIwIEDuXjxIs888wx9+vShZcuW9bZXXl5OamoqpaWlNlTpPHx8fGjSpAkajcbZUpzKxaIMtp7ZxaiOQ2UrjAtiMpmYO3cuS5YsQaPRoNFoiIqK4tSpU0RGRjpbnsTefP01bNsGo0dDSgpMnAhr1zpbldvi0IAdExNDRkYGRqMRlUqF0WgkMzOTmJiYWs+JjY2lU6dObNq06boCdmpqKoGBgbRo0cLtH+xCCHJyckhNTSUuLs7ZcpzK2hOb+e3ERvrH3UpkQLiz5UiuYN++fWRnZzN+/HjLtpSUFEJDQ52oSuIQhIC5c8FkguPHzdu2bYPffoM773SuNjfFoU3iYWFhtGvXjoSEBAASEhJo165dtZv35MmTls+5ubns3LmTNm3aXFfZpaWlhIWFuX2wBlAoFISFhTWY1oLrISnrBABpxbKJ1RU5cuQIo0aNYuXKlaxcuZJ3330XHx8fTCYT06ZN47XXXmPKlCnIRQMbIFu3QkFB1W1aLUyYAPLZVS8cnulsxowZLFy4kCFDhrBw4UJmzpwJwOOPP87hw4cB8wCVu+++m+HDh/PII48wduxYevXqdd1lN4RgXUFD+i71RavXcTb/AgBpRTJguyJ5eXn4+vpa/l6zZg0DBw6kbdu2vP7667z55ptotVq0Wq0TVUrswgcfQElJ9e0FBTB7tsPlNAQc3ofdqlUrli5dWm37/PnzLZ+nTp3qSEkSN+VY9kkE5ppZugzYLklcXBwrVqwA4MSJEyxbtozvv/8egB07drB06VJCQkKqBHVJAyE52dwsfiVaLaxeDTNmOFySuyNziUvclmPZKagUSmICI0krznK2HEkNDBkyBLVazYABA5g6dSpz5861jFnp2bMn7733Hmq1mqSkJCcrldico0fNAbumf7t3O1udW+Kyo8QlnofJZMKEQK2sW2KQpKwUWoY2J8wvhDN55+2sTlIffH19WbBgQbXtO3fuZO3atQghMBgMtG7d2gnqJBL3QgZsJ7NkyRIWL15MUVERw4cPZ9KkSc6W5DS+PrCUbWd383zPR7kppv1Vj9Ub9KTknuHuNgNQKVTsSj2AwWSsc7CXOJdbbrmFW265xdkyJBK3QgZsJ1I5oUR5eTmDBw/mwQcf9Mj5qSV6LX+c+hOjMPH2lo8Z1nYQbcLiCPT2J8Q3mFDfYLxUl+ecp+SewWgy0i6iNUVlxZiEiaySHGIC3dN35XnpaE/swS++B5pG7vkdJBKJfZEB20nIhBJV2XxmB3pjOa8PeIl1J7fyy7F11Y4J8PInzDeYUL9gistKUKAgPrwlFwrTAfNIcXcL2EZtERnL36H07FFAgTo4SgZsiURSIx4bsP/Yc471u87ZxfagHs0Y0K3ZVY+RCSUuI4Rg/cmt3BDagrYRN9A24gbG3ngvhaVFFJQVkacrIFeXT442j1xd/qV/BXSN7UiAlz8xAeYAl+6Gc7FzNy2i9FwSIf0eIrBTX9RBYc6WJJFIXBSPDdjOpiKhxKuvvgqYp7yMHj2amJgY/vnPf/LXX3+xefNmJ6t0DElZKVwoTOep7uMs20IvNYPXhUDvAPw0vlwsyrCTQvtQdjGFov2/06jH3YTcfp+z5UgkEhfHYwP2gG7XrgXbk9oSSgQGBvL222/zyCOPOE2bIxBCsPjQz6QWppFamI6/xpfbmlVfsL0uKBQKYgIiSS+qOrWrRK/FS6VBo3K9fOtCmMhe+wUq/0aE9HnA2XIkEokb4LEB29lcLaGEJ3A08zgrj60jJjCSIC9/hsXfgbfaq972YgIjSc45BUBWSQ4rj61j46m/aB/Zmql9nnO5zHDaE3spu3iCiGHPofT2c7YciUTiBsiA7SSGDBnCqlWrGDBgAGFhYVUSSngCCcc3EOQdwDtD/lVl9Hd9iQ6M5M9ze/jl2DqWHF6FCUHb8FYcTE9iw6k/uaPV9ae2tSXleeaBcn5tujtZiUQicRdkwHYStSWUAJg5cyanTp1i2rRpPP744zRt2tTB6uzLhcJ09l08zMgOd9skWAPEBEQiECw8uIJusZ159OYHCPUN5o1N8/juwDJuim5PuL/rDOgzaQtBoZS1a4lEUmdkalIXZPr06WzZsoXXX3+9wQVrgNXH/0CjVDP4hj42s9ku8gbiQpoysdtDvNzrScL9QlEqlDzVfRwmBJ/tWVSnFaF+P7mNn5Psv16vUVeMyi/Q5ZrqJRKJ6yIDtsShFJYWsfnMDvq06EkjnyCb2Q33C2XO4KkMbNWrShCMDAjnoc4jOJieyKbT269pZ/3JLaw9Yf/R+SZdIUrfQLuXI5FIGg6ySVziUNad3EK5sZy74wc4rMzBN/Rh+/l9fHPgJ26Mbk+oX3CNx5mEiYuFGZQZ9ZQZ9Nc1CO5aGLVFqPxs98JiL/Lz8/nyyy9JSkqqtgTmokWLnKRKIvFMZMCWOAy9sZy1JzbTJaYjTYIcN8DO3DQ+ln+sncXnexbxau+na2yKztXmU2bUA5BRnEWz4MZ202TUFaIJjbWbfVvx0ksvodfrufPOO+USmBKJk5EBW+Iwtp3dRUFZEUPjBzq87OjASB7sNJxvDvzE1rO76NOi+sITqZdSnAKkFWfaNWCbtEWomrh+DXv//v3
s2LEDLy/7tTZIJJK6IfuwJQ5BCEFC8gaaBzehY2S8UzTc2bo/8WEt+Wr/j+TpCqrtv1hUKWAX2S/NqRACo7YQlRv0YcfHx5Oenn7tAyUSid2RNWyJQziYnkhqYRrP9BjvtJHRSqWSp3qM4+V1bzF/7/e8fPvEKlpSC9Px9/JDrVCRXpx1FUvXh6lMC8KE0g36sHv27Mljjz3GfffdR3h4eJV9999/v5NUSSSeiQzYEoeQkLyBEJ9G3F7P9KO2IjYomgc6DmPhweWsS9nCkNZ9LfsuFKbTJDAaFArS7VjDNumKANyihr1nzx6ioqL4888/q2xXKBQyYEskDua6AnZpaSlKpVL2b0muytn8VA5lJPFgp+GoVc5/RxwaP5DErBN8vf9HmjSKoUNkGwAuFqZzc2wnTEJwMCPRbuUbtYUAqPxcP2B/9913zpYgkUguYVUf9pw5czh06BAAmzZtokePHnTv3p0//vjDLuIkDYPVyX/grfJiUKvezpYCmEeNP3/L34kOiOT9Pz8nqySH4rISCsqKaBwUQ3RgBHm6AkoNZXYp36Q117CVvq7fJA5QUFDAzz//zGeffcbPP/9MQUH1/n+JRGJ/rArYq1atonXr1gD897//5Z133uF///sfc+fOtYs4T2DJkiUMHz6cAQMGMG/ePGfLsTl5ugK2nttFv7hbCfD2d7YcC35evrzS+yn0xnKWHF5lGSHeOCiK6Evra2fYqR/bqHOfGvb+/fsZNGgQP/zwA8nJyfzwww8MGjSI/fv3O1uaROJxWNU+qdPp8PX1JS8vj/PnzzNkyBAALly4YBdxDZ01a9awc+dOfvrpJ8rLyxk8eDAPPvggkZGRzpZmMzae/guTycTdbRyXKKWuxARGMqhVb349sdGSTKVxUDTa8lLAPFK8eXATm5drvFTDdofEKW+99RbTp0/n7rvvtmz79ddfmTVrFsuWLXOiMonE87Cqht2iRQt++eUXFi1axO233w5Abm4uPj4+dhHXkDGZTMydO5dp06ah0Wjw8/MjKiqKU6dOOVuaTcnR5hHo7U90oGu+hAxtewcqhZJVx9ajUWmI8AsjOiACwG4jxU26QlCqUXi5fiKSM2fOcOedd1bZNmTIEM6dO+ckRRKJ52JVDXv69Om89dZbaDQa3nzzTQC2bdtmCd7uRNGhTRQdtE/fe+CNAwjs3O+qx+zbt4/s7GzGjx9v2ZaSkkJoqOusKGULjCYjaqXzB5rVRqhvMP1b3sa6lC00CYxFqVTiq/ShkU+Q3eZiG7VFqHwD3GLhj+bNm7N69WqGDRtm2bZmzZoGuSiNROLqWPUk7dy5Mz/88EOVbffccw/33HOPTUV5AkeOHGHUqFG8+uqrAJw4cYLRo0dTUFDA1KlT0ev1BAUFMW3aNCcrvT4MJiMqpcrZMq7K8LaD2XByG42Doi3bYgIiSC+2U8DWFbnFHGyAqVOn8uSTT/Ldd98RGxvLhQsXOHv2LJ9++qmzpUkkHodVAXvHjh00btyYpk2bkpmZyXvvvYdSqeTFF18kIiLCXhrtQmDnftesBduTvLy8KrmZ16xZw8CBA+nevTvdu3cH4KmnnqKkpAR/f9cZrGUtBpMBtYsH7Aj/MF7p/RRR/pcTg0QHRrL/4hGMdnjhMGkL3WLAGUDXrl1Zv349mzZtIjMzk/79+9O3b1+Cg4OdLU0i8Tis6sOeOXMmKpX54TVnzhwMBgMKhYJ///vfdhHXkImLi2Pv3r2AuXa9bNkyXnjhBcv+TZs20apVK7cO1gAG4dpN4hV0ielIbKUadsfIeArKinhry0cUlhXbtCyjrgiVm0zpAmjUqBHDhw/n8ccfZ/jw4TJYSyROwqonaUZGBrGxsRgMBrZt28Yff/yBRqOhd2/XmF/rTgwZMoRVq1YxYMAAwsLCmDt3LjEx5hWsli9fzoULF/jHP/7hZJXXj8FkRK1w7Rp2TfRpcQsmYWL+nsX8c93bvHT7RFqGNrOJbaO2EKUL17AnTJjAggULABgzZkytfe1yeU2JxLFYFbADAgLIzs7mxIkTltqfXq/HYDDYS1+DxdfX1/JQrMzGjRv54IMP6NevH9OmTWPy5MluPRDNPOjM/QI2QL+4W2naKJZ3t33Gv/94l4ndHqpxlS9rEMKESVdsqWHnFOj4fl0yfbs0odMN4dc42zGMGDHC8nnkyJHOEyKRSKpgVcAeO3Ys999/P+Xl5UydOhUwj3Zu2bKlXcR5Iv3796d///7OlmEzDCaDyw86uxqtQpsze/AU5v71BR/v/JqTuWcZd9Pf6v0SYiotAWFC5RdIek4J//r0LzJytazdcZYB3ZryxIhO+PtqbPwtrKPyiPCWLVty4403VjumIuOhRCJxHFYF7CeeeIJBgwahUqlo1szcPBgVFcWsWbPsIk7i/rj6tK660MgniH/1m8TCg8v59fgfnMlP5damXblYmEG7yBu4tenNdbZVkTSl2OTN1I+3Um4w8dbTt7M/OZPlG1O4sXU4A7rZpundFvz9739n37591bY/9thj7Nq1ywmKJBLPxeonadOmTdm/fz+HDh0iKiqKLl26oFa79wNZYj8MJiM+am9ny7hu1EoVj3QZSauQ5ny2ZyFJWScAOJqZbFXANl1KS3osvZzcwnI+eKEvrZoE06lVOPcPaI23xjVaI0wmE0KIKv8qOHfunGXwqUQicRxWRdqTJ0/y1FNPUVpaSkxMDGlpaXh7e/Ppp5/SqlUre2mUuDHu3iR+Jb1b9ODGmPYYTUZWHlvHhpPbEELUOQlKRQ07U6vE11tNy8aNLPv8fJzbFF6Z9u3bW75T+/btq+xTKpU8+eSTzpAlkXg0VgXsmTNnMmrUKCZMmGC5mRcsWMCMGTPqvAzf6dOnmTJlCvn5+QQHBzNnzhxatGhR5Zj//ve//Prrr6hUKtRqNS+88IIcie6mNIQm8SsJ8g4AINI/jDKjnsKyIhr51G2aVsVa2BeKFMSE+btstrMNGzYghGDcuHEsXLjQsl2hUBAaGirTEUskTsCqJ+mxY8f46quvqjxkxo8fb1XWo+nTpzNmzBiGDx/OypUrmTZtGt9++22VYzp37syjjz6Kr68vx44dY+zYsWzbtk0+JNwQd8h0Vl8iLyVaySzJqXPArlgL+2yuIDbWz27arpfGjRsD5oQ+SqUSjeZy7b+8vBy9Xo+Xl5ez5EkkHolViVMiIyOrDTTZs2dPnVeXysnJITExkaFDhwIwdOhQEhMTyc3NrXJc7969LVnA4uPjEUKQn59vjVSJi2BOnNJQA3YYAJkl2XU+x6grApWG1Dw90aGunxTn0Ucf5ejRo1W2HT16lAkTJjhJkUTiuVhVw37hhRd4+umn6devH7GxsVy8eJFNmzbxzjvv1On8tLQ0oqKiLANWVCoVkZGRpKWl1TrX+Oeff6ZZs2ZER0fXuF/i2phTkzasJvEKLAG7OKfO55i0hSh8AjAYBdHhrh+wk5OTq03r6ty5M8eOHXOSIonEc7Gqhj1w4ECWL19O69atKSkpoXXr1ixfvpw77rjDLuJ27drFvHnzeO+99+xiX2J/3D
XTWV3w0fgQ5B1AZkndA7ZRV4RBbQ7UMWGu2yReQVBQENnZVVsQsrOzq+TBl0gkjsHqqk9cXBxPP/205W+9Xk+/fv3YtGnTNc+NiYkhIyMDo9GISqXCaDSSmZlpSclZmf379/Pyyy/zySefyMQsbow7ZzqrC5H+4dY1iWuL0CvNYzGiw1y/hj148GBeeukl/vWvf9G0aVPOnTvH7Nmzq62RLZFI7I9VNezaSE9Pr9NxYWFhtGvXjoSEBAASEhJo165dtebwQ4cO8cILL/Dhhx/SoUMHW0iUOImGNq3rSiL9w6yqYZt0RRQLb1RKBRHBrl9LfeGFF2jVqhUjR46kS5cuPPDAA8TFxfHiiy86W5pE4nHYpHPRmqkpM2bMYMqUKXzyyScEBQUxZ84cAB5//HGef/55OnXqxMyZMyktLa2yFvR//vMf4uPjbSHXpViyZAmLFy+mqKiI4cOHM2nSJGdLsikNcVpXZSIDwtl54QAmkwml8trvv0ZdEYXKSCJD/FCpbPK+bFe8vb2ZPn0606ZNIy8vj5CQEJediiaRNHQc/iRt1aoVS5curbZ9/vz5ls/Lli1zpCSnsWbNGnbu3MlPP/1EeXk5gwcP5sEHH6zzqHtXxyRMGIWpgTeJh2E0GcnV5RPuf/VFWioW/shVq4l2g/7rCk6ePMmaNWvIyclh2rRpnDp1Cr1eT9u2bZ0tTSLxKOoUsF9++eVa36qNRqNNBTmKzad3sPH0X3ax3T/uNvrG9bzqMSaTiblz57JkyRI0Gg0ajYaoqChOnTrVYAK20WT+bTTsJvGKudjZ1wzYFQt/ZGqVRLd0/f5rgN9++42ZM2cyePBgEhISmDZtGiUlJbz33nt8/fXXzpYnkXgUdQrYzZs3v+r+Z555xiZiPIl9+/aRnZ3N+PHjLdtSUlLceinNK6kI2A26SdwyFzuH9tc4tiLLWW6Zhk5uMOAM4MMPP+Srr76iXbt2/PbbbwC0bdtWTuuSSJxAnZ6kzz77rL11OJy+cT2vWQu2J0eOHGHUqFG8+uqrAJw4cYLRo0djMpmYNm0aRqMRo9HI22+/7bZ9hgZLwG64Nexwv1AUKOo0Urwij3iJ8CYm3D2axHNzcy1N3xW/Q4VC4ba/SYnEnXH9US8NlLy8vCpzWdesWcPAgQNp27Ytr7/+Om+++SZarRatVutEldeHQTT8gK1WqQn1C65T8hTjpRp2icnbLaZ0AXTo0IGVK1dW2bZ69Wo6d+7sJEUSiefScNsqXZy4uDhWrFgBmGvXy5Yt4/vvvwdgx44dLF26lJCQELdOUGEwGQBQNdDEKRXUdS626VIe8RLhTVSoe9SwX3vtNSZMmMBPP/2EVqtlwoQJnD59mi+//NLZ0iQSj0MGbCcxZMgQVq1axYABAwgLC2Pu3LmWBDI9e/akZ8+evPHGGyQlJbntXHSDB/Rhg7kfe8+FgyQkbyA+vCWtw+JqPK6ihq30DXSppTSvRqtWrfjtt9/YuHEj/fr1IyYmhn79+uHv7x4tBBJJQ6LOT1Kj0cjHH3/MU089JVfpsQG+vr4sWLCg2vadO3eydu1ahBAYDAZat27tBHW2wTLoTNWwa9g3x3ZiX9oRvj3wEwD/6vs8naPbVTvOpCvChBL/wEBHS7wufH19ueuuu5wtQyLxeOocsFUqFYsXL+a5556zpx6P55ZbbuGWW25xtgyb4ClN4j2bdqVn067klxbyz/Wz+eHwL3SKalttYJZRW4RO4UNoI9fu5hgzZkydBpUtWrTIAWokEkkFVrVVjhgxgu+//56HHnrIXnokDQhPmNZVmWCfIO5vfxef7VnE3ouH6Na46ipXRl0RJSZvQoK8naSwbowcOdLZEiQSSQ1Y9SQ9dOgQCxcuZMGCBURHR1d5C5dv25Ir8YRpXVfSN+5WVh5bx5LDq+ga2wml4vJEDKO2kEKjF6FBPk5UeG3uvfdeZ0uQSCQ1YFXAHjVqFKNGjbKXFkkDo6JJ3JMCtlqpYlTHoXy44ysOpifSJaajZZ+hpJBik7fLB+zKCCFYunQpCQkJ5OXlsWrVKnbv3k1WVpbs15ZIHIxVAdvd37yFEA0m4YMQwtkSromnjBK/ks7R5pxnaUWZdKm0cqxRW0SJKYooNwrY8+bN46+//mL8+PFMnz4dgOjoaN5++20ZsCUSB2N14pRly5bx8MMPM2TIEB5++GG3WajDx8eHnJwctwh010IIQU5ODj4+rv3gN3hALvGaCPDyQ6VQkl9aaNkmhECUFVMifAgNdO3rVpkVK1bw6aefcvfdd1tedps0acL58+edrEwi8Tysqvr873//4+eff+bRRx8lNjaWixcv8sUXX5CZmclTTz1lL402oUmTJqSmppKVleVsKTbBx8eHJk2aOFvGVTEKz6xhKxVKGvkEVQnYpjItCmGiRLj+oLPKGI1Gy5zrioBdUlKCn597JH6RSBoSVj1Jly5dynfffUfjxo0t23r16sXYsWNdPmBrNBri4mpOaCGxD5endXleBtxgnyAKKgfsSmlJ3akPu2/fvrz99ttMnToVMLcUzJs3j/79+1tlp6Gv+y6ROAKrnqQ6na7aalLBwcGUlpbaVJSkYWAwViRO8awaNkCwbyPydAWWv42X0pKaNH54adyni+Cf//wnmZmZ3HzzzRQVFdGlSxcuXrzIP/7xjzrbqLzue0JCAkuXLiUzM9N+onNyoG9fOHPm8uf9+y9va6jU9L2t9YE1Nirvt6XO2uzV5TxXsVfTdlv8BoUVvPzyy+Lpp58WJ0+eFDqdTqSkpIhnn31W/OMf/7DGjF05f/68aNOmjTh//ryzpXg8G0/9JUb+8KTIKM52thSH879d34nHf37F8nfJiT3i5Kz7xMy3F9XbpjN/29nZ2eLgwYMiMzPTqvOMRqMYPHiwyMvLs2y77777xPbt2+ulo04+ePddIZRKIQYPvvy5TZvL2xoqNX1va31gjY3K+22pszZ7dTnPVezVtL0O/r/W79uqgF1UVCRefvll0bFjR9G2bVvRsWNH8fLLL4uCggJrzNgVGbBdh/UpW8TIH54UOSV5zpbicL4/tFKMWvKUMBqNQgghCg9uFCdn3Sdmf7yq3jad/dvetm2b+Oqrr8S+ffvqfM7u3btF165dxT333GP51759e5GcnFwvDdf0gckkROPGQoAQvr5ChIWZP1f88/MT4tdf61W2S1Pb97bGB9bYqLzNGp/WpYya7NXlPFexV3l/5e11+A1e6/dtVVtlQEAA//nPf5g9ezZ5eXmEhISgVHpe/6Skbnhi4pQKgn2CEEJQqC8m2CfIsvCHb1CIk5XVjRdffJFbb73VkvXs888/58MPPyQ+Pp4PPviAGTNmMGLEiGvaqW3d94KCAqZOnYperycoKIhp06bZRvjWrVBwqStCp4Mru+u0WpgwAU6dAhefZWEVtX1va3xgjY3K26zxaV3KqMleXc5zFXuV9+/adXl7BdfxG7Qq2vbo0cN8klJJWFiYJVjfeuutVhUq8Qw8dVoXmAM2QL7O3Hdt1BZiFAqCghs5U1ad2bdvHwMGDADAZDLx5Zdf8t577
7Fs2TLmzZtX5+U1a1v3vXv37rz11lu8++67pKWlUVJSYhvhH3wAlW3VNI2zoABmz7ZNea7C1b53XX1grY3K2+rq07qWcaW9up7nKvYq9l+5vbbz64hVAbu8vLzGbSaTyeqCJQ0fT8slXpkQX3Ngzi81v12XFRWgFd6EuPjCHxUUFhYSFhYGQGJiImVlZdxxxx0A9OnTh4sXL9bJTlxcHHv37gUur/v+wgsvWPZv2rSJVq1a2W65zuTkmoNLZbRaWL3aNuW5CnX53pWpyQfW2riWvZqoaxlX2rsebc6wV7G/tnLq+Rus05O0YvUevV5fbeGP9PR0unTpYnXBkoaPJ6YmrcBSw740tau0qIAS4e02SVNCQkJITU2lSZMm7Ny5ky5duqC6tEyqVqu1fL4WV1v3ffny5Vy4cMGqEefX5OhR29lyJ2zxvR3hu/qWYWttrm6vFuoUsEeOHIkQgsOHD3P//fdbtisUCsLCwujZs6fdBErcl4omcaUHzsNudEXArsgjHusmSVNGjhzJxIkT6dWrFz///DP//ve/Lfv27NlDy5Yt62SntnXfN27cyAcffEC/fv2YNm0akydPrjZlVCKRVKVOAbsih/iNN95Iq1at7CpI0nAwCiNqpbrB5G+3Bh+1N75qH/IvzcU26YrQCm9CG7lHDfvJJ58kKiqKI0eO8NprrzF06FDLvtzcXB599NHrst+/f3+rk69IJJ6OVZ2L33//PXfddRddu3a1bNu3bx+//fYbr732ms3FSdwbg9HgkQPOKgiunJ60rJhiU5TbNImD+UW9pgV/3H0RIInEXbGqrTIhIYGOHTtW2daxY0cSEhJsKkrSMDAIo0f2X1cQ7NuIvNJChBCoDSXolb74eHveADyJRGIbrArYCoWi2mpXRqNRjhKX1IjBZPTIEeIVmGvYBQh9KUphQngHOFuSRCJxY6wK2N26deODDz6wBGiTycRHH31Et27d7CJO4t4YTAbUCg+uYV9qEjdemout8A10siKJROLOWFX9ee211ywjR2NjY0lLSyMiIoJPP/3UXvokbozR5OFN4j5B6MpL0RblAuDlH+RkRdZhNBp55JFHWLBgAV5eXs6WI5F4PFYF7OjoaFasWMHBgwdJT08nJiaGzp07y/SkkhoxmIweP+gMILcwAwDvwGAnqrEelUpFamqq7PKSSFwEqzsYlUqlTJQiqRNGD+/Drsh2lpWfSTjg1yjYqXrqwzPPPMOMGTN47rnniI6OrjJFT76oSySO5ZpP0zvvvJPffvsNMC9mX9uc2k2bNtlUmMT9MZgMqDz4oV5Rw84oyCEcCAxxj4U/KvOvf/0LgJUrV1q2CSFQKBQkJSU5S5ZE4pFcM2C/8cYbls/vvPOOXcVIGhZylLg5YOdp8zEJBcHh7hewN2zY4GwJEonkEtd8mlYeAV6xWpdEUhcqMp15KkHegSgUCvL1xWiFFyFBfs6WZDWNGzcGzDNCsrOziYyMdLIiicRzuebTdN68eXUyNGnSpDodd/r0aaZMmUJ+fj7BwcHMmTOHFi1aVDlm27ZtvP/++xw/fpxx48ZZ1tKVuBcGowEvtcbZMpyGUqkkyDuQolIdJcKbVkHuk+WsgsLCQmbOnMnatWtRq9UcOHCADRs2cOjQoSqrbkkkEvtzzQ7G9PR0y7+zZ88yf/58tm/fzrlz59ixYwfz58/n7NmzdS5w+vTpjBkzhrVr1zJmzJgaF65v2rQps2bNYsKECdZ9G4lLYfDwGjZAiE8QxUKPVvjg5+N+vpg+fToBAQH88ccfaDTml68uXbpYxrVIJBLHcc0nyNtvv235/MILL/Dee+8xZMgQy7Z169axZs2aOhWWk5NDYmIiX331FQBDhw7ljTfeIDc3t8pKPc2bNwfM/Wd6vb5u30TichhMRlQenDgFzP3Y6YpUytV+brkIyvbt29m6dSsajcaiPzQ0lJycHCcrk0g8D6uG8G7ZssWyiH0FAwcOZPPmzXU6Py0tjaioKMtauiqVisjISNLS0qyRIXETDCaDx9ewg30aoVWZMGn8nS2lXgQGBpKXl1dl28WLF4mIiHCSIonEc7EqYDdv3pxFixZV2bZ48WKaNWtmU1GShoGnZzoDaOQTSIlKgfBxz4A9cuRInn/+eXbs2IHJZGL//v28+uqrjB492tnSJBKPw6rqz6xZs3j22Wf54osviIqKIiMjA7VazUcffVSn82NiYsjIyMBoNKJSqTAajWRmZhITE1Mv8ZKqbDmzk3C/ENpHtnG2FEBmOgNopPHDqFBg8He/EeIAjz/+OF5eXrz++usYDAamTp3KAw88wPjx450tTSLxOKwK2O3bt2ft2rUcPHiQzMxMIiIiuOmmmyyDUa5FWFgY7dq1IyEhgeHDh5OQkEC7du2q9F9L6s93B5YRExjJ6wP/4WwpgMx0BuBnMr+wlPt5O1lJ/cjOzuaRRx7hkUceqbI9KytLNotLJA7mutJQde/enfLycrRabZ3PmTFjBgsXLmTIkCEsXLiQmTNnAuY3+cOHDwOwZ88e+vTpw1dffcUPP/xAnz592Lp16/VIbfAYTEYKyoo4kXOaUkNZnc8rLS+lRF/362edJs/OdAag0pnzcBv83HPxjMoDTCtz9913O1iJRCKxqvqTnJzMU089hZeXFxkZGdx1113s3r2bFStW8MEHH9TJRqtWrVi6dGm17fPnz7d87tatG1u2bLFGmseTX1oAgFGYOJZ1kpti2tfpvI92fk12SS6zB//TJqOYtdpSvLzUqNVqj890BqAsMc9yMPq6px+EENW2FRcXu+WId4nE3bHqKTJjxgyef/55RowYQffu3QFzLbsi37DEeeTpCiyfj2Qm1ylgG0xGDmUco8xQxpn8VOJCml63jqT3n6KkRW/6jH1UzsMGlMXm1g6Dj3u1NFSsG1BWVka/fv2q7MvPz5c1bInECVj1NE1JSWH48OEAljdsPz8/ysrq3gQrsQ8VAdvfy4+jGcl1Oudk7hnKLjWfbz2z87oDdqlOR4iikKKcc0DFtC73ClS2xlRUjAaBXm10thSreOeddxBC8MQTT/Cf//zHsl2hUBAWFkbLli2dqE4i8UysCtiNGzfmyJEjdOrUybLt0KFDclqXC5CrywfgtqY38/upbRTrSwjwuvpUoiOXAnu7iBvYdm43Y2+877qWTCzMNc/XVZUVYjKZEEJ4fA3bUFJIoLcJLe6VAKhi3YAdO3bg6+vrZDUSiQSsDNiTJk1i4sSJjB49mvLycj777DN++OGHKit6SZxDnq4ApULJbc26sf7kVpKyUuje+MarnnM08zjNGzXmztb9ef+v+RzOPMaN0XXr+66J4ksJNrwMxRiEuUbp6ZnOjNoi/NSCgrIiZ0upF59//nmt++q6foBEIrENVlWn+vfvz/z588nNzaV79+5cuHCBjz76iF69etlLn6SO5OkKCPFpRHxYS7xUGkvtuTbKjeUk55yiQ1Q8XWM74afxZeuZXdelQZtvDti+phIMJgOAx9ewKSvGz6gkv7TQ2UrqReW1BNLT0zl8+DBffvkl586dc7Y0icTjqPPT
1Gg0MmTIEH799VdmzJhhR0mS+pBXmk+IbyPUKjXx4a1Iyjpx1eNP5Jym3FhOx8g2eKk03NKkC9vP70UIUe8RwLqiQvyAAEUppaWlAB6f6UxVXoKfUHHRTQN25bUEKtiyZQurV692ghqJxLOpcw1bpVKhUqnkADMXJVdXQIhvIwBiA6PI0uZe9fgjmcdRKBS0i2gNQOOgaEoNZVbN4b4Sfcnlkeq52RkAHp/pTGPU4YcXxfoSyo3lzpZjE3r16sXvv//ubBkSicdhVXvlww8/zOTJk5k4cSLR0dFVamJNm17/lCBJzeiN5XyzfynFei2Tbn0UpaL6e1aeroB2ETcAEOYXQoleS6mhDB91zRm2jmYeJy64Kf5e5pSZFQPUivUl+Grqt26zoeRyP21eVhbg2U3i+nIjvkKHvyocKKOgtIhwf/fK6nf+/Pkqf+t0OhISEmQ6YYnECVj1NK0YXPbnn39W2a5QKEhKSrKdKgkAJoOeU+u/YH7ZeU6X5QNwY3R7BrS8rcpxemM5xfoSQn2DASz/52rziA2Krma3zKDneM4p7mrd37It4FLgLtZrifAPq59e3eWAXZifDXh2k3heURn+yjIaaQKAPPJLC90uYA8aNAiFQmFJoOLr60u7du2YPXu2k5VJJJ6HVQH72LFj9tIhuQKjtpD0pXP4TKST5q3mmeZ9+F2XxqKDy+neuDOB3gGWY/MvzcEO8TE3iYf5hQCQo8uvMWAnZ5/EaDLSITLesq0iYJfoS+qtWZSVYBQKVAqBrtDcJO/RATu3AC+FkWC/YCg/b8lG507Ie14icR3q1Iet0+l4//33efLJJ/noo4/Q691rTqm7IIwGCnb/SsaK90n9/AV0aSe54OdDL+FP0y2/MDa2ByXlOr4/tLLKebkVAftSzTrs0v852qrrGFdwNPM4SoXS0oQOlZvE659XXKkvoUBpfmmo6M/25D7svBzzS0tEkLnFwl1HihsMBnbv3k1CQgJ79uzBYDA4W5JE4pHUqYb9+uuvc+TIEXr37s3atWvJz8/n3//+t721eRw5G76hcPevqIPC8W7cBrr0x7D3K1rfPBRNwQrUGxZz5+0D+fX4Rga0vJ0bwloA5hHiAKGXBp2FXqphVyRTuZKjmcdpFdq8Sl915T7s+qI2aClTB6LV6ygvLQQfz+7DLs7LJQqICouGnKrpY92FkydP8tRTT1FaWkpMTAxpaWl4e3vz6aef0qpVK2fLk0g8ijrVsLdu3cqCBQt45ZVXmD9/Phs3brS3Lo+jOPFPCnf/SlCPoTR77jOiR75Klq85oDYNa0Fw75EYCrK4J7g1wT5BzN+7GJPJvBJUnqWGbQ7YXioNgd4BNdawS8tLOZl7hg5XrJlduQ+7vniZdJg0fmgV/ohLgd+Tm8S1+fkABIeEE+gd4JY17JkzZzJq1Cg2b97MkiVL2LJlC6NHj5ZTOyUSJ1CngK3VaomMjAQgJiaG4uJiu4ryNPTZqWSt/gTvxvGEDRhr2X6xKB2A2KAo/Nt0R6H2wpi8m4e7/I3TeedZf9K85GiurgC1Ul0lFWmYbzA5NdSwj2WfxChMdKzUfw3gpfZCo1RfV8D2FqUIb3/K1AEoDGY77pbpbN3OsyzfmGITW/pi84uU2j+IYJ8gtwzYx44d4+9//3uVGSHjx4+XfdsSiROoU3ul0Whkx44dlpGiBoOhyt8At956q30U2phz6YUUlujp2Crc2VIAMOl1ZCx7B4Xai6j7XkKh0lj2pRam08gnyBKI/VrfTMmx7dw66O9siIznh8Mr6dm0C3k6c9KUyg/VUL8QcmuoYR/JTEalVBEfXr05M8DLv95N4uUGI36UUuQTiEGvR206D3jbvUn8bFohWw5c4KEhbVEqr2/JRyEE369LBiG4r/8N1z7hGpSXmAO00jfQbQN2ZGQku3btqnJ/79mzx/ICL5FIHEednqZhYWFMnTrV8ndwcHCVvxUKBRs2bLC9Ojvw++7zrNiUwvA+rXhkaHvUKuetJiWEICvhE8pzLhLz4L9RB1WdTnWxMIMmlUZ5B7TvTUnSdkrPHmHCzaP5x9pZLDy4gjxdAaGXRohXEOYbzInsU9XKPJpxnNahLfBWe1XbF+DlV++AXZRfgEohUPkGYigvR1NShjlg26+GrS83Mvvb3aRmFtP7psa0iAmqqkmrx8dLhUZdNw1n04vIztcBUFpmwMf7+l42Kqa5qXwDCPYJ4liWbWrujuSFF17g6aefpl+/fsTGxnLx4kU2bdrEO++842xpEonHUacn0h9//GFvHQ5j3J3tKDcYWbnlJMlnc3llXHciQpyzGlHh7tWUJP1FaP+x+MZ1rrJPCMGFwjRua9bNss33hi4ovP0oPvonjYc9w7D4O/g5aS0+am9uiu5Q5fwwvxCK9CXoDXq8LgVnrV7Hqfxz3Nfuzhr1BHj7U1LPJvHCioU/AoIwmATozP3r9gzYC9ccIzXT3D1z9FROlYBdUFzGs+9spHlMIG9MvK1O6Vb3JGVYPl/MLqFl40ZXOfraKPXFlGu8UajUhPg2Ir+08LpSvzqDgQMHsnz5cn777TcyMzNp3bo1zz//PHFxcc6WJpF4HB63WLFGrWTivZ15ZVw3zqYXMun9Tew7lulwHbpzieT8/g1+bXrQ6NYR1fYXlBVRUq6jcaUatlLthX98D7TJOxCGcu5rfyfhfqGUGsosA84qsCRPqdSPnZh1AiEEHaOq9l9X4K/xq3cfdsmlAVY+AUF4B4ViuhSU7NEkri0tZ/O+VH7enMKQns0JDfIm8XROlWM+X3GY/OIyDp7IZtvBi3WyuycpAz8fs94LWdc3TsNgNKE26DBqzN0ZwT5BlJsMaMt112XXGcTFxfH0008zY8YMnn76acrKynj++eedLUsi8Tg8LmBX0Pumxrw/uS+hQd7M+GI7i9cew2gS1z7RBhiK8shc/h6akCgihz1bY43rQqF5wFnjKxKfBLTvhalMi/bkfnzU3vy96ygAwvyCqxxXOXlKBUczj6NRqmkdVnPt6Hr6sHUF5nJ8GwXjFxKO8dJ2W8zDLtLq2XkkjQW/HOHFDzbz4L9/491Fe4kO8+fRYR1oHxdG4qkcy5iK7YfT2HLgAg8OjqdlbCO+/OUIpWVXnztcrCsn6Uwug3o0B64/YOcXleGvLEVcSnAT7GOu/btLP7ZOp+ODDz7gySef5O2336a4uJjz58/zzDPP8OCDDxIWVr9seBKJpP547iRZoElkIO9O6sOnyw/x/bpkkk7n8tJDNxMcWHP+bVsgjAYyV7yHSa8jZsx0lD7+NR5XW8D2bdEJpV8QxYnb8I/vQbfYzrzc60naXjGIrKbkKUczk2kTbl5+sybMfdj1q2GXXRoR7R8cgtLPgNFSw65/wE7LLuE/3+0mJdVsW6NW0qZZCCMHtKZ9yzDatwjFx1tNh5ZhbDt4kcw8HUH+Xvxv2UFaxjZi1B1tuLF1BFP+u42lf5xg3J3tai3rwPFMTCbB7Z1j+fPQxesO2LmFpfgr9Kh8zalIKwfsK6+pK/L666+TmJhIr1692LJlC8ePH+fUqVOMGDG
CN954g9BQ90qxKpE0BDw6YAP4eKmZPLorHeLC+HT5ISa9v4lXH+5G+zj71CBy//iO0vNJRI6YjFdks1qPu1CYjrfamzDfkCrbFSo1/m17Unx4MyZ9KUovH7o3vrHa+VcmTykqK+ZMfiqjOg6rtcwAb3/KDGWUG8vR1BLUa6P80sIfgaGh+JgUGC81GtS3hl1WbmT2N7vJzNMy9s62dGwZTuumwXhpqtvr0NJ8rY6eyiG/qIy8ojKmPtIDtUpJh5Zh9OvahOUbU7ijezNiwmt+QdqTlEGgn4Y2zUNoHOHPxToE7NIyA3uOZbA/OQuD0cQz999o0WcO2KWo/c2BOvjSoEB3SZ6ydetWVq5cSVhYGOPGjaNfv34sXLiQbt26XftkiURiFzy2SfxKBt3SnHcn9cHbS8U/P/mTFZtSqkxbswXFiX9SsCuBoO53EdCh91WPvViUTuPAqBqbywM69EKUl6E9sbvW833U3vh7+ZF9aZnNxEvrY3e8ImFKZfw1FfnEra9lG7XmgO0b1IjA4EboL/20rOnDLjeYyMjVoi83Mv/nw5y6WMCLY7rywB3xdGgZVmOwBmgWHYS/j5r9xzNZsSmFm9pE0LbF5RrgI0Pbo1ErWPDLkZq1G03sScqgS5tIVEoFjSMCuJBZfM3r//qCncz5dg/bDl7gjz3nmbdkv+WcvMJS/JVl+ASaA3Wwr3s1iWu1Wkuzd3R0NH5+fjJYSyROxuNr2JWJi23E3Ml9+fDH/Xy56iiJp3OYNLorAb7W1TZrwqQvJWv1//BuEk/YwIer7Pvr3F6+P/QzKqUKfy8/WoU253TeeTpHt6/Rlk/TdqgCQyk++udVA3+Ybwi52nzAPJ3LW+XFDaEtaj0+wPtStrNyLcG+1o2QFmXFlAovFEoVCkCrMHcrWNMk/tGP+9m4N9Xy98iBrene/trNxyqlgrYtQtl06dzRg6oOqgtr5MsDd8Tz9epE9h7L4Oa2UVX2HzmVQ0Gxnts6xwLQOCKAklIDBcX6WrtHEk/ncPhkNuPubMff+t/A8k0pfPtrEjHh/oz9v3bk5ZcQrzDg18jc0uGv8UOtVLtNwL4y9wLgtrkXJJKGggzYV+Dvq2HKw935Zespvlp1lBfmbmLKw91p1ST4uuzqTh1A6HWE9n2wSnKUY1kn+Xjn1zQOjCI2KJqC0kI2nvqLMqOeuOCa1xhXKJQEtL+dgt2/YdQVo/INqPG4ML9gcnTmPuyjmcnEh7dCrar9klvyiZdZX8NWlJVQprycm7xM6QUI1HXMdJaaWcTmfanc3jmWuMZB+HipGXp73acOdWgZxt5jmXRsFWZpIq/MPX1asm7nWeb/fJjO/4hAo77cuLT1wAV8vFTc3M6cDCQ2wuzPC1nFtQbs5RtTCPTz4p7eLVGplNw/oDVp2SUsWX+cnh1jKCkw+72iSVyhUFxKnuIeTeINKfeCRNJQkAG7BhQKBcP7tCK+WQhzvt3Nyx9t5YkRnRjSs3md59AKIcgpKCU82DzHuyR5J0qfAHyaXa41ZxZn8+6fnxLhF8r0/i8Q4G0OmAaTkYuF6cQE1p5Nyr99Lwp2rqIkeSdBNw2s8ZhQ3xBO5JxhV+oBzhem0bvFLVfVXNsCICaTQKHgqt9dWV6CXnl5PrveKwAoqnNq0qUbTqBWq3jyvs71GvTXrV0U369L5qEhbWvcr1GreHxEJ2Z+sYNftpzkbwNaA+bm8L8OpdGjfTQ+XubboUnk5YBdU/A/n1HErsR0Hrgj3pJcRaFQMOGejmzef4G1O85iyDcHbKVfoOW8YJ8gCtykht2Qci9IJA0F2Yd9Fdq2COWDF/vRqVU4//3pIHO/33fN6UEVrNh0ksfeXE9OgQ5hNKBN2Ytf624oLjURa/U6Zm/9BKMw8Wqfpy3BGszNyM2CG1914Jd3TCvUIdGUJG6r9ZiogHCK9SW8++dnANxUSxN7Bf6WBUAuB2whBM++u5EvVx296rkaow7jpT5wAOEXiEIIdPm5Vz0PzKPBN+1L5a7bWtR7hH5cbCN+fOvuq6ac7dYuih7to1nyezI5Beb50IdSsinS6ul1U6zluIgQP9QqRY0Dz/TlRpasP45GpWRor6otAP6+GnrdGMvmfamWeekq30oB27cReTr3CNgSicT1kDXsa9AowJvpj/Xkxw3HWbz2GCcvFDDl4e40jQqs9ZwirZ4fNxzHaBIcP5cH5XsIKy0hIr4HAEaTkbnbvyCtKIPX+j5HbGBUrbZqQ6FQEND+dvL/WoGhOB91QHC1Y/7vhr40a9QYP40v4f4hhPtdfSpOxYpdB05eoE9zgVKpIDu/lPMZRVzIKmZQj2Y0iw6q8Vxvkw691+UWAVVAEKqSC6SfSOaGsIhayzSZBN/9loRKqeDefteXv7suaWYfG96RZ975g69XJ/LSmJvZeuACvt6qKv3aKqWCmHB/SxY1g9HEgeNZbD1wgR1H0tCWGrinT0saBVR/uRh8S3P+2HMenSkPAkHle9lfwT5BHM8+eV3fUSKReC4yYNcBpVLB6EHxtG0ewruL9vLiB5t5duRN9O3apMbjf9pwAm1pOQoF7D13jC3Fv9ErIohnW94EwDf7f+JgeiITuz1Ex6iam3DrQkCHXuT/uYySpL9o1P2uavt9ND50je1YZ3t+GnOT9uZDZ+jfLJsb20SQkppHmLKIEnxZsOooMx+vPtDIaBL4Uobe+3Jfuk9wMKoSQf75k9CzV43l6cuNzP1+H9sOXmTM4HhCg3xqPM6WxIT7c2+/G/jx9+McPJ5Fia6c2zrHVhuB3jgigJTz+Xy89AB/HbpIkbYcfx81t3eOpddNjbmxdc0vIe3jQmkaFYB/fhlgXvijgmCfIIrKSjCYjB697KhEIqkfMmBbwU1tIpn3Yj/+890e3l20l8TTOTw2vGOVxSWy83UkbDtFv65NOH4unxO5p8AL/mzkw6D8c5zNv8CalE0Mjb+Dga1qDmR1xSuiGV6RzShO3FZjwLYWpUKJWnhjUJdz9HQON7aJ4MS5HF4K+hVTUBTTjvVhT1IG3dpVbREoLinDT6mnuNLgN28/LxRCgSHrbI1l6cuNTJ+/nSMnc3h0WAdG9K2+epi9GD2oDcEB3py8kE9GrpZhvVtWO6ZZdBA7jqSzeV8qt3SIoU+XxnSJj7jmQiIKhYLBtzTn/Lq/AFD5XfZJsE8QAkFhWZEldaxEIpHUFRmwrSSskS9vPnU73/6axIpNKRw/n8+Uh7sTFeqHwWjio6UHMAnBQ//Xjm9WJ3JMf5pghRG1XxAf/LWA/LJCusV2Zmzne22ix799L/I2Laa8IBNNo+tf8tBkUIO6nKOnzLm5808fx19ZBsXnGB16mC9XBXFTm4gqzc85WdkAaPwvTwUTwoRCKPAqqp7HWwjBp8sPceRkDi+O6Ur/m2seDW8vNGpVjUG6Mvf2bUWHuDDatwy1DEarKwO7N2PTHiUYfarMCLBkO9MVyoAtkUisxuMGnZkMevSZ5zCWltQ7MYpapeTRYR147e89SMsqZvL7m9h5JI15P+xn37FMJt7bmahQP1rEBFLmnU9caTlPdR
tLXmkBzRo15vmef0eptI3rA9rfDkBJ4l/XbSu3sBRDmRq1xkDyuTzKDSY02ccB8G9/O7dwkODcRNZuP1PlvONJ5n7Z6MaXa94GkxGFQkWgIQdhLK9y/JodZ1m/6xyj7mjj8GBdVwL8vOjaNtLqYA0Q6OfFLTcEWKZ0VVCxQIu7TO2SSCSuhcfVsHN//4bCvWsAUHj5oA4KRx0YhjooDFVQOOqgMPO2S5+VXrUvvdmzYwwfvNiPt7/ZzayvdgHm5Tv/79YWAIRHCspzTDQpb0SnZjfxuu8/aBwUhY/Gdn21mpBovKJbUXJsB8E1rPplDUmncxEGNcHBJtL0RrYfvkgzcZEyvyjihj3HhbwMxim28991kfTt2oQdaTvZcPJPOpwQ3ICCqA5dLbYMJgMqpQYVJrQZqfjHmkdUHzuTy+crDtG1bSRjapmC1RAwaouqjBAH91sARCKRuBYeF7BD+j6IT7P2GApzMBRmYyjMxliYgzbzLMaSAqBqrVvp428O5oGVg/nl/yODwnjnud4sXHMMf181Iwe2tpxbJs4DEOTVAoC2Efbppw1o15PcjYswFGShblT7iOxrkXgmhzDKUelzaanOIGHzCf6uyUTTtDcKtYao+15CP/8f/K38d2b+msVZo3mqV5TaQIF/C9QBl/OeG4QRtdo8ijrzZDJxsXHkFpby9je7CA/25eWHbkaldJ91oa3FpCtC6Vu1ht1IBmyJRHIdeFzAVvkGWJqRr0QYyzEU5WIozMFYKaBXBPeytBRM2uoPW6VfEIMDwwi8aSAKxeW0mOfSD6IxCbINraudY0v825oDdknyThr1GGrVuUaTQHkpKUriqRzi/PNJVin5W1AiP19U4t3IQFjbLgBogiMJGDqR9Tu+5KzxKP2b9uFk5gkSg1O5I7RnFbsGkxFvb1/KhZKy1FOUG0zM/mY3JaUGZj5xGwF+Xjb7/q6IUVeIJiy2yjYvlQZ/jS/5ci62RCKpBw4P2KdPn2bKlCnk5+cTHBzMnDlzaNGiRZVjjEYjs2bNYuvWrSgUCp544glGjhxpd20KlQZNcBSa4NrnRZsM+krB/PL/ZWknyVm7AO/oOHyamJt6T+SfJ7bMxOFcv1rt2QJNaCxekc0pTtp+zYCtLS0n+WweiadzSTqTQ/LZPCJD/XhlbDd8M48S0kyHTulPC87T38fczx7QshMAFwvTmXNqHZl+3tyfUYA+S8cNvgr+CFBR0qZxlXKMJiO+Xl6kG4MJyj7Hgl+OkHQml5fH3kyLmJrncjckjLriKlO6Kgj2aSRr2BKJpF44fNDZ9OnTGTNmDGvXrmXMmDFMmzat2jGrVq3i3LlzrFu3jiVLlvDRRx+RmppagzXHo1R7oQmNwbdFJwI79yOk1/1E3DWR2LEzUTcKJ3PVxxj0Osr0JaSKMsIJ5mx6MQajya66fNrcQmlqMjt3J2Ewmig3mPh91zneX7yX//50kP/+dJDJczfx4L9+Zdrn2/nx92SKSsrpf3NTcgtKefGDzQzwPoyPygehAJ2XD128z1LkFYnKL4iD6YlM/f0/lJRrmdZ/MvHEcHP+WrpdPI6fETanHaiix2AyoFGpyVFHEliQwg2HPuHF+NN0C8rCVI9c5e6EMJYjyrTV+rDBvGpXngcOOluyZAnDhw9nwIABzJs3z7licnKgb184c6bq52sdv39/zedZY68u59XXnsR1sNN1cWgNOycnh8TERL766isAhg4dyhtvvEFubi6hoZezcP3666+MHDkSpVJJaGgod9xxB2vWrOGxxx67bg2pBWnsuXjouu1ciRCCs23jOZyRROnyF4lReGFSKGgS2haD0cS3vyYR6Hf9q37VhK7MwIFdJp7VCM6t+oIDv0agUCjQlZUT4K1GCDAKwa1BPgzv6EdkiB8Rwb54aXKBXO4KKmPrrhRaKrLQtRgIeYfZdkNbvC8epywkggP7f+LXE3/QLCiWV3o/RYR/GLpxUzjx38mEKrW0V7djb9phlh391bL+dUZxFuF+oZwM601OqopOAblE5vxF+g9bQaHEKyoOn2btUPtbtyqYO2DSm5OmqPxqqmEHcTjjGD8nra22T6lQ0qfFLZbBaQ2FNWvWsHPnTn766SfKy8sZPHgwDz74IJGR1z8NsV58/TVs2wYTJ8LgwZc/r61+TaocP3o0pKRUP6+2zzXZq61sW9irTb/E8djpujg0YKelpREVFYVKZX6oq1QqIiMjSUtLqxKw09LSiI293P8XExNDenq6TTT8dX4PPx391Sa2riTIO4B4vwi8Mi9wKkDgr1Jx681DWLF9Pys2pdilzApuat0cAy25OecUcMa88cqWeB1wwfzvyizZtytA+IXS9sY7UG1JZKP+IoQHAFlwfAO3NOnCMz0etoxw9w0Jp7zXRE5v/YF7etzPkb2fsOTIqio2O0e3J6LtDazNVzPq2d4EegnKLhxHdy6R0vOJFO1bhzDobe4Ll0CpQhPRrNrmVqEt+PPcHhYf+rnaPgUKogLCuaVJFwcIdAwmk4m5c+eyZMkSNBoNGo2GqKgoTp065ZyALQTMnQsmE2zdCnv3mj9v2wa//QZ33ln78cfNUxyrnFfb55rs1Va2LezVpl/ieOx4XTxu0NnIDkMZ0e7/7GJbo1SjUCjMSUMUSoQQKBQKlsyKwWiq35zvuqBQmJOBCNETYazb4iQ12lGqUChVfPe3eZjE5SZ8BdS4EMnN/Xoj+vZCoVDwVbP3MIqqzf4V/hjRt5VlpS/fuM74xnUGQJiMCJOx3npdGYVCUSVpSgVD4wcy5IY+1PRrqM3P7sy+ffvIzs5m/Pjxlm0pKSlVXtAdytatUHCpS0Kng9JS82etFiZMgFOnwMen5uMrqHxebZ9rsldb2bawV5t+ieOx43VxaMCOiYkhIyMDo9GISqXCaDSSmZlJTExMteMuXrxI587mB/uVNe7rQaFQ4GXnh6JCobSUBaBSKVE5IHW0QqFEob7+0dfmPNd1E2z5jkoVqlrOqW1ZzooXBE+joQXlq3HkyBFGjRrFq6++CsCJEycYPXo0MTEx/POf/+Svv/5i8+bNjhP0wQdQUmn52MrJkwoKYPZsmDGj9uNrOq+2z1fau1rZtrBXk36J47HjdXHooLOwsDDatWtHQkICAAkJCbRr167a2/b//d//sXTpUkwmE7m5ufz+++8MGTLEkVIlEokNyMvLw9f3cvKhNWvWMHDgQAIDA3n77beJi4u7ytl2IDm5ahCsjFYLq1fX/fhrcaW967FVF3s16Zc4HjteF4c3ic+YMYMpU6bwySefEBQUxJw5cwB4/PHHef755+nUqRPDhw/n4MGDDB48GIBnnnmGpk1dM4WlRCKpnbi4OFasWAGYa9fLli3j+++/d56go1df1/26j3eULXvYk9gGO14XhwfsVq1asXTp0mrb58+fb/msUqmYOXOmI2VJJBI7MGTIEFatWsWAAQMICwtj7ty51brAJBJJ3Whwg86MRvMgJluNKpdIXIWK33TFb9wd8PX1ZcGCBTXumzlzJqdOnWLatGk8/vjjdWpFk/e3pCFzrXtcIeq7Z
JWLsmfPHh566CFny5BI7MaiRYvo1q2bs2U4BXl/SzyB2u7xBhewS0tLOXLkCBEREZb53hJJQ8BoNJKVlUXHjh3x8dCpO/L+ljRkrnWPN7iALZFIJBJJQ8ThucQlEolEIpFYjwzYEolEIpG4ATJgSyQSiUTiBsiALZFIJBKJGyADtkQikUgkboAM2BKJRCKRuAEyYEskEolE4gbIgC2RSDwemY7Cvkj/2gYZsB1IYmIixcXFzpbhMkh/mJF+sB5b+6ysrMzy2R2Dy7lz5yi5tAazyWS6bnvSv67pU48J2Fu3bmXWrFlOeTDu2bOHMWPGsHjxYhQKhcPLrwnpDzPSD+6FrX22Y8cOnnjiCWbMmME333wDYNdrYevf286dOxk7diwzZ85k/PjxGI1GlMr6P9bd0b8e5VPRwDEajeLLL78UgwcPFu3btxcrVqxwWNl6vV688sorYujQoWL16tVV9plMJofpqIz0hxnpB/fC1j4rKysTs2bNEiNGjBC///67WL16tXjsscfEjz/+aCvJVbDH7+3o0aPivvvus/jj3nvvFYsWLaqXLXf0r6f5VAghGnwNW6lU0qRJExYtWsT777/PN998Q2ZmpkPK1mg0FBUV0aVLF+666y4Atm/fTkFBAQaDAXB885D0hxnpB/fC1j7TaDQ0bdqUDz/8kIEDBzJgwADi4uIoLS21i357/N527NhBy5YtueuuuzCZTDRp0oS2bdvWq6bpjv71NJ9CA138Y/Xq1RgMBtq3b0/r1q3R6/VoNBoUCgXjxo2je/fuPP/883Yp++zZszRv3hyTyYRSqSQ1NZVHH32UQYMGsWPHDsLDwwGIi4tjypQpCCHs3hwq/WFG+sG9sLXPNm7ciEajoXXr1kRFRVFQUEBQUBDl5eV4eXkxefJkunfvbrPlO239e7vSXnJyMvfeey9jxoxh48aNxMbGEhYWRnZ2NvPmzSMsLOyq9tzRv57m02rUq17uouTm5opHH31UPPjgg2L27NliyJAhYvfu3UKIy00Qe/fuFQMHDhSJiYlVzr3epsjk5GQxbNgw0aVLF3H27FkhhLnJRggh5s6dK+6//36RlJQkhBBi//79YtCgQeLQoUPXVea1kP4wI/3gXtjaZ4cPHxbjx48XDz/8sHjxxRfFhAkTxJkzZ6oco9frxcMPP2yT62/r31tN9v766y8hhBDnzp0Tc+bMqdIcPHz4cPHTTz/Vas8d/etpPq2NBtUknpGRQVhYGIsXL+bVV19l5MiRzJ49m7S0NBQKBUIIunbtSvfu3Vm6dCnp6el88sknwPUNhDAYDPzyyy+MHDmSnj17snjx4ipvTpMmTeLLL7+kbdu2ALRp04Y2bdpc/xe+BtIfZqQf3Adb+yw/P5+PP/6YQYMG8c033/DKK68QGxvL1q1bqxx38uRJANq1a0dmZiZLly4F6nf9bf17q8neu+++S3p6Ok2bNiUxMZHmzZtbjr/99tvJzc2t0Z67+teTfHo1GlTAPnHiBGfPngXMfQMTJkwgODiYX375BcDSb/DKK6+wePFi/va3v5GVlWU5vj4IIVCr1TzwwAOMGzeOl156iXXr1nHgwAHLBVMoFAQGBlrOWbhwIVqtlqZNm9b7u9YF6Q8z0g/ugT18FhwczOTJkxk5ciQAUVFRZGdnExoaCly+9ufPnycwMJAffviBiRMnWvpC63P9bf17q8leSEgIy5cvB6BTp06WUdILFizgjz/+oGfPntXsuLN/PcmnV8XqOrkLo9VqRb9+/SxNG0IIsWPHDtGrVy+h1+uFEEIcO3ZMjB07VjzxxBMiNTXVLjreeOMN8fTTT4vi4mJL80lZWZlYv369GDZsmHjxxRfFhQsX7FJ2ZaQ/zEg/uC+28FnFeRXX+rHHHhMbNmyocswrr7wi4uPjxfTp08W5c+euS7Otf2812du+fbvo3bu30Ov1oqysTDz33HPi0UcfFc8995xV+t3Fv57m09pwu4Cdk5NT4/YKx3z22Wfi0UcfrbJt9OjRlh9QRkaGOHz4sE3LrqCiz6KkpEQMHjxYrF+/XgghRF5enhBCiK1bt4o///yzXmVbiyP8cS0c6Y/CwsIatzvCD7WVXYEr/S5cBVvfS9eyV3HN09LSxPDhwy0P+Qp7q1evFps2bbL6e9RWjq1+b1ez98ADD4jff/9dCCFEeXm55bvUBVv/Jm3hX1vfw/WxdzWfusJ97jYBu6SkRMyYMUMMGzZMvPHGG2Ljxo1CCLOjy8vLLcelp6eLkSNHio8++kgIYb6RJ06cKNLT0+1ethCXL1pCQoIYMWKEmDRpkrj33ntFWVlZvcuviaKiIvH666+LnTt3Vttnb39crWyDwVDlb3v7o7i4WLz11lti/Pjx4r///a84cOCARYe9/XC1sh3tB3fB1veSNfaEEGL9+vXijTfeEEII8frrr4vJkydbgktdsPV950h79flNWmNPiPr519b3sCPtOfo+d5s+7Hnz5lFSUsL8+fNp3bo1r732GufPn0ehUKBWqwFYv349Wq2WGTNmsHnzZl566SUefPBBmjVrZhlOb6+y161bR2JioiUjTl5eHklJSYSHh/Pdd9/h5eV1/U64xNGjR3n88cdZvnw5X3zxBTqdrsp+e/rjWmWrVCrAMf7YunUro0aNwsfHh+eee46ioiI+//xziw57+uFaZTvSD+6Ere+luj4XDh8+DEBSUhJr1qzh/vvvR61W89Zbb6HRaOqk3db3naPtWfubrKu96/Gvre9hR9tz+H1uk7BvZ4qLi8Vzzz0njh07Ztk2ZcoUMXXqVKHVakVKSop44IEHxLPPPisyMzOFEOa3pa1bt4qUlBSHlV3RLLdt2zbx6quviuTk5OsquzbOnTtnaW4ZOnRotQw/KSkpYvTo0XbxhzVl29sfO3furFL+5s2bxfPPPy9yc3OrabG1H6wp21G/C1fH1veSNfaysrKEEEI899xz4qGHHqpXP6qt7ztn2quLf62xV1//2voedqY9R9znLhmwMzIyRElJSZVt48aNEx9//LHl7/T0dDF48GCxf/9+kZSUJLZv3+72ZVujqbi4WAghxI8//ijuvfdeyw0jhBB79uyxzFF057KvpaW0tFTodDpLM9Sff/4pxo4da9m/d+9eu/nBkWW7K7a+l2xhz5p+Xlv/9j3B3rX8a+v7yNXt2RqXCtgGg0H85z//EfHx8WLBggVCiMuDATZt2iSGDRtmebMRwjwab9KkSdVsuFvZ1miqiTFjxoh58+bVasPdyq6vls8++0zMmjXLplqcWba7Yut7ydH3pq1/+55mr7b9tryPXN2evXCpPuxt27ah0+mYMmUKq1atIjU11TKX7cYbbyQ+Pp7333/fcvztt99OVFQUJpPJMteuok/Bncq2RlNljEYjAJMnT2bt2rVcuHCBxYsXc+zYMcsxtvSHo8qur5bk5GRuvfVWABISEjhz5sx1a3Fm2e6Kre8lR9+btv7te5q9mrD1feTq9uyG3V8JrKCsrMzS9/HYY4+Jt956q0oauLNnz4pbb71VfPvtt2LDhg1ixIgR4rvvvnP7sq3R
VBsPPPCAuPHGG8W999573f2zzi67PlpKSkrE448/Ln744Qfx5JNPiokTJ4rz58+7ddnuiq3vJUffm7b+7XuavfqWYc195Or27IVLBWwhLjd1JScni/79+4sDBw5Y+g+EMPchfPzxx+KBBx4QK1eubDBlW6OpYlt5ebnQ6XRi3rx5YsCAASIhIaHBlG2NFiHMfUnx8fFi3LhxDvWDvct2V2x9Lzn63rT1b9/T7FlbhhDW30eubs8eOCVgp6amitLSUiFEze3+FdtmzJghnn322QZTtr00Xc+gN2eWbUstWVlZ4rPPPnPLst0VW99Ljr43bf3b9zR7NWHr+8jV7Tkahy6vmZCQwOeff07jxo0pLS3lq6++AsBkMqFQKGpM0n733XfTtWtXNm3axKxZs+jbt6/blW0vTW+88Qb9+vVzu7JtreXNN9+kT58+ble2u2Lre8nR96atf/ueZs8ePr3yPnJ1e07DUW8Ga9euFaNHjxa7du0SJpNJ3HrrrWLPnj1VjklMTBTr16+3ZIVJT08XPXr0EKNGjbqumpwzy3ZFTa7kD+kH98LWPnP0NXB1/a5uT/rUuThslPjWrVsZOHAg3bt3JzMzk5tuuong4OCKlwbmzp3Lc889h0ajwcvLi6KiIj755BMmTZrEkiVLalwpxR3KdkVNruQP6Qf3wtY+c/Q1cHX9rm5P+tS52K1JPDU1lSZNmljWBf3555+ZPXs2AwYMYMeOHXTo0IHs7Gxat27NhAkTOHHiBH379q2Sxq7iXHcq2xU1uZI/pB/cC1v7zNHXwNX1u7o96VMXw9ZV9q1bt4oePXqIoUOHioKCgir7Tp48KV555RWxf/9+IYR5ZZe+ffuKQ4cOWY65nsnnzizbFTW5kj+kH9wLW/vM0dfA1fW7ur2acHXNnnCf27RJPD8/n3Xr1vHqq6/i4+NDQkJClf1hYWGcOnWKFi1aABAdHU3Hjh3JyMiwHFPfyefOLNsVNbmSP6Qf3Atb+8zR18DV9bu6vZpwdc0ec59fb8QvKyursmTZ8ePHhRBC/Pbbb+Kuu+6yLCReMZ9t0qRJYvLkySI/P1/Mnj1b3HffffVe4tCZZbuiJlfyh/SDe2Frnzn6Gri6fle3J33qHlxXH/Z3333H999/T7du3WjcuDETJ06s0vb/2GOP0bp1a1588UVL/8D58+d58803KSoqom3btrz44ov4+/u7VdmuqMmV/CH94F7Y2meOvgaurt/V7UmfuhH1jfTr168XI0eOFImJiWL//v3itttuE6tWrRJCXH6jOXr0qOjfv784efKkEEJYlh8rKSkR+fn59S3aqWW7oiZX8of0g3tha585+hq4un5Xtyd96l6orQnuxcXFBAQEAHDs2DHuuusu2rVrB8CUKVNYuHAhHTt2pEWLFhgMBtq3b8/dd9/N66+/jslkIiQkhHnz5uHn52f1i4Uzy3ZFTa7kD+kH98LWPnP0NXB1/a5uT/rUfanToDODwcDcuXN56qmnmDdvHseOHaNJkyasWrXKcsywYcMIDAzkl19+AS534BcUFLBv3z5uu+025s2bZ7VAZ5btippcyR/SD+6FrX3m6Gvg6vpd3Z70qftzzYC9f/9+/va3v1FSUsJLL72EyWTinXfeoXv37iiVSjZs2GA5duLEiSxbtgyj0YhCoeCnn35CoVCwadMmnnzySavFObNsV9TkSv6QfnAvbO0zR18DV9fv6vakTxsI12ozP3z4sPj+++8tf+/du1c8/fTT4ty5c+Lrr78W9957r2VfXl6eeP7550VGRoYQQlQZwVcfnFm2K2pyJX9IP7gXtvaZo6+Bq+t3dXs14eqa5X1enWvWsOPj47nnnnssC8H7+fmRkZFBbGws48aNw8/PjylTprBr1y5mzpyJEILw8HAA1GqrushdqmxX1ORK/pB+cC9s7TNHXwNX1+/q9qRPGwbXDNgajQY/Pz/LcPkjR44QFxeHSqVCqVQyd+5cbrjhBj799FOaNWvGhx9+iFJpm3wszizbFTW5kj+kH9wLW/vM0dfA1fW7uj131Czv8xqoa1W88jqhS5YsEUKYJ6gnJSUJIYRljVF74MyyXVGTK/lD+sG9sLXPHH0NXF2/q9tzR83yPr9MnV9HVCoVQghycnLQ6XS89NJL/PDDD5a3H29vb7u9VDizbFfU5Er+kH5wL2ztM0dfA1fX7+r23FGzvM8rYU10T0lJEfHx8WLkyJHixx9/tNlbg6uXXRvSH87X4kp+cBds7TNHXwNX1+/q9hxRhqvbc1esCthFRUXis88+syzy7UicWXZtSH84X4sr+cFdsLXPHH0NXF2/q9tzRBmubs9dsdt62BKJRCKRSGxHAx9SJ5FIJBJJw0AGbIlEIpFI3AAZsCUSiUQicQNkwJZIJBKJxA2QAVsikUgkEjdABmyJRCKRSNwAGbAlEolEInED/h9IEAojzYzwqQAAAABJRU5ErkJggg==",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "df = pd.read_pickle('output/search/finetune/Transformer_tra/K3_traHs16_traSrcLR_TPE_traLamb0.0_head4_hs64_bs512_do0.1_lr0.0005_seed1000/pred.pkl')\n",
+ "code = 'SH600157'\n",
+ "date = '2018-09-28'\n",
+ "lookbackperiod = 50\n",
+ "\n",
+ "prob = df.iloc[:, -3:].loc(axis=0)[:, code].reset_index(level=1, drop=True).loc[date:].iloc[:lookbackperiod]\n",
+ "pred = df.loc[:,[\"score_0\",\"score_1\",\"score_2\",\"label\"]].loc(axis=0)[:, code].reset_index(level=1, drop=True).loc[date:].iloc[:lookbackperiod]\n",
+ "e_all = pred.iloc[:,:-1].sub(pred.iloc[:,-1], axis=0).pow(2)\n",
+ "e_all = e_all.sub(e_all.min(axis=1), axis=0)\n",
+ "e_all.columns = [r'$\\theta_%d$'%d for d in range(1, 4)]\n",
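+    "# smooth the router's argmax selection with a 7-day rolling mean for visualization\n",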
+ "prob = pd.Series(np.argmax(prob.values, axis=1), index=prob.index).rolling(7).mean().round()\n",
+ "\n",
+ "fig, axes = plt.subplots(1, 2, figsize=(7, 3))\n",
+ "e_all.plot(ax=axes[0], xlabel='', rot=30)\n",
+ "prob.plot(ax=axes[1], xlabel='', rot=30, color='red', linestyle='None', marker='^', markersize=5)\n",
+ "plt.yticks(np.array([0, 1, 2]), e_all.columns.values)\n",
+ "axes[0].set_ylabel('Predictor Loss')\n",
+ "axes[1].set_ylabel('Router Selection')\n",
+ "plt.tight_layout()\n",
+ "# plt.savefig('select.pdf', bbox_inches='tight')\n",
+ "plt.show()"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# RQ2\n",
+ "\n",
+    "You could prepare the source data for this test as below:\n",
+ "1. Random: Setting `src_info` = \"NONE\"\n",
+ "2. LR: Setting `src_info` = \"LR\"\n",
+ "3. TPE: Setting `src_info` = \"TPE\"\n",
+ "4. LR+TPE: Setting `src_info` = \"LR_TPE\""
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 6,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "exps = {\n",
+ " 'Random': glob.glob('output/search/LSTM_Attn_tra/K10_traHs16_traSrcNONE_traLamb1.0_hs256_bs1024_do0.1_lr0.0001_seed*/pred.pkl'),\n",
+ " 'LR': glob.glob('output/search/LSTM_Attn_tra/K10_traHs16_traSrcLR_traLamb1.0_hs256_bs1024_do0.1_lr0.0001_seed*/pred.pkl'),\n",
+ " 'TPE': glob.glob('output/search/LSTM_Attn_tra/K10_traHs16_traSrcTPE_traLamb1.0_hs256_bs1024_do0.1_lr0.0001_seed*/pred.pkl'),\n",
+ " 'LR+TPE': glob.glob('output/search/finetune/LSTM_Attn_tra/K10_traHs16_traSrcLR_TPE_traLamb2.0_hs256_bs1024_do0.1_lr0.0001_seed*/pred.pkl')\n",
+ "}"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 7,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "application/vnd.jupyter.widget-view+json": {
+ "model_id": "910721fc4a7b46eea5ba6d50647320d4",
+ "version_major": 2,
+ "version_minor": 0
+ },
+ "text/plain": [
+ " 0%| | 0/4 [00:00, ?it/s]"
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "res = {\n",
+ " name: backtest_multi(exps[name])\n",
+ " for name in tqdm(exps)\n",
+ "}\n",
+ "report = pd.DataFrame({\n",
+ " k: v[0]\n",
+ " for k, v in res.items()\n",
+ "}).T"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 8,
+ "metadata": {
+ "scrolled": true
+ },
+ "outputs": [
+ {
+ "data": {
+ "text/plain": [
+ " MSE MAE IC ICIR AR VR SR MDD\n",
+ "Random 0.159 (0.001) 0.321 (0.002) 0.048 0.362 11.4% 14.1% 0.810 21.1%\n",
+ "LR 0.158 (0.001) 0.320 (0.001) 0.053 0.409 10.3% 13.4% 0.772 20.8%\n",
+ "TPE 0.158 (0.001) 0.321 (0.001) 0.049 0.381 10.3% 14.0% 0.741 21.2%\n",
+ "LR+TPE 0.157 (0.000) 0.318 (0.000) 0.059 0.460 12.4% 14.0% 0.885 20.4%"
+ ]
+ },
+ "execution_count": 8,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "report\n",
+ "# print(report.to_latex())"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# RQ3\n",
+ "\n",
+    "Set `lamb` = 0 to obtain results without Optimal Transport (OT)."
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 9,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAUAAAAEDCAYAAABEXN1oAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAAUjUlEQVR4nO3de0zV9/3H8RceRExBDcjlMEdtaWRk1EvXOGzqthQNpD30uMz2dNgtSye2GtalW1a1S7nMpM5s7pfW3iLZ7Cgmpdi1JzBvc6ZbaWi3NlvAHS/TYSzdERB+ririgcP5/UE4vyHYc5Dv8cj5PB9JEzl++J43X+3Tc/2cuEAgEBAAGGhatAcAgGghgACMRQABGIsAAjBWfLQHkKT+/n4dOXJEaWlpstls0R4HQIzw+/3q7u5Wfn6+EhMTx/z+TRHAI0eOaM2aNdEeA0CM2r17t+6+++4xl98UAUxLS5M0PGRmZmaUpwEQK86ePas1a9YEG3O1myKAI3d7MzMzNW/evChPAyDWXOuhNZ4EAWAsAgjAWAQQgLFuiscAgalqYGBAHR0d6u/vj/YoRrPZbJozZ47mzp2radPCv11HAIFJ6OjoUHJysubPn6+4uLhoj2OkQCCggYEBdXZ2qqOjQ9nZ2WF/L3eBgUno7+9Xamoq8YuiuLg4JSQk6Atf+IIuXbo0oe8lgMAkEb+bw0Tu+ga/JwJzAMCUwGOAmJJ8/gEl2KbfdMf2DfiVMN3697NbeVyn06n6+nolJibqtddeU0lJiVJTUyVJO3bsUF9fnzZu3Bh6Jp9Pv/rVr3To0CHFx8crMTFR5eXlWrFihd577z398pe/lCSdO3dOQ0NDSk9PlySVl5dr5cqVlvwsk0UAMSUl2Kbr4fr1ETn2m65Xrvt7E6bbVPJjt4XTDGvc7rTsWG73/89XW1ure+65JxjAiaiqqlJfX59+//vfa8aMGTpx4oTWrl2r2bNna/ny5Vq+fLmkiUX1RuMuMBAj3njjDVVXV0uSWltblZubq9bWVknDsaqvr5ck5ebm6tKlS3rllVfU1dWlJ598Uk6nUydPnpQkdXZ2qqysTMXFxVq3bp0uX7485ro+/fRT7du3T1VVVZoxY4YkacGCBXriiSf04osv3ogf1xIEEIgRy5YtU0tLiySppaVFS5Ys0QcffBD8etmyZaPWr1+/Xunp6XrhhRfkdrt1xx13SBrenWn79u3at2+fBgcH1djYOOa6Tpw4oezsbM2ZM2fU5YsXL9axY8ci8NNFBgEEYsStt96qK1eu6OzZs2ppadGPfvQjtbS0yOv1amBgIOzXx917772aNWuW4uLitHDhQp05c2bMmlj5LDUCCMSQgoICvfvuu+rp6dHSpUvV3d2td999V1/96lfDPsbIXVpp+B0Wfr9/zJoFCxbozJkzOn/+/KjL//73vys3N/e657/RCCAQQwoKCrRz504tWbJEknTXXXeppqZmzN3fEbfccosuXLgw4euZN2+eiouLVVVVpStXrkgavlv86quvqry8/Pp/gBuMZ4EBC/kG/JY+Y/vfxw3nZTAFBQV6+umng8ErKChQfX29CgoKxl3/3e9+V88884wSExO1ffv2Cc1UVVWl7du36/7779f06dM1Y8YM/fSnP9XSpUsndJxoirsZPhi9o6NDhYWF+uMf/8iGqAjbzfAymKNHjyovLy8ic2Dirv7zCNUW7gIDMBYBBGAsAgjAWAQQgLEIIABjEUAAxiKAgIV8/oEpdVzT8UJowEKR2qZrMlt0Xc2q/QDDdfV1XC2a+woSQMAwVu0HGK5Q1xHNfQXDugvc3t4ul8uloqIiuVwunT59esyanp4erVu3TiUlJcH3CA4ODlo2KIDPF+n9AC9duqTNmzfL4XDI4XBo586dweu+7777dOLEiTFfX+s6RkR7X8GwAlhZWanS0lIdOHBApaWlqqioGLPm1VdfVU5OjhobG9XY2Kh//OMfOnjwoOUDAxhfpPcDfPnllzU0NKTGxka98cYbcrvd+tOf/vS5M13rOkZEe1/BkAHs6emRx+ORw+GQJDkcDnk8HvX29o5aFxcXp0uXLmloaEg+n08DAwPKyMiIzNQAxoj0foAtLS166KGHFBcXp6SkJD3wwAPB4F6vaG9FEDKAXq9XGRkZstmGd6Kw2WxKT0+X1+sdtW7Dhg1qb2/XvffeG/zvK1/5SmSmBjCuSO4HGAgExnwE6MjXNptNQ0NDwctHtsgKJdr7Clr2Mpj9+/crNzdXzc3N+vOf/6yPPvpI+/fvt+rwAMIQyf0A77nnHu3Zs0eBQEAXL17U3r17g8fNzs5WW1ubpOFbiufOnQvrOqK9r2DIZ4Htdrs6Ozvl9/uD/xp0dXXJbrePWldXV6fnnntO06ZNU3Jysu677z59+OGHKi4ujtjwwM3G5x+w9CUr/33ccD6qM5L7AW7YsEFbtmxRSUmJJOnBBx/U1772NUnSD3/4Q23atEkNDQ266667lJWVdc3ruPpxwGjuKxjWfoDf+c53tHr1ajmdTrndbu3Zs0evv/76qDVPPPGE8vPzVV5eLp/Pp8cff1wrV65UaWlpyCHYDxDXg/0AcbWI7AdYVVWluro6FRUVqa6uLvhUe1lZWfBm7zPPPKOPP/5YJSUlWrVqlebPn6+HH37Yip8JACIirBdC5+TkqKGhYczlNTU1wV9nZ2dr165d1k0GABHGe4GBSYr2Szkw7L+fhQ4XAQQmITExUT09PUQwigKBgHw+nz799FPdcsstE/pe3gsMTMK8efPU0dGh7u7uaI9itPj4eM2ePVtz586d2PdFaB7ACNOnT9dtt90W7TFwnbgLDMBYBBCAsQggAGMRQADGIoAAjEUAARiLAAIwFgEEYCwCCMBYBBCAsQggAGMRQADGIoAAjEUAARiLAAIwFgEEYCwCCMBYBBCAsQggAGMRQADGIoAAjEUAARiLAAIwFgEEYCwCCMBYBBCAsQggAGMRQADGIoAAjEUAARiLAAIwFgEEYCwCCMBYBBCAscIKYHt7u1wul4qKiuRyuXT69Olx1+3du1clJSVyOBwqKSnRuXPnrJwVACwVH86iyspKlZaWyul0yu12q6KiQrW1taPWtLW16cUXX9Rvf/tbpaWl6cKFC0pISIjI0ABghZC3AHt6euTxeORwOCRJDodDHo9Hvb29o9a99tpreuyxx5SWliZJSk5O1owZMyIwMgBYI2QAvV6vMjIyZLPZJEk2m03p6enyer2j1p06dUqffPKJ1qxZo29+85t6+eWXFQgEIjM1AFggrLvA4fD7/Tp+/Lh27doln8+ntWvXKisrS6tWrbLqKgDAUiFvAdrtdnV2dsrv90saDl1XV5fsdvuodVlZWSouLlZCQoKSkpJUWFio1tbWyEwNABYIGcDU1FTl5eWpqalJktTU1KS8vDylpKSMWudwONTc3KxAIKCBgQF98MEH+tKXvhSZqQHAAmG9DKaqqkp1dXUqKipSXV2dqqurJUllZWVqa2uTJD3wwANKTU3V/fffr1WrVumOO+7Q6tWrIzc
5AExSWI8B5uTkqKGhYczlNTU1wV9PmzZNmzdv1ubNm62bDgAiiHeCADAWAQRgLAIIwFgEEICxCCAAYxFAAMYigACMRQABGIsAAjAWAQRgLAIIwFgEEICxCCAAYxFAAMYigACMRQABGIsAAjAWAQRgLAIIwFgEEICxCCAAYxFAAMYigACMRQABGIsAAjAWAQRgLAIIwFgEEICxCCAAYxFAAMYigACMRQABGIsAAjAWAQRgLAIIwFgEEICxCCAAY4UVwPb2drlcLhUVFcnlcun06dPXXPuvf/1LixYt0rZt26yaEVOQb8Af7RGAkOLDWVRZWanS0lI5nU653W5VVFSotrZ2zDq/36/KykqtWLHC8kExtSRMt6nkx+6IHb9xuzNix4Y5Qt4C7OnpkcfjkcPhkCQ5HA55PB719vaOWbtz50594xvf0Pz58y0fFACsFjKAXq9XGRkZstlskiSbzab09HR5vd5R644dO6bm5mZ973vfi8igAGC1sO4ChzIwMKBnn31WW7duDYYSAG52IQNot9vV2dkpv98vm80mv9+vrq4u2e324Jru7m6dOXNG69atkyR99tlnCgQCunjxorZs2RK56QFgEkIGMDU1VXl5eWpqapLT6VRTU5Py8vKUkpISXJOVlaUPP/ww+PWOHTvU19enjRs3RmZqALBAWC+DqaqqUl1dnYqKilRXV6fq6mpJUllZmdra2iI6IABESliPAebk5KihoWHM5TU1NeOu/8EPfjC5qQDgBuCdIACMRQABGIsAAjAWAQRgLAIIwFgEEICxCCAAYxFAIEZEeg/GWNzj0ZLNEABEH3swThy3AAEYiwACMBYBBGAsAgjAWAQQgLEIIABjEUAAxiKAAIxFAAEYiwACMBYBBGAsAgjAWAQQgLEIIABjEUAAxiKAAIxFAAEYiwACMBYBBGAsAgjAWAQQgLGMCCAfFwhgPEZ8LCYfFwhgPEbcAgSA8RBAAMYigACMRQABGIsAAjBWWM8Ct7e3a9OmTTp//rzmzJmjbdu2af78+aPWvPTSS9q7d69sNpvi4+P11FNPafny5ZGYGQAsEVYAKysrVVpaKqfTKbfbrYqKCtXW1o5as3DhQj322GOaOXOmjh07pkcffVTNzc1KTEyMyOAAMFkh7wL39PTI4/HI4XBIkhwOhzwej3p7e0etW758uWbOnClJys3NVSAQ0Pnz562fGAAsEjKAXq9XGRkZstlskiSbzab09HR5vd5rfs8777yj7OxsZWZmWjcpAFjM8neC/OUvf9Hzzz+v3/zmN1YfGgAsFfIWoN1uV2dnp/z+4fe7+v1+dXV1yW63j1n7t7/9TT/5yU/00ksv6fbbb7d+WgCwUMgApqamKi8vT01NTZKkpqYm5eXlKSUlZdS61tZWPfXUU3rhhRf05S9/OTLTAoCFwnodYFVVlerq6lRUVKS6ujpVV1dLksrKytTW1iZJqq6uVn9/vyoqKuR0OuV0OnX8+PHITQ4AkxTWY4A5OTlqaGgYc3lNTU3w12+99ZZ1UwHADcA7QQAYiwACMBYBBGAsAgjAWAQQgLEIIABjEUAAxiKAAIxFAAEYiwACMBYBBGAsAgjAWAQQgLEIIABjEUAAxiKAAIxFAAEYiwACMBYBBGAsAgjAWAQQgLEIIABjEUAAxiKAAIxFAAEYiwACMBYBtIDPPzAljw2YLj7aA8SCBNt0PVy/PiLHftP1SkSOC4BbgAAMRgABGIsAAjAWAQQQllh8so8nQQCEJRaf7OMWIABjEUAAxiKAAIxFAAEYiwACMFZYAWxvb5fL5VJRUZFcLpdOnz49Zo3f71d1dbVWrFihlStXqqGhwepZAcBSYQWwsrJSpaWlOnDggEpLS1VRUTFmTWNjo86cOaODBw+qvr5eO3bsUEdHh+UDA4BVQr4OsKenRx6PR7t27ZIkORwObdmyRb29vUpJSQmu27t3rx566CFNmzZNKSkpWrFihfbv36+1a9eGHMLv90uSzp49e70/R0gDfb0RO3ZHR4d8/3s5YseeqjjnNx7nfLSRpow05mohA+j1epWRkSGbzSZJstlsSk9Pl9frHRVAr9errKys4Nd2uz3soHV3d0uS1qxZE9b6m03h4Z9H7tj/UxixY09lnPMbbyqf8+7ubt16661jLr8p3gmSn5+v3bt3Ky0tLRhaAJgsv9+v7u5u5efnj/v7IQNot9vV2dkpv98vm80mv9+vrq4u2e32Mev+/e9/a+HChZLG3iL8PImJibr77rvDWgsAEzHeLb8RIZ8ESU1NVV5enpqamiRJTU1NysvLG3X3V5KKi4vV0NCgoaEh9fb26tChQyoqKprk6AAQOXGBQCAQatGpU6e0adMmffbZZ5o1a5a2bdum22+/XWVlZXryySd15513yu/362c/+5nef/99SVJZWZlcLlfEfwAAuF5hBRAAYhHvBAFgLAIIwFgEEICxCCAAYxFAAMYigACMRQABGIsAAjAWAZwEXkN+43HOb7xYPucEcII6Ojp07NgxSVJcXFyUpzHDeOd8aGgomiPFvPHO+bX21JvKeCtcmAYHB/Xzn/9chw8f1qxZs/Ttb39bq1evZvuuCBrvnH/rW99SfHx88PdHfg1rhPp7HmvnnFuAYWpra9OFCxd0+PBhbd26VXv27JHH45Ek7d69W++9956k2PxXMlrGO+cnT56UNLwD+bPPPqt33nlHUmzfTbuRxjvnx48flyQdOHBAGzduDH7eTyyccwIYwsgfckdHR3CvwytXrujUqVM6ePCgPvnkEx04cEA1NTWShnfMjoW/GNF0rXN+8uRJ7dmzR83NzWpra9Ojjz6qP/zhD/rPf/7DwxGT9Hnn3O126+OPP9aJEydUXl6uv/71r+rr64uJc85d4DA999xzmjlzpi5cuKB//vOfWrRokdxut/bt26ekpCT94he/0Be/+EU98sgjMXc3IVquPudLlizR7373O7355pvBzXZ//etf6/vf/35ww15MztXnfPHixXr77bf19ttvKy0tTY2NjTp06JAWLlyoBx98UGlpadEeeVK4BRjCyL8PBQUFqq2tVWZmpl5//XWtX79eixYtCn6eyYYNG7R//37t2rVLR44ciebIU961zvm6deu0ePHi4MMMR48e1eXLl+V2u3X06NFojjzlXeucP/7441q8eLF8Pp8uX76spUuX6vnnn9fRo0fV19cX5aknjwCGMHIzPzMzUw6HQ8uWLZMkdXV1qa2tTXPnzlUgENDFixd17tw5dXV1acGCBdEcecq71jnv7u5WW1ubZs+eLWn4ManDhw/rypUruu2226I2bywIdc6Tk5M1c+ZMnT17Vlu3blV+fr4yMzOjObIluJ8WpszMTCUlJcntdis3N1fnz59XYWGhkpOTJUlJSUnasWMH/yNa6FrnfNasWfL5fFqwYIEKCwt15513RnvUmPF557y/v199fX165JFHYubvOY8BTsBHH32kt956S62trRocHNTTTz+twsLRH+cXCAQUCAQ0bRo3rq0Q7jmXeF2mVcI559LweZ/q55wATtDg4KDef/99ff3rX4
/2KMbgnN94ppxzAjgBV/+LxzOPkcc5v/FMOucEEICxeKAKgLEIIABjEUAAxiKAAIxFAAEYiwACMBYBBGAsAgjAWP8HimDX59TKOMMAAAAASUVORK5CYII=",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "a = pd.read_pickle('output/search/finetune/Transformer_tra/K3_traHs16_traSrcLR_TPE_traLamb0.0_head4_hs64_bs512_do0.1_lr0.0005_seed3000/pred.pkl')\n",
+ "b = pd.read_pickle('output/search/finetune/Transformer_tra/K3_traHs16_traSrcLR_TPE_traLamb2.0_head4_hs64_bs512_do0.1_lr0.0005_seed3000/pred.pkl')\n",
+ "a = a.iloc[:, -3:]\n",
+ "b = b.iloc[:, -3:]\n",
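+    "# one-hot encode each sample's selected predictor (argmax of the router probabilities)\n",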
+ "b = np.eye(3)[b.values.argmax(axis=1)]\n",
+ "a = np.eye(3)[a.values.argmax(axis=1)]\n",
+ "\n",
+ "res = pd.DataFrame({\n",
+ " 'with OT': b.sum(axis=0) / b.sum(),\n",
+ " 'without OT': a.sum(axis=0)/ a.sum() \n",
+ "},index=[r'$\\theta_1$',r'$\\theta_2$',r'$\\theta_3$'])\n",
+ "res.plot.bar(rot=30, figsize=(5, 4), color=['b', 'g'])\n",
+ "del a, b"
+ ]
+ },
+ {
+ "cell_type": "markdown",
+ "metadata": {},
+ "source": [
+ "# RQ4\n",
+ "\n",
+ "You could prepared the source data for this test as below:\n",
+ "1. K=1: which is exactly the alstm model\n",
+ "2. K=3: Setting `num_states` = 3\n",
+ "3. K=5: Setting `num_states` = 5\n",
+ "4. K=10: Setting `num_states` = 10\n",
+ "5. K=20: Setting `num_states` = 20\n"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 10,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "exps = {\n",
+ " 'K=1': glob.glob('output/search/LSTM_Attn/hs256_bs1024_do0.1_lr0.0002_seed*/info.json'),\n",
+ " 'K=3': glob.glob('output/search/finetune/LSTM_Attn_tra/K3_traHs16_traSrcLR_TPE_traLamb2.0_hs256_bs1024_do0.1_lr0.0001_seed*/info.json'),\n",
+ " 'K=5': glob.glob('output/search/finetune/LSTM_Attn_tra/K5_traHs16_traSrcLR_TPE_traLamb2.0_hs256_bs1024_do0.1_lr0.0001_seed*/info.json'),\n",
+ " 'K=10': glob.glob('output/search/finetune/LSTM_Attn_tra/K10_traHs16_traSrcLR_TPE_traLamb2.0_hs256_bs1024_do0.1_lr0.0001_seed*/info.json'),\n",
+ " 'K=20': glob.glob('output/search/finetune/LSTM_Attn_tra/K20_traHs16_traSrcLR_TPE_traLamb2.0_hs256_bs1024_do0.1_lr0.0001_seed*/info.json')\n",
+ "}"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 11,
+ "metadata": {},
+ "outputs": [],
+ "source": [
+ "report = dict()\n",
+ "for k, v in exps.items():\n",
+ " \n",
+ " tmp = dict()\n",
+ " for fname in v:\n",
+ " with open(fname) as f:\n",
+ " info = json.load(f)\n",
+ " tmp[fname] = (\n",
+ " {\n",
+ " \"IC\":info[\"metric\"][\"IC\"],\n",
+ " \"MSE\":info[\"metric\"][\"MSE\"]\n",
+ " })\n",
+ " tmp = pd.DataFrame(tmp).T\n",
+ " report[k] = tmp.mean()\n",
+ "report = pd.DataFrame(report).T"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 12,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "image/png": "iVBORw0KGgoAAAANSUhEUgAAAZ0AAADMCAYAAACoen5EAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjMuNCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy8QVMy6AAAACXBIWXMAAAsTAAALEwEAmpwYAAAn90lEQVR4nO3de1hTV7o/8C8JBEvVURQxFNTW1ooHqVREOeAVFEbB4AWxUscOrT4tKDO1tl56EabainPEtjjYo+P11DN2GDtaLketVKXY6lNGS1W8DcVRIAEEqXINJOv3hz/2SFGIEDYQvp/n4XlI9lr7XQvy8mavvdmxEkIIEBERyUDR0QMgIqLug0WHiIhkw6JDRESyYdEhIiLZsOgQEZFsWHSIiEg2LDoW5KuvvsLEiRPh4eGBnJycjh4OUZd269YthIeHw8PDAxs2bOjo4VgMFp02mDJlCr799lvpcXFxMdasWQNfX194eHggMDAQn3zyCaqqqmQZT1xcHN59912cO3cOI0aMkCUm0YNMmTIFbm5uKCsra/S8RqPBs88+i/z8fACATqfDsmXLMHbsWIwePRrBwcH44osvAAD5+fl49tln4eHh0egrLS1Nljl8/vnn6Nu3L86ePYtVq1bJErM7sO7oAViK8vJyzJ8/Hx4eHti/fz+cnZ2h1WqxY8cO3LhxA8OHD2+32PX19bC2tkZhYSGeeeaZVu3DYDBAqVSaeWTUnT3xxBNITU3FwoULAQBXrlxBTU1NozZvvvkmhg8fjuPHj0OlUuHq1asoKSlp1Ob777+HtbV8f6qEEBBCoLCwEEOHDoWVldUj76MhJ6kpHumYya5du/D444/jj3/8I5ydnQEAarUa77zzzgMLTsO7uM8//xy+vr7w9fXFzp07pe1GoxHbtm2Dv78/xo4di9/97ncoLy9v1DcpKQmTJk2SlgAMBgM0Gg38/f0BALm5uVi4cCE8PT0xY8YMpKenS/tftWoV1q5di8WLF2PUqFE4c+YMpkyZgj//+c8IDg7GqFGjsGbNGty6dQuvvPIKPDw88NJLL+Hnn3+W9hEdHQ0fHx+MHj0a4eHhuHbtWqP9x8bGYsmSJfDw8EBoaChu3Lghbb927Rp++9vfwsvLC//5n/+JTz/9tMV5U9ei0Whw8OBB6fHBgwcREhLSqM2FCxcwe/Zs2NnZwdraGiNGjMDEiRNbFW/hwoXYtGkT5s6di9GjR+O1115r9Nr54YcfMH/+fHh6emLmzJk4c+ZMo76bN2/G/Pnz8dxzz+Gtt97CwYMHsWPHDnh4eODbb7+FXq/H+vXrpXxdv3499Ho9AODMmTOYMGECtm3bBh8fH6xevRoJCQmIjo7GihUr4OHhgeDgYOTl5eG///u/4e3tjYkTJyIzM1Maw4EDB/DrX/8aHh4e8PPzw/79+6VtDfvfuXMnvL294evriwMHDkjba2pqsGHDBkyePBmjR4/GCy+8IBX45ubdIQS12uTJk8WpU6eEEEKEhoaKjz/+2OS+N2/eFMOGDROvv/66qKysFJcvXxZjx46V9rdr1y4RGhoqtFqtqK2tFe+++654/fXXG/V98803RWVlpaiurhZCCDFs2DBx/fp1IYQQer1e+Pv7i61bt4ra2lrx7bffilGjRonc3FwhhBArV64Uzz//vMjKyhIGg0HU1NSIyZMni9DQUFFSUiJ0Op0YN26cCAkJERcvXhS1tbVi4cKFIiEhQZpDUlKSuHv3rqitrRXr1q0TM2fOlLatXLlSjBkzRmRnZ4u6ujqxfPly8fvf/14IIcTdu3eFj4+P2LFjh6ipqRF3794VP/zwQ4vzpq6jITemTZsm/vnPf4r6+noxYcIEkZ+fL4YNGyZu3rwphBBi0aJFIiwsTKSkpIiCgoJG+2h4ndfV1ZkU88UXXxS+vr7iypUrorKyUixdulS88cYbQgghdDqd8PLyEidOnBAGg0FkZmYKLy8vUVpaKvWdOHGiuHr1qqirqxN6vV6sXLlSxMfHS/v/6KOPRGhoqLh165YoLS0VYWFhYvPmzUIIIU6fPi1cXV3Fxo0bRW1traiurhaffPKJcHNzExkZGaKurk68+eabYvLkySIxMVHo9Xrx+eefi8mTJ0v7P378uPjXv/4ljEajOHPmjHB3dxcXLlxotP+PPvpI6PV6ceLECeHu7i7Ky8uFEELExMSIF198Ueh0OlFfXy/+8Y9/iNra2hbn3RF4pGMm5eXlcHBweOR+UVFRsLOzw7PPPovZs2cjJSUFwL315Ndffx0DBw6ESqXC0qVLceTIEdTX10t9ly1bBjs7O/To0aPJfrOzs1FVVYUlS5ZApVLB29sbkydPRmpqqtTGz88Po0ePhkKhgK2tLQDgxRdfRP/+/eHo6AhPT0+4u7tjxIgRUKlUmDp1aqMLFObOnYuePXtCpVJh2bJluHz5Mu7evSttnzp1Ktzd3WFtbY2ZM2fi0qVLAIATJ06gf//+iIiIgK2tLXr27InnnnvO5HlT19FwtHPq1Ck89dRTcHR0bLT9448/hqenJxITE+Hn5weNRoMff/yxUZtx48bB09NT+srNzW023rBhw2BnZ4ff/e53OHz4MAwGAw4dOoQJEyZg4sSJUCgU8PHxgZubG06ePCn1nTVrFp555hlYW1vDxsamyb6Tk5MRFRWFfv36wd7eHlFRUfjyyy+l7QqFAtHR0VCpVFJOenp6Yvz48bC2tkZgYCBu376NJUuWwMbGBtOnT0dBQQHu3LkDAJg0aRIGDRoEKysreHl5wcfHB1lZWdL+ra2tERUVBRsbG0ycOBF2dnbIy8uD0WjEgQMH8Pbbb8PR0RFKpRLPP/88VCqVSfOWGxcdzaRPnz5N1qJNoVarpe+feOIJXL16FQBQWFiIqKgoKBT/fl+gUChQWloqPR44cOBD91tcXIyBAwc26u/k5ISioqIHxm7Qv39/6XtbW9tGj3v06CFdFGEwGLB582YcPnwYZWVlUpzbt2+jV69eTfZ1f1+tVotBgwY9cNzNzfuXf7Co89NoNHjxxReRn58PjUbTZPuvfvUrrFixAitWrEBZWRk2btyIqKgoZGRkSG1Onz5t8vmR+1/TTk5OqKurw+3bt1FYWIjDhw/j+PHj0vb6+nqMHTv2gX0fpLi4GE5OTo32X1xcLD3u27ev9OatQb9+/aTve/Togb59+0rnThsKU1VVFXr37o2TJ0/iT3/6E65fvw6j0YiamhoMGzZM6t+nT59GP4fHHnsMVVVVuH37Nmpra+Hi4tJkzKbMW24sOmbi7e2Nr776CkuXLm30B7MlWq0WQ4cOBXDvBTJgwAAA9wrKBx98gNGjRzfp03DlT3MnOAcMGACdTgej0SiNR6vVYsiQISaPrTnJyclIT0/Hrl274OzsjLt372LMmDEQJty0XK1WNzriul9z86au54knnoCzszNOnjyJ9evXN9vW3t4eERER+Pvf/97q83harbbR9zY2Nujbty/UajU0Gg3WrVv30L4tXTAwYMCARhfraLVaKV9N6d8cvV6P6OhoxMXFwc/PDzY
2NoiMjDQpnxqK3c2bN5ucPzZl3nLj8pqZ/Pa3v0VlZSVWrlyJgoICAEBRURE+/PBDXL58+aH9EhMTUV1djWvXruGLL77A9OnTAQAvvPACPvroI2lfZWVlOHbsmMnjcXd3x2OPPYY///nPqKurw5kzZ/D1119L+2+ryspKqFQq9O3bF9XV1YiPjze576RJk3Dr1i3s3r0ber0eFRUVyM7OBtD2eVPns379euzZswd2dnZNtv3xj3/E1atXUV9fj4qKCvzlL3/B4MGD0bdv31bF+vLLL/HPf/4T1dXV+PjjjxEQEAClUomZM2fi+PHj+Oabb2AwGFBbW4szZ85Ap9OZvO8ZM2Zg69atKCsrQ1lZGf70pz8hODi4VeP8Jb1eD71eD3t7e1hbW+PkyZM4deqUSX0VCgXmzJmDDz/8EEVFRTAYDDh37hz0er1Z5m1uLDpm0qdPH/zlL3+BtbU15s2bBw8PDyxatAi9evXC4MGDH9rPy8sLU6dOxUsvvYSIiAj4+voCAH7zm99gypQpiIiIgIeHB+bNm9dkrbs5KpUKW7duRUZGBsaNG4fY2Fhs3LhROqpqq5CQEDg5OWH8+PGYMWMGRo0aZXLfnj17YufOnTh+/Dh8fHwQEBAgXVHT1nlT5zNo0CCMHDnygdtqamqwdOlSjBkzBv7+/igsLMTWrVsbtRkzZkyj/9PZtWvXQ2NpNBqsWrUKPj4+0Ov1ePvttwHce8efmJjY6MqxHTt2wGg0mjyPyMhIuLm5YebMmZg5cyb+4z/+A5GRkSb3b07Pnj3xzjvv4Pe//z3GjBmDlJQUTJkyxeT+K1euxLBhwzB37lx4eXnhv/7rv2A0Gs0yb3OzEqYcv5HZ5efnw8/PDxcvXuT1/ERmsHDhQsycOROhoaEdPRRqBo90iIhINiw6REQkGy6vERGRbHikQ0REsunSZ7Brampw4cIFODg48GaVZHYGgwElJSVwc3N74F0fuirmDbWnlvKmSxedCxcuIDw8vKOHQRZu37598PT07OhhmA3zhuTwsLzp0kWn4V5n+/bta/aWMEStodPpEB4e3qp76nVmzBtqTy3lTZcuOg1LAwMHDpQ+ToDI3CxtCYp5Q3J4WN7wQgIiIpINiw4REcmGRYeIiGTDokNERLJh0SEiItmw6BARkWxYdIiISDYsOkREJBsWHSIikg2LDhERycakopOXl4ewsDAEBAQgLCwM169fb9LGYDAgNjYW/v7+mDp1KpKSkhptT0tLQ3BwMIKCghAcHIxbt26Z1I+IiCyHSfdeW7t2LRYsWACNRoNDhw7hvffew969exu1SU5Oxo0bN3D06FGUl5cjJCQE3t7ecHZ2xvnz57Flyxbs2bMHDg4OuHv3LlQqVYv9iIjIsrR4pFNaWoqcnBwEBQUBAIKCgpCTk4OysrJG7dLS0hAaGgqFQgF7e3v4+/vj8OHDAIDdu3cjIiJCuutor169YGtr22I/IiKyLC0WHa1WC0dHR+mOoUqlEgMGDIBWq23SzsnJSXqsVquh0+kAALm5ubh58ybCw8Mxa9YsJCYmouFTspvrR9SVmbIsnZmZidmzZ8PNzQ1xcXGNtiUkJMDb2xsajQYajQaxsbHSttLSUixZsgTBwcEIDAxETEwM6uvr23tKRG0my0cbGAwGXLlyBbt27YJer8crr7wCJycnhISEyBGeqEOYsizt4uKCdevW4ciRI9Dr9U32ERISgpUrVzZ5/tNPP8XQoUOxbds21NXVYcGCBTh69CimT5/ebvMhMocWj3TUajWKiopgMBgA3CsgxcXFUKvVTdoVFhZKj7VarfQBUU5OTggMDIRKpULPnj3h5+eHH3/8scV+RF2VqcvSgwcPxogRI2Bt/Wjv/6ysrFBZWQmj0Qi9Xo+6ujo4OjqabfxE7aXFotOvXz+4uroiJSUFAJCSkgJXV1fY29s3ahcYGIikpCQYjUaUlZXh2LFjCAgIAHAv4TIzMyGEQF1dHU6fPo3hw4e32I+oqzJ1WbolqampCA4ORkREBM6dOyc9HxkZiby8PPj6+kpfo0ePNusciNqDSZdMx8TE4LPPPkNAQAA+++wzaW158eLFOH/+PABAo9HA2dkZ06ZNw7x58xAVFQUXFxcAwIwZM9CvXz9Mnz4dISEhePrppzF37twW+xF1Z/Pnz0d6ejqSk5Px8ssvIzIyErdv3wYAHD58GM8++ywyMzORkZGBrKwsXoBDXYJJx/RDhw594P/PbN++XfpeqVQ2OtF5P4VCgdWrV2P16tVNtjXXj7oWfZ0BKpvWfbRzW/p2RvcvSyuVyocuSzfn/s+Y9/HxgVqtxrVr1+Dl5YXPPvsMH3zwARQKBXr16oUpU6bgzJkzCAwMbI/pEJmNLBcSUPegslEi+I1DreqbvElj5tF0rPuXpTUazUOXpZtTVFQknae5dOkSCgoK8OSTTwIAnJ2dkZGRAXd3d+j1enz33XeYOnVqu8yFyJxYdCxMW48YuuIRR2c9woqJicGqVauQmJiI3r17S5dEL168GNHR0Rg5ciSysrKwfPlyVFRUQAiB1NRUrF+/HuPHj0d8fDwuXrwIhUIBGxsbbNy4UTr6WbNmDdauXYvg4GAYDAaMHTsW8+bNa5d5EJkTi46FacvRBtA1jzg66xGWKcvSnp6eyMjIeGD/X/7fzv0GDRqEXbt2tX2QRDLjDT+JiEg2LDpERCQbFh0iIpINiw4RNVFTX9Mhfcny8UICImqih3UPWMVataqvWCvMPBqyJDzSISIi2bDoEBGRbFh0iIhINiw6REQkGxYdIiKSDYsOERHJhkWHiIhkw6JDRESyYdEhIiLZsOgQEZFsWHSIiEg2LDpERCQbFh0iIpINiw4REcmGRYeIOg1+jo/l4+fpEFGnwc/xsXw80iEiItmw6BARkWxYdIiISDYsOkREJBsWHSIikg2LDhERyYZFh4iIZMOi0070dYYO6UtE1JmZ9M+heXl5WLVqFcrLy9GnTx/ExcVhyJAhjdoYDAasW7cO33zzDaysrLBkyRKEhoYCABISEvC///u/GDBgAADg+eefx9q1a1vc1pWpbJQIfuNQq/omb9KYeTRERJ2DSUVn7dq1WLBgATQaDQ4dOoT33nsPe/fubdQmOTkZN27cwNGjR1FeXo6QkBB4e3vD2dkZABASEoKVK1c+cP/NbSMiIsvR4vJaaWkpcnJyEBQUBAAICgpCTk4OysrKGrVLS0tDaGgoFAoF7O3t4e/vj8OHD7fPqIm6gLy8PISFhSEgIABhYWG4fv16kzaZmZmYPXs23NzcEBcX12hbQkICvL29odFooNFoEBsbK2176623pOc1Gg2GDx+O9PT09p4SUZu1eKSj1Wrh6OgIpVIJAFAqlRgwYAC0Wi3s7e0btXNycpIeq9Vq6HQ66XFqaioyMzPh4OCAZcuWwcPDw6RtRF2VKSsELi4uWLduHY4cOQK9Xt9kHw9bBdi4caP0/eXLl7Fo0SKMHz/e/JMgMjNZLiSYP38+0tPTkZycjJdffhmRkZG4fft2i9uIuipTVwgGDx6MESNGwNq69ffe/d
vf/obg4GCoVKo2jZlIDi0WHbVajaKiIhgM966oMhgMKC4uhlqtbtKusLBQeqzVajFw4EAAgIODA2xsbAAAPj4+UKvVuHbtWovbiLqq5lYIHkVqaiqCg4MRERGBc+fONdmu1+uRnJyMOXPmmGXcRO2txaLTr18/uLq6IiUlBQCQkpICV1fXRktrABAYGIikpCQYjUaUlZXh2LFjCAgIAAAUFRVJ7S5duoSCggI8+eSTLW4j6s5MWQU4duwYnJyc4Orq2kGjJHo0Jh3Tx8TEYNWqVUhMTETv3r2lE56LFy9GdHQ0Ro4cCY1Gg+zsbEybNg0AEBUVBRcXFwBAfHw8Ll68CIVCARsbG2zcuBEODg4tbiPqqu5fIVAqlQ9dIWjO/Xlw/yqAl5eX9PyBAwd4lENdiklFZ+jQoUhKSmry/Pbt26XvlUplo6tr7vfLq3JM3UbUVd2/QqDRaB66QtCcoqIiODo6AnjwKoBOp8M//vEPbNq0yezjJ2ov/ORQonZiygpBVlYWli9fjoqKCgghkJqaivXr12P8+PEtrgL8/e9/x+TJk9GnT58OmiHRo2PRIWonpqwQeHp6IiMj44H9W1oFeO2119o2QKIOwHuvERGRbFh0iIhINiw6REQkGxYdIur2auprOrR/R2jLmNvSlxcSEFG318O6B6xirVrdX6wVre5bU1+DHtY9ZO/bljm3Zb4sOkREHaij/vh3FC6vERGRbCy+6PBjo4mIOg+LX17jx0YTEXUeFn+kQ0REnQeLDhERyYZFh4iIZMOiQ0REsmHRISIi2bDoEBGRbFh0iIhINiw6REQkGxYdIiKSDYsOERHJhkWHiIhkw6JDRESyYdEhIiLZsOgQEZFsWHSIiEg2LDpERCQbFh0iIpINiw4REcmGRYeIiGTDokNERLJh0SEiItmYVHTy8vIQFhaGgIAAhIWF4fr1603aGAwGxMbGwt/fH1OnTkVSUpK0LSEhAd7e3tBoNNBoNIiNjTWpHxERWRZrUxqtXbsWCxYsgEajwaFDh/Dee+9h7969jdokJyfjxo0bOHr0KMrLyxESEgJvb284OzsDAEJCQrBy5com+26pHxERWY4Wj3RKS0uRk5ODoKAgAEBQUBBycnJQVlbWqF1aWhpCQ0OhUChgb28Pf39/HD58uMUBtLYfUWdnygpBZmYmZs+eDTc3N8TFxTXa1twKAXAvd4KDgxEUFITg4GDcunWrPadDZBYtHulotVo4OjpCqVQCAJRKJQYMGACtVgt7e/tG7ZycnKTHarUaOp1OepyamorMzEw4ODhg2bJl8PDwMKkfUVdlygqBi4sL1q1bhyNHjkCv1zfZx8NWCM6fP48tW7Zgz549cHBwwN27d6FSqdptLkTmIsuFBPPnz0d6ejqSk5Px8ssvIzIyErdv35YjNFGHMHWFYPDgwRgxYgSsrU1a6Zbs3r0bERERcHBwAAD06tULtra25hk8UTtqseio1WoUFRXBYDAAuHfiv7i4GGq1ukm7wsJC6bFWq8XAgQMBAA4ODrCxsQEA+Pj4QK1W49q1ay32I+qqmlsheBSpqakIDg5GREQEzp07Jz2fm5uLmzdvIjw8HLNmzUJiYiKEEGadA1F7aLHo9OvXD66urkhJSQEApKSkwNXVtdHSGgAEBgYiKSkJRqMRZWVlOHbsGAICAgAARUVFUrtLly6hoKAATz75ZIv9iLqz5lYIDAYDrly5gl27duF//ud/kJGRgUOHDnXwiIlaZtIxfUxMDFatWoXExET07t1bOuG5ePFiREdHY+TIkdBoNMjOzsa0adMAAFFRUXBxcQEAxMfH4+LFi1AoFLCxscHGjRulZYHm+hF1VfevECiVyoeuEDSnIUeAxisEXl5ecHJyQmBgIFQqFVQqFfz8/PDjjz8iJCSkHWZDZD4mFZ2hQ4c+8P9ntm/fLn2vVCqbXF3T4JdX5dyvuX5EXdX9KwQajeahKwTNKSoqgqOjI4CmKwRBQUE4efIkNBoN6uvrcfr0aa4QUJfwaGcvichkpqwQZGVlYfny5aioqIAQAqmpqVi/fj3Gjx/f7ArBjBkzcOHCBUyfPh0KhQK+vr6YO3duR06XyCQsOkTtxJQVAk9PT2RkZDywf3MrBAqFAqtXr8bq1avbPlAiGfHea0REJBsWHSIikg2LDhERyYZFh4iIZMOiQ0REsmHRISIi2bDoEBGRbFh0iIhINiw6REQkGxYdIiKSDYsOERHJhkWHiIhkw6JDRESyYdEhIiLZsOgQEZFsWHSIiEg2LDpERCQbFh0iIpINiw4REcmGRYeIiGTDokNERLJh0SEiItmw6BARkWxYdIiISDYsOkREJBsWHSIikg2LDhERyYZFh4iIZMOiQ0REsmHRISIi2bDoEBGRbEwqOnl5eQgLC0NAQADCwsJw/fr1Jm0MBgNiY2Ph7++PqVOnIikpqUmbn376Cc899xzi4uKk5xISEuDt7Q2NRgONRoPY2NjWz4aoEzElbzIzMzF79my4ubk1ygug+dxg3lBXZW1Ko7Vr12LBggXQaDQ4dOgQ3nvvPezdu7dRm+TkZNy4cQNHjx5FeXk5QkJC4O3tDWdnZwD3itLatWvh7+/fZP8hISFYuXKlGaZD1HmYkjcuLi5Yt24djhw5Ar1e32QfzeUG84a6ohaLTmlpKXJycrBr1y4AQFBQEN5//32UlZXB3t5eapeWlobQ0FAoFArY29vD398fhw8fxiuvvAIA2LZtGyZNmoSqqipUVVWZZfAGgwEAoNPpmm1XV1XWqv3n5+e3ql9XjduRsTtj3IbXVcPr7FGYmjeDBw8GAKSnpz+w6LQHU/PGusKk96RNtPV32dXidmTszhi3pbxpMaJWq4WjoyOUSiUAQKlUYsCAAdBqtY2SR6vVwsnJSXqsVqul4JcvX0ZmZib27t2LxMTEJjFSU1ORmZkJBwcHLFu2DB4eHi0NCwBQUlICAAgPDzep/aPy+3pDu+y3s8btyNidOW5JSYlUHExlat60pLncaO+8eQpPmTzO+/l96deqfl01bkfG7sxxH5Y3rS/tJqqrq8O7776LDz/8UErA+82fPx+vvvoqbGxscOrUKURGRiItLQ19+/Ztcd9ubm7Yt28fHBwcHrhvorYwGAwoKSmBm5tbh8RvLjeYN9RZtZQ3LRYdtVqNoqIiGAwGKJVKGAwGFBcXQ61WN2lXWFgId3d3AP8+8ikpKcGNGzewZMkSAMCdO3cghEBFRQXef/99ODg4SPvw8fGBWq3GtWvX4OXl1eLkevToAU9PzxbbEbXWox7hNDA1b5rTXG4wb6gzay5vWrx6rV+/fnB1dUVKSgoAICUlBa6urk2WCAIDA5GUlASj0YiysjIcO3YMAQEBcHJywpkzZ/D111/j66+/xqJFizBv3jy8//77AICioiJpH5cuXUJBQQGefPLJVk2UqLMwNW+a01xuMG+oqzJpeS0mJgarVq1CYmIievfuLV3auXjxYkRHR2PkyJHQaDTIzs7GtGnTAABRUVFwcXFpcd/x8fG4ePEiFAoFbGxssHHjxkbv4oi6KlPyJisrC
8uXL0dFRQWEEEhNTcX69esxfvz4ZnODeUNdlZUQQnT0IIiIqHvgHQmIiEg2LDpERCQbFh0iIpINiw4REcmGRYeIiGTDotPJdNTFhEajsUPiEpkD86brYNG5T21tbYfEvXPnDk6fPo3q6mpYWVnJFvfnn3/Gjh07UFdXB4WiY18K9fX13SquJWHedJyumDftfu+1rmLLli04fvw4oqOjMXHiRBiNRlleUDt37sTf/vY3DBkyBNbW1oiMjMTw4cNliZuWloZx48bB2toaQghZE/d+8fHxKC8vh4+PD8aPHw87OztZxtNRcS0J84Z586hxu33RMRgM2L9/P9LT0zFq1CikpaXB29sbKpWqXX+BNTU1SExMxLVr17Bjxw6o1WpoNBoUFRVh+PDh7Ra7srISGzZswPHjx3Ho0CH069fP7DFMdefOHbzzzjtQqVSYNGkSPvvsM2RnZ+PVV19F7969LS6uJWHeMG9aG7fbFx2lUonx48fD398fJSUl2LlzJw4ePIh58+a1a1yVSoV58+ZJH3KXm5sLOzs7lJaWoqqqCnZ2du0S19bWFs899xyqqqrw+OOPQ6vVIj09HUOGDIG3t7esdx2uqqpCfn4+vvjiCwCAk5MTDhw4gH379uG1116zuLiWhHnDvGlt3G59Tqfh5KOTkxMcHR3x9NNPw9PTExkZGdDpdLCysmqXE5RCCCgUCilxzp49i6ioKIwePRonTpxAXFwcvvvuO7PHNRgMsLa2xtixY2Fvb49f//rXWLp0KcrKyvCHP/wBmzdvRkFBgdnjNvjl2r/RaMSgQYNw9uxZAMDIkSMxYcIE/PDDD/jpp58AmOcEcUVFBfLz86UPlTIajXBxcWn3uJaKecO8aUvcbll0Gq44aTgMt7a+d8DXcMv3Pn364K9//avUxlwJ9Mu4DZ5++mkcPHgQK1aswJo1a1BTU4ObN2+aJeb9cRvejbm4uMDX1xcajQZ79uxBdHQ0Nm3ahO+//x4VFRVmi3u/LVu2YMGCBTh58qT0XI8ePWA0GpGXl4fq6mrY2Nhg+PDhcHJykl7EbV0q2bFjB+bMmYN169ZhxYoV0Ov10hp0e8a1RMwb5o058qbbFB0hBIQQjU50njx5Env27EFlZaXUbsiQIfD398f169exfft2vP/++/jXv/7VrnF79+6NHj16AAAGDhyImpqaNt8x+GFxd+/eDb1ej3HjxmHZsmXo2bMngHvvWlQqldmvRDIYDNi3bx/S09Ph7u6OtLQ06WOZ7e3tMXbsWJw6dQrXrl0DcO9zOPLy8qSEb+0frobPazp79ix2796NzZs3Izc3F1999RX69OkDT0/PdolraZg3zBtz541FFx2dTof9+/cDgHSCUaFQ4ObNm3jjjTewbds2eHp6Si9c4N6asa2tLb7//nvs378fnp6eGDJkSLvFvf+XlJ+fj7feegvl5eV45pln2mW+Y8aMgVKphK2trfQOrrCwEG+88QYef/zxR55rSxrW/j/99FPMmTMHdXV1OHjwoLQ9LCwMjz/+OJKSkpCVlYWKigoYDAb06tULQOvfsalUKnh6euKTTz6BWq3GY489Bl9fX5w6dQoAsGDBgnaJawmYN8ybds0bYcHefPNNsXnzZulxXV2d2LRpk5g1a5b48ssvhRBC7Ny5U3zwwQfi7t27QgghdDqdmDp1qvjkk09kiVtRUSFqa2vFX//6VxEcHCy2b98uS9y7d+8KvV4v9u3b1+a4D2M0GqVxCCFEdXW12Ldvn4iKihI6nU5ql5+fL3bv3i0WLlwopk2b1uaxGAwGIYSQfqf19fVCCCGio6NFamqq1K6goMCscS0F84Z5I0T75Y3FFZ2ffvpJGAwGcefOHTFr1iyRnZ0thBDi/PnzIjw8XMTHxwu9Xi9OnTolXnjhBbF69WpRWlraaB+VlZWyx/3pp5/EnTt3ZI+bk5Mjfv7550eO25yGF++DXLlyRbz99tvi448/lp5rSLL8/HxRU1Nj9rgNyfPSSy+JK1euNNne1riWgHnDvPml9sobiyo6JSUlwtvbW2i1WnH27Fnx8ssvi9raWiHEvR9QaWmpuHHjhoiOjhYRERHSC02I5n/h7Rm34RfbleI+iNFoFEajsdHP8cSJE2L37t2ioqJCeq62tlYcP35cvP7662Lbtm3iD3/4g8jNzW2XuPf/EdTpdCI8PFwIIURmZqZISEgQxcXFrY5rSZg3zJv747Z33ljU/+n0798fGo0GqampqK6uxqhRo6BSqVBfX48nnngCWVlZ2Lp1K4KCgjBr1iwA/z5x2Jb/om5L3LZc399RcRvodDqcOHEC8+fPl36GVlZWuHnzJj766CPodDqsWbPmoWv/2dnZWLFiBZ566ql2iWtrayut0X/zzTeoq6vD22+/jatXr+K1117jxzv/f8wb5o2sedOmktUJVVdXiwkTJogxY8aI3/zmN+L//u//HnhoKIR537V0t7hCdI21/4allw8//FCMHTtW7Nmzp9VxLVl3e/0ybzoub6yEsLxrQ48dO4YNGzYgJCQEubm5uHLlClQqFcaNGwd3d3dMnz6dcVspLy8PgwcPRmVlJRYtWoSYmBi4u7vjwoUL2LBhA0aPHo2lS5fi+++/x5YtWzBkyBCsWLEC9vb20j5a85/jbY373XffYcSIEfjVr35ltp+FpekOr9+Oisu8uY/ZylcnYjQaxcyZM8W3334rhBCiuLhYfPXVVyI+Pl5kZWUxbit1xbX/hiuAqGWW/vrtqLjMm8Ys8kgHAC5cuICYmBhs374dffv2ZVwziYuLQ//+/VFdXQ0AWLp0Kerr62Ftbd1ua/8dGbe7sfTXb0fFZd78m8UWHQB49dVX8corr8DT05NxzaSmpgYBAQGorq6Gq6srXnjhBTz11FMYNmxYk7YGg8FsN0LsqLjdkSW/fjsqLvPm3yy66HTUHx9Lj9sd1uC7M0t//XZUXObNPRZ1yfQvddS7XUuP6+fnh4SEBOkkZElJCbKzs3H+/Hk4OjpaXNzuxtJfvx0Vl3nz/7Xb2SKyaOfPnxdz5swRZWVl3SIukTkwb4TgWVZqFTc3N/Tv3x+5ubndIi6ROTBvLPycDrUvS1+DJ2oP3T1vWHSIiEg2XF4jIiLZsOgQEZFsWHSIiEg2LDpERCQbFh0iIpINiw4REcnm/wHMDWmf8u7bhwAAAABJRU5ErkJggg==",
+ "text/plain": [
+ ""
+ ]
+ },
+ "metadata": {},
+ "output_type": "display_data"
+ }
+ ],
+ "source": [
+ "fig, axes = plt.subplots(1, 2, figsize=(6,3)); axes = axes.flatten()\n",
+ "report['IC'].plot.bar(rot=30, ax=axes[0])\n",
+ "axes[0].set_ylim(0.045, 0.062)\n",
+ "axes[0].set_title('IC performance')\n",
+ "report['MSE'].astype(float).plot.bar(rot=30, ax=axes[1], color='green')\n",
+ "axes[1].set_ylim(0.155, 0.1585)\n",
+ "axes[1].set_title('MSE performance')\n",
+ "plt.tight_layout()\n",
+ "# plt.savefig('sensitivity.pdf')"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": 13,
+ "metadata": {},
+ "outputs": [
+ {
+ "data": {
+ "text/html": [
+ "\n",
+ "\n",
+ "
\n",
+ " \n",
+ " \n",
+ " | \n",
+ " IC | \n",
+ " MSE | \n",
+ "
\n",
+ " \n",
+ " \n",
+ " \n",
+ " K=1 | \n",
+ " 0.053247 | \n",
+ " 0.157792 | \n",
+ "
\n",
+ " \n",
+ " K=3 | \n",
+ " 0.055535 | \n",
+ " 0.157410 | \n",
+ "
\n",
+ " \n",
+ " K=5 | \n",
+ " 0.059224 | \n",
+ " 0.156796 | \n",
+ "
\n",
+ " \n",
+ " K=10 | \n",
+ " 0.059403 | \n",
+ " 0.156766 | \n",
+ "
\n",
+ " \n",
+ " K=20 | \n",
+ " 0.059193 | \n",
+ " 0.156801 | \n",
+ "
\n",
+ " \n",
+ "
\n",
+ "
"
+ ],
+ "text/plain": [
+ " IC MSE\n",
+ "K=1 0.053247 0.157792\n",
+ "K=3 0.055535 0.157410\n",
+ "K=5 0.059224 0.156796\n",
+ "K=10 0.059403 0.156766\n",
+ "K=20 0.059193 0.156801"
+ ]
+ },
+ "execution_count": 13,
+ "metadata": {},
+ "output_type": "execute_result"
+ }
+ ],
+ "source": [
+ "report"
+ ]
+ },
+ {
+ "cell_type": "code",
+ "execution_count": null,
+ "metadata": {},
+ "outputs": [],
+ "source": []
+ }
+ ],
+ "metadata": {
+ "interpreter": {
+ "hash": "9de784e21d4a351f53a5792b09a6ae66a23802b850ad98f62e10c0156e418c04"
+ },
+ "kernelspec": {
+ "display_name": "Python 3.8.5 64-bit ('base': conda)",
+ "name": "python3"
+ },
+ "language_info": {
+ "name": "python",
+ "version": ""
+ }
+ },
+ "nbformat": 4,
+ "nbformat_minor": 5
+}
\ No newline at end of file
diff --git a/examples/benchmarks/TRA/configs/config_alstm.yaml b/examples/benchmarks/TRA/configs/config_alstm.yaml
new file mode 100644
index 0000000000..573745e775
--- /dev/null
+++ b/examples/benchmarks/TRA/configs/config_alstm.yaml
@@ -0,0 +1,63 @@
+qlib_init:
+ provider_uri: "~/.qlib/qlib_data/cn_data"
+ region: cn
+
+data_loader_config: &data_loader_config
+ class: StaticDataLoader
+ module_path: qlib.data.dataset.loader
+ kwargs:
+ config:
+ feature: data/feature.pkl
+ label: data/label.pkl
+
+model_config: &model_config
+ input_size: 16
+ hidden_size: 256
+ num_layers: 2
+ num_heads: 2
+ use_attn: True
+ dropout: 0.1
+
+num_states: &num_states 1
+
+tra_config: &tra_config
+ num_states: *num_states
+ hidden_size: 16
+ tau: 1.0
+ src_info: LR_TPE
+
+task:
+ model:
+ class: TRAModel
+ module_path: src/model.py
+ kwargs:
+ lr: 0.0002
+ n_epochs: 500
+ max_steps_per_epoch: 100
+ early_stop: 20
+ seed: 1000
+ logdir: output/test/alstm
+ model_type: LSTM
+ model_config: *model_config
+ tra_config: *tra_config
+ lamb: 1.0
+ rho: 0.99
+ freeze_model: False
+ model_init_state:
+ dataset:
+ class: MTSDatasetH
+ module_path: src/dataset.py
+ kwargs:
+ handler:
+ class: DataHandler
+ module_path: qlib.data.dataset.handler
+ kwargs:
+ data_loader: *data_loader_config
+ segments:
+ train: [2007-10-30, 2016-05-27]
+ valid: [2016-09-26, 2018-05-29]
+ test: [2018-09-21, 2020-06-30]
+ seq_len: 60
+ horizon: 21
+ num_states: *num_states
+ batch_size: 1024
\ No newline at end of file
diff --git a/examples/benchmarks/TRA/configs/config_alstm_tra.yaml b/examples/benchmarks/TRA/configs/config_alstm_tra.yaml
new file mode 100644
index 0000000000..aa18f4f374
--- /dev/null
+++ b/examples/benchmarks/TRA/configs/config_alstm_tra.yaml
@@ -0,0 +1,63 @@
+qlib_init:
+ provider_uri: "~/.qlib/qlib_data/cn_data"
+ region: cn
+
+data_loader_config: &data_loader_config
+ class: StaticDataLoader
+ module_path: qlib.data.dataset.loader
+ kwargs:
+ config:
+ feature: data/feature.pkl
+ label: data/label.pkl
+
+model_config: &model_config
+ input_size: 16
+ hidden_size: 256
+ num_layers: 2
+ num_heads: 2
+ use_attn: True
+ dropout: 0.1
+
+num_states: &num_states 10
+
+tra_config: &tra_config
+ num_states: *num_states
+ hidden_size: 16
+ tau: 1.0
+ src_info: LR_TPE
+
+task:
+ model:
+ class: TRAModel
+ module_path: src/model.py
+ kwargs:
+ lr: 0.0001
+ n_epochs: 500
+ max_steps_per_epoch: 100
+ early_stop: 20
+ seed: 1000
+ logdir: output/test/alstm_tra
+ model_type: LSTM
+ model_config: *model_config
+ tra_config: *tra_config
+ lamb: 2.0
+ rho: 0.99
+ freeze_model: True
+ model_init_state: output/test/alstm_tra_init/model.bin
+ dataset:
+ class: MTSDatasetH
+ module_path: src/dataset.py
+ kwargs:
+ handler:
+ class: DataHandler
+ module_path: qlib.data.dataset.handler
+ kwargs:
+ data_loader: *data_loader_config
+ segments:
+ train: [2007-10-30, 2016-05-27]
+ valid: [2016-09-26, 2018-05-29]
+ test: [2018-09-21, 2020-06-30]
+ seq_len: 60
+ horizon: 21
+ num_states: *num_states
+ batch_size: 1024
\ No newline at end of file
diff --git a/examples/benchmarks/TRA/configs/config_alstm_tra_init.yaml b/examples/benchmarks/TRA/configs/config_alstm_tra_init.yaml
new file mode 100644
index 0000000000..0f5b9269db
--- /dev/null
+++ b/examples/benchmarks/TRA/configs/config_alstm_tra_init.yaml
@@ -0,0 +1,63 @@
+qlib_init:
+ provider_uri: "~/.qlib/qlib_data/cn_data"
+ region: cn
+
+data_loader_config: &data_loader_config
+ class: StaticDataLoader
+ module_path: qlib.data.dataset.loader
+ kwargs:
+ config:
+ feature: data/feature.pkl
+ label: data/label.pkl
+
+model_config: &model_config
+ input_size: 16
+ hidden_size: 256
+ num_layers: 2
+ num_heads: 2
+ use_attn: True
+ dropout: 0.1
+
+num_states: &num_states 3
+
+tra_config: &tra_config
+ num_states: *num_states
+ hidden_size: 16
+ tau: 1.0
+ src_info: LR_TPE
+
+task:
+ model:
+ class: TRAModel
+ module_path: src/model.py
+ kwargs:
+ lr: 0.0002
+ n_epochs: 500
+ max_steps_per_epoch: 100
+ early_stop: 20
+ seed: 1000
+ logdir: output/test/alstm_tra_init
+ model_type: LSTM
+ model_config: *model_config
+ tra_config: *tra_config
+ lamb: 1.0
+ rho: 0.99
+ freeze_model: False
+ model_init_state:
+ dataset:
+ class: MTSDatasetH
+ module_path: src/dataset.py
+ kwargs:
+ handler:
+ class: DataHandler
+ module_path: qlib.data.dataset.handler
+ kwargs:
+ data_loader: *data_loader_config
+ segments:
+ train: [2007-10-30, 2016-05-27]
+ valid: [2016-09-26, 2018-05-29]
+ test: [2018-09-21, 2020-06-30]
+ seq_len: 60
+ horizon: 21
+ num_states: *num_states
+ batch_size: 512
\ No newline at end of file
diff --git a/examples/benchmarks/TRA/configs/config_transformer.yaml b/examples/benchmarks/TRA/configs/config_transformer.yaml
new file mode 100644
index 0000000000..2ffbe20b8b
--- /dev/null
+++ b/examples/benchmarks/TRA/configs/config_transformer.yaml
@@ -0,0 +1,63 @@
+qlib_init:
+ provider_uri: "~/.qlib/qlib_data/cn_data"
+ region: cn
+
+data_loader_config: &data_loader_config
+ class: StaticDataLoader
+ module_path: qlib.data.dataset.loader
+ kwargs:
+ config:
+ feature: data/feature.pkl
+ label: data/label.pkl
+
+model_config: &model_config
+ input_size: 16
+ hidden_size: 64
+ num_layers: 2
+ num_heads: 4
+ use_attn: False
+ dropout: 0.1
+
+num_states: &num_states 1
+
+tra_config: &tra_config
+ num_states: *num_states
+ hidden_size: 16
+ tau: 1.0
+ src_info: LR_TPE
+
+task:
+ model:
+ class: TRAModel
+ module_path: src/model.py
+ kwargs:
+ lr: 0.0002
+ n_epochs: 500
+ max_steps_per_epoch: 100
+ early_stop: 20
+ seed: 1000
+ logdir: output/test/transformer
+ model_type: Transformer
+ model_config: *model_config
+ tra_config: *tra_config
+ lamb: 1.0
+ rho: 0.99
+ freeze_model: False
+ model_init_state:
+ dataset:
+ class: MTSDatasetH
+ module_path: src/dataset.py
+ kwargs:
+ handler:
+ class: DataHandler
+ module_path: qlib.data.dataset.handler
+ kwargs:
+ data_loader: *data_loader_config
+ segments:
+ train: [2007-10-30, 2016-05-27]
+ valid: [2016-09-26, 2018-05-29]
+ test: [2018-09-21, 2020-06-30]
+ seq_len: 60
+ horizon: 21
+ num_states: *num_states
+ batch_size: 1024
\ No newline at end of file
diff --git a/examples/benchmarks/TRA/configs/config_transformer_tra.yaml b/examples/benchmarks/TRA/configs/config_transformer_tra.yaml
new file mode 100644
index 0000000000..e68bcc4501
--- /dev/null
+++ b/examples/benchmarks/TRA/configs/config_transformer_tra.yaml
@@ -0,0 +1,63 @@
+qlib_init:
+ provider_uri: "~/.qlib/qlib_data/cn_data"
+ region: cn
+
+data_loader_config: &data_loader_config
+ class: StaticDataLoader
+ module_path: qlib.data.dataset.loader
+ kwargs:
+ config:
+ feature: data/feature.pkl
+ label: data/label.pkl
+
+model_config: &model_config
+ input_size: 16
+ hidden_size: 64
+ num_layers: 2
+ num_heads: 4
+ use_attn: False
+ dropout: 0.1
+
+num_states: &num_states 3
+
+tra_config: &tra_config
+ num_states: *num_states
+ hidden_size: 16
+ tau: 1.0
+ src_info: LR_TPE
+
+task:
+ model:
+ class: TRAModel
+ module_path: src/model.py
+ kwargs:
+ lr: 0.0005
+ n_epochs: 500
+ max_steps_per_epoch: 100
+ early_stop: 20
+ seed: 1000
+ logdir: output/test/transformer_tra
+ model_type: Transformer
+ model_config: *model_config
+ tra_config: *tra_config
+ lamb: 1.0
+ rho: 0.99
+ freeze_model: True
+ model_init_state: output/test/transformer_tra_init/model.bin
+ dataset:
+ class: MTSDatasetH
+ module_path: src/dataset.py
+ kwargs:
+ handler:
+ class: DataHandler
+ module_path: qlib.data.dataset.handler
+ kwargs:
+ data_loader: *data_loader_config
+ segments:
+ train: [2007-10-30, 2016-05-27]
+ valid: [2016-09-26, 2018-05-29]
+ test: [2018-09-21, 2020-06-30]
+ seq_len: 60
+ horizon: 21
+ num_states: *num_states
+ batch_size: 512
\ No newline at end of file
diff --git a/examples/benchmarks/TRA/configs/config_transformer_tra_init.yaml b/examples/benchmarks/TRA/configs/config_transformer_tra_init.yaml
new file mode 100644
index 0000000000..927e74e52f
--- /dev/null
+++ b/examples/benchmarks/TRA/configs/config_transformer_tra_init.yaml
@@ -0,0 +1,63 @@
+qlib_init:
+ provider_uri: "~/.qlib/qlib_data/cn_data"
+ region: cn
+
+data_loader_config: &data_loader_config
+ class: StaticDataLoader
+ module_path: qlib.data.dataset.loader
+ kwargs:
+ config:
+ feature: data/feature.pkl
+ label: data/label.pkl
+
+model_config: &model_config
+ input_size: 16
+ hidden_size: 64
+ num_layers: 2
+ num_heads: 4
+ use_attn: False
+ dropout: 0.1
+
+num_states: &num_states 3
+
+tra_config: &tra_config
+ num_states: *num_states
+ hidden_size: 16
+ tau: 1.0
+ src_info: LR_TPE
+
+task:
+ model:
+ class: TRAModel
+ module_path: src/model.py
+ kwargs:
+ lr: 0.0002
+ n_epochs: 500
+ max_steps_per_epoch: 100
+ early_stop: 20
+ seed: 1000
+ logdir: output/test/transformer_tra_init
+ model_type: Transformer
+ model_config: *model_config
+ tra_config: *tra_config
+ lamb: 1.0
+ rho: 0.99
+ freeze_model: False
+ model_init_state:
+ dataset:
+ class: MTSDatasetH
+ module_path: src/dataset.py
+ kwargs:
+ handler:
+ class: DataHandler
+ module_path: qlib.data.dataset.handler
+ kwargs:
+ data_loader: *data_loader_config
+ segments:
+ train: [2007-10-30, 2016-05-27]
+ valid: [2016-09-26, 2018-05-29]
+ test: [2018-09-21, 2020-06-30]
+ seq_len: 60
+ horizon: 21
+ num_states: *num_states
+ batch_size: 512
\ No newline at end of file
diff --git a/examples/benchmarks/TRA/data/README.md b/examples/benchmarks/TRA/data/README.md
new file mode 100644
index 0000000000..2362fbf17e
--- /dev/null
+++ b/examples/benchmarks/TRA/data/README.md
@@ -0,0 +1 @@
+Data Link: https://drive.google.com/drive/folders/1fMqZYSeLyrHiWmVzygeI4sw3vp5Gt8cY?usp=sharing
diff --git a/examples/benchmarks/TRA/example.py b/examples/benchmarks/TRA/example.py
new file mode 100644
index 0000000000..defacf412a
--- /dev/null
+++ b/examples/benchmarks/TRA/example.py
@@ -0,0 +1,39 @@
+import argparse
+
+import qlib
+import ruamel.yaml as yaml
+from qlib.utils import init_instance_by_config
+
+
+def main(seed, config_file="configs/config_alstm.yaml"):
+
+    # load config and override the random seed / logdir
+ with open(config_file) as f:
+ config = yaml.safe_load(f)
+
+ # seed_suffix = "/seed1000" if "init" in config_file else f"/seed{seed}"
+ seed_suffix = ""
+ config["task"]["model"]["kwargs"].update(
+ {"seed": seed, "logdir": config["task"]["model"]["kwargs"]["logdir"] + seed_suffix}
+ )
+
+ # initialize workflow
+ qlib.init(
+ provider_uri=config["qlib_init"]["provider_uri"],
+ region=config["qlib_init"]["region"],
+ )
+ dataset = init_instance_by_config(config["task"]["dataset"])
+ model = init_instance_by_config(config["task"]["model"])
+
+ # train model
+ model.fit(dataset)
+
+
+if __name__ == "__main__":
+
+ # set params from cmd
+ parser = argparse.ArgumentParser(allow_abbrev=False)
+ parser.add_argument("--seed", type=int, default=1000, help="random seed")
+ parser.add_argument("--config_file", type=str, default="configs/config_alstm.yaml", help="config file")
+ args = parser.parse_args()
+ main(**vars(args))
diff --git a/examples/benchmarks/TRA/run.sh b/examples/benchmarks/TRA/run.sh
new file mode 100644
index 0000000000..d9428b29b0
--- /dev/null
+++ b/examples/benchmarks/TRA/run.sh
@@ -0,0 +1,29 @@
+#!/bin/bash
+
+# We used random seeds (1, 1000, 2000, 3000, 4000, 5000) in our experiments.
+
+# Directly run from Qlib command `qrun`
+qrun configs/config_alstm.yaml
+
+qrun configs/config_transformer.yaml
+
+qrun configs/config_transformer_tra_init.yaml
+qrun configs/config_transformer_tra.yaml
+
+qrun configs/config_alstm_tra_init.yaml
+qrun configs/config_alstm_tra.yaml
+
+
+# Or setting different parameters with example.py
+python example.py --config_file configs/config_alstm.yaml
+
+python example.py --config_file configs/config_transformer.yaml
+
+python example.py --config_file configs/config_transformer_tra_init.yaml
+python example.py --config_file configs/config_transformer_tra.yaml
+
+python example.py --config_file configs/config_alstm_tra_init.yaml
+python example.py --config_file configs/config_alstm_tra.yaml
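+
+# A sketch for sweeping seeds with example.py (hypothetical loop, not one of the paper scripts):
+# for seed in 1 1000 2000 3000 4000 5000; do
+#     python example.py --config_file configs/config_alstm_tra.yaml --seed $seed
+# done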
+
+
+
diff --git a/examples/benchmarks/TRA/src/dataset.py b/examples/benchmarks/TRA/src/dataset.py
new file mode 100644
index 0000000000..50c57d5818
--- /dev/null
+++ b/examples/benchmarks/TRA/src/dataset.py
@@ -0,0 +1,253 @@
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+
+import copy
+import torch
+import numpy as np
+import pandas as pd
+
+from qlib.utils import init_instance_by_config
+from qlib.data.dataset import DatasetH, DataHandler
+
+
+device = "cuda" if torch.cuda.is_available() else "cpu"
+
+
+def _to_tensor(x):
+ if not isinstance(x, torch.Tensor):
+ return torch.tensor(x, dtype=torch.float, device=device)
+ return x
+
+
+def _create_ts_slices(index, seq_len):
+ """
+ create time series slices from pandas index
+
+ Args:
+        index (pd.MultiIndex): pandas multiindex with <code, date> order
+ seq_len (int): sequence length
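+
+    Example (illustrative): for a single code with 4 dates and seq_len=2, the
+    produced slices are [0, 1), [0, 2), [1, 3), [2, 4); each slice ends at a
+    label date and contains at most `seq_len` rows of history.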
+ """
+ assert index.is_lexsorted(), "index should be sorted"
+
+ # number of dates for each code
+ sample_count_by_codes = pd.Series(0, index=index).groupby(level=0).size().values
+
+ # start_index for each code
+ start_index_of_codes = np.roll(np.cumsum(sample_count_by_codes), 1)
+ start_index_of_codes[0] = 0
+
+ # all the [start, stop) indices of features
+    # features between [start, stop) are used to predict the `stop - 1` label
+ slices = []
+ for cur_loc, cur_cnt in zip(start_index_of_codes, sample_count_by_codes):
+ for stop in range(1, cur_cnt + 1):
+ end = cur_loc + stop
+ start = max(end - seq_len, 0)
+ slices.append(slice(start, end))
+ slices = np.array(slices)
+
+ return slices
+
+
+def _get_date_parse_fn(target):
+ """get date parse function
+
+ This method is used to parse date arguments as target type.
+
+ Example:
+ get_date_parse_fn('20120101')('2017-01-01') => '20170101'
+ get_date_parse_fn(20120101)('2017-01-01') => 20170101
+ """
+ if isinstance(target, pd.Timestamp):
+ _fn = lambda x: pd.Timestamp(x) # Timestamp('2020-01-01')
+ elif isinstance(target, str) and len(target) == 8:
+ _fn = lambda x: str(x).replace("-", "")[:8] # '20200201'
+ elif isinstance(target, int):
+ _fn = lambda x: int(str(x).replace("-", "")[:8]) # 20200201
+ else:
+ _fn = lambda x: x
+ return _fn
+
+
+class MTSDatasetH(DatasetH):
+ """Memory Augmented Time Series Dataset
+
+ Args:
+ handler (DataHandler): data handler
+ segments (dict): data split segments
+ seq_len (int): time series sequence length
+ horizon (int): label horizon (to mask historical loss for TRA)
+ num_states (int): how many memory states to be added (for TRA)
+ batch_size (int): batch size (<0 means daily batch)
+        shuffle (bool): whether to shuffle the data
+        pin_memory (bool): whether to pin data to GPU memory
+        drop_last (bool): whether to drop the last batch if it is smaller than batch_size
+ """
+
+ def __init__(
+ self,
+ handler,
+ segments,
+ seq_len=60,
+ horizon=0,
+ num_states=1,
+ batch_size=-1,
+ shuffle=True,
+ pin_memory=False,
+ drop_last=False,
+ **kwargs
+ ):
+
+ assert horizon > 0, "please specify `horizon` to avoid data leakage"
+
+ self.seq_len = seq_len
+ self.horizon = horizon
+ self.num_states = num_states
+ self.batch_size = batch_size
+ self.shuffle = shuffle
+ self.drop_last = drop_last
+ self.pin_memory = pin_memory
+ self.params = (batch_size, drop_last, shuffle) # for train/eval switch
+
+ super().__init__(handler, segments, **kwargs)
+
+ def setup_data(self, handler_kwargs: dict = None, **kwargs):
+
+ super().setup_data()
+
+        # change index to <code, date>
+ # NOTE: we will use inplace sort to reduce memory use
+ df = self.handler._data
+ df.index = df.index.swaplevel()
+ df.sort_index(inplace=True)
+
+ self._data = df["feature"].values.astype("float32")
+ self._label = df["label"].squeeze().astype("float32")
+ self._index = df.index
+
+ # add memory to feature
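+        # the extra `num_states` columns act as a shared memory: each sample's recent
+        # loss per predictor is written back here (see `assign_data`) and later read
+        # by TRA's router as the temporal prediction error (TPE) signal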
+ self._data = np.c_[self._data, np.zeros((len(self._data), self.num_states), dtype=np.float32)]
+
+ # padding tensor
+ self.zeros = np.zeros((self.seq_len, self._data.shape[1]), dtype=np.float32)
+
+ # pin memory
+ if self.pin_memory:
+ self._data = _to_tensor(self._data)
+ self._label = _to_tensor(self._label)
+ self.zeros = _to_tensor(self.zeros)
+
+ # create batch slices
+ self.batch_slices = _create_ts_slices(self._index, self.seq_len)
+
+ # create daily slices
+ index = [slc.stop - 1 for slc in self.batch_slices]
+ act_index = self.restore_index(index)
+ daily_slices = {date: [] for date in sorted(act_index.unique(level=1))}
+ for i, (code, date) in enumerate(act_index):
+ daily_slices[date].append(self.batch_slices[i])
+ self.daily_slices = list(daily_slices.values())
+
+ def _prepare_seg(self, slc, **kwargs):
+ fn = _get_date_parse_fn(self._index[0][1])
+ start_date = fn(slc.start)
+ end_date = fn(slc.stop)
+ obj = copy.copy(self) # shallow copy
+ # NOTE: Seriable will disable copy `self._data` so we manually assign them here
+ obj._data = self._data
+ obj._label = self._label
+ obj._index = self._index
+ new_batch_slices = []
+ for batch_slc in self.batch_slices:
+ date = self._index[batch_slc.stop - 1][1]
+ if start_date <= date <= end_date:
+ new_batch_slices.append(batch_slc)
+ obj.batch_slices = np.array(new_batch_slices)
+ new_daily_slices = []
+ for daily_slc in self.daily_slices:
+ date = self._index[daily_slc[0].stop - 1][1]
+ if start_date <= date <= end_date:
+ new_daily_slices.append(daily_slc)
+ obj.daily_slices = new_daily_slices
+ return obj
+
+ def restore_index(self, index):
+ if isinstance(index, torch.Tensor):
+ index = index.cpu().numpy()
+ return self._index[index]
+
+ def assign_data(self, index, vals):
+ if isinstance(self._data, torch.Tensor):
+ vals = _to_tensor(vals)
+ elif isinstance(vals, torch.Tensor):
+ vals = vals.detach().cpu().numpy()
+ index = index.detach().cpu().numpy()
+ self._data[index, -self.num_states :] = vals
+
+ def clear_memory(self):
+ self._data[:, -self.num_states :] = 0
+
+ # TODO: better train/eval mode design
+ def train(self):
+ """enable traning mode"""
+ self.batch_size, self.drop_last, self.shuffle = self.params
+
+ def eval(self):
+ """enable evaluation mode"""
+ self.batch_size = -1
+ self.drop_last = False
+ self.shuffle = False
+
+ def _get_slices(self):
+ if self.batch_size < 0:
+ slices = self.daily_slices.copy()
+ batch_size = -1 * self.batch_size
+ else:
+ slices = self.batch_slices.copy()
+ batch_size = self.batch_size
+ return slices, batch_size
+
+ def __len__(self):
+ slices, batch_size = self._get_slices()
+ if self.drop_last:
+ return len(slices) // batch_size
+ return (len(slices) + batch_size - 1) // batch_size
+
+ def __iter__(self):
+ slices, batch_size = self._get_slices()
+ if self.shuffle:
+ np.random.shuffle(slices)
+
+ for i in range(len(slices))[::batch_size]:
+ if self.drop_last and i + batch_size > len(slices):
+ break
+ # get slices for this batch
+ slices_subset = slices[i : i + batch_size]
+ if self.batch_size < 0:
+ slices_subset = np.concatenate(slices_subset)
+ # collect data
+ data = []
+ label = []
+ index = []
+ for slc in slices_subset:
+ _data = self._data[slc].clone() if self.pin_memory else self._data[slc].copy()
+ if len(_data) != self.seq_len:
+ if self.pin_memory:
+ _data = torch.cat([self.zeros[: self.seq_len - len(_data)], _data], axis=0)
+ else:
+ _data = np.concatenate([self.zeros[: self.seq_len - len(_data)], _data], axis=0)
+ if self.num_states > 0:
+ _data[-self.horizon :, -self.num_states :] = 0
+ data.append(_data)
+ label.append(self._label[slc.stop - 1])
+ index.append(slc.stop - 1)
+            # concatenate the per-sample slices into batch tensors
+ index = torch.tensor(index, device=device)
+ if isinstance(data[0], torch.Tensor):
+ data = torch.stack(data)
+ label = torch.stack(label)
+ else:
+ data = _to_tensor(np.stack(data))
+ label = _to_tensor(np.stack(label))
+ # yield -> generator
+ yield {"data": data, "label": label, "index": index}
diff --git a/examples/benchmarks/TRA/src/model.py b/examples/benchmarks/TRA/src/model.py
new file mode 100644
index 0000000000..b1b5e27f69
--- /dev/null
+++ b/examples/benchmarks/TRA/src/model.py
@@ -0,0 +1,603 @@
+# Copyright (c) Microsoft Corporation.
+# Licensed under the MIT License.
+
+import os
+import copy
+import math
+import json
+import collections
+import numpy as np
+import pandas as pd
+
+import torch
+import torch.nn as nn
+import torch.optim as optim
+import torch.nn.functional as F
+
+from tqdm import tqdm
+
+from qlib.utils import get_or_create_path
+from qlib.log import get_module_logger
+from qlib.model.base import Model
+
+device = "cuda" if torch.cuda.is_available() else "cpu"
+
+
+class TRAModel(Model):
+ def __init__(
+ self,
+ model_config,
+ tra_config,
+ model_type="LSTM",
+ lr=1e-3,
+ n_epochs=500,
+ early_stop=50,
+ smooth_steps=5,
+ max_steps_per_epoch=None,
+ freeze_model=False,
+ model_init_state=None,
+ lamb=0.0,
+ rho=0.99,
+ seed=0,
+ logdir=None,
+ eval_train=True,
+ eval_test=False,
+ avg_params=True,
+ **kwargs,
+ ):
+
+ np.random.seed(seed)
+ torch.manual_seed(seed)
+
+ self.logger = get_module_logger("TRA")
+ self.logger.info("TRA Model...")
+
+ self.model = eval(model_type)(**model_config).to(device)
+ if model_init_state:
+ self.model.load_state_dict(torch.load(model_init_state, map_location="cpu")["model"])
+ if freeze_model:
+ for param in self.model.parameters():
+ param.requires_grad_(False)
+ else:
+ self.logger.info("# model params: %d" % sum([p.numel() for p in self.model.parameters()]))
+
+ self.tra = TRA(self.model.output_size, **tra_config).to(device)
+ self.logger.info("# tra params: %d" % sum([p.numel() for p in self.tra.parameters()]))
+
+ self.optimizer = optim.Adam(list(self.model.parameters()) + list(self.tra.parameters()), lr=lr)
+
+ self.model_config = model_config
+ self.tra_config = tra_config
+ self.lr = lr
+ self.n_epochs = n_epochs
+ self.early_stop = early_stop
+ self.smooth_steps = smooth_steps
+ self.max_steps_per_epoch = max_steps_per_epoch
+ self.lamb = lamb
+ self.rho = rho
+ self.seed = seed
+ self.logdir = logdir
+ self.eval_train = eval_train
+ self.eval_test = eval_test
+ self.avg_params = avg_params
+
+ if self.tra.num_states > 1 and not self.eval_train:
+ self.logger.warn("`eval_train` will be ignored when using TRA")
+
+ if self.logdir is not None:
+ if os.path.exists(self.logdir):
+ self.logger.warn(f"logdir {self.logdir} is not empty")
+ os.makedirs(self.logdir, exist_ok=True)
+
+ self.fitted = False
+ self.global_step = -1
+
+ def train_epoch(self, data_set):
+
+ self.model.train()
+ self.tra.train()
+
+ data_set.train()
+
+ max_steps = self.n_epochs
+ if self.max_steps_per_epoch is not None:
+ max_steps = min(self.max_steps_per_epoch, self.n_epochs)
+
+ count = 0
+ total_loss = 0
+ total_count = 0
+ for batch in tqdm(data_set, total=max_steps):
+ count += 1
+ if count > max_steps:
+ break
+
+ self.global_step += 1
+
+ data, label, index = batch["data"], batch["label"], batch["index"]
+
+ feature = data[:, :, : -self.tra.num_states]
+ hist_loss = data[:, : -data_set.horizon, -self.tra.num_states :]
+
+ hidden = self.model(feature)
+ pred, all_preds, prob = self.tra(hidden, hist_loss)
+
+ loss = (pred - label).pow(2).mean()
+
+ L = (all_preds.detach() - label[:, None]).pow(2)
+            L -= L.min(dim=-1, keepdim=True).values  # normalize & ensure positive input
+
+ data_set.assign_data(index, L) # save loss to memory
+
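+            # optimal transport regularization (brief note): sinkhorn(-L) yields a
+            # balanced soft sample-to-predictor assignment P; the extra loss term pulls
+            # the router's probabilities towards P, with a weight `lamb` that decays
+            # geometrically over training steps (rho ** global_step)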
+ if prob is not None:
+ P = sinkhorn(-L, epsilon=0.01) # sample assignment matrix
+ lamb = self.lamb * (self.rho ** self.global_step)
+ reg = prob.log().mul(P).sum(dim=-1).mean()
+ loss = loss - lamb * reg
+
+ loss.backward()
+ self.optimizer.step()
+ self.optimizer.zero_grad()
+
+ total_loss += loss.item()
+ total_count += len(pred)
+
+ total_loss /= total_count
+
+ return total_loss
+
+ def test_epoch(self, data_set, return_pred=False):
+
+ self.model.eval()
+ self.tra.eval()
+ data_set.eval()
+
+ preds = []
+ metrics = []
+ for batch in tqdm(data_set):
+ data, label, index = batch["data"], batch["label"], batch["index"]
+
+ feature = data[:, :, : -self.tra.num_states]
+ hist_loss = data[:, : -data_set.horizon, -self.tra.num_states :]
+
+ with torch.no_grad():
+ hidden = self.model(feature)
+ pred, all_preds, prob = self.tra(hidden, hist_loss)
+
+ L = (all_preds - label[:, None]).pow(2)
+
+            L -= L.min(dim=-1, keepdim=True).values  # normalize & ensure positive input
+
+ data_set.assign_data(index, L) # save loss to memory
+
+ X = np.c_[
+ pred.cpu().numpy(),
+ label.cpu().numpy(),
+ ]
+ columns = ["score", "label"]
+ if prob is not None:
+ X = np.c_[X, all_preds.cpu().numpy(), prob.cpu().numpy()]
+ columns += ["score_%d" % d for d in range(all_preds.shape[1])] + [
+ "prob_%d" % d for d in range(all_preds.shape[1])
+ ]
+
+ pred = pd.DataFrame(X, index=index.cpu().numpy(), columns=columns)
+
+ metrics.append(evaluate(pred))
+
+ if return_pred:
+ preds.append(pred)
+
+ metrics = pd.DataFrame(metrics)
+ metrics = {
+ "MSE": metrics.MSE.mean(),
+ "MAE": metrics.MAE.mean(),
+ "IC": metrics.IC.mean(),
+ "ICIR": metrics.IC.mean() / metrics.IC.std(),
+ }
+
+ if return_pred:
+ preds = pd.concat(preds, axis=0)
+ preds.index = data_set.restore_index(preds.index)
+ preds.index = preds.index.swaplevel()
+ preds.sort_index(inplace=True)
+
+ return metrics, preds
+
+ def fit(self, dataset, evals_result=dict()):
+
+ train_set, valid_set, test_set = dataset.prepare(["train", "valid", "test"])
+
+ best_score = -1
+ best_epoch = 0
+ stop_rounds = 0
+ best_params = {
+ "model": copy.deepcopy(self.model.state_dict()),
+ "tra": copy.deepcopy(self.tra.state_dict()),
+ }
+ params_list = {
+ "model": collections.deque(maxlen=self.smooth_steps),
+ "tra": collections.deque(maxlen=self.smooth_steps),
+ }
+ evals_result["train"] = []
+ evals_result["valid"] = []
+ evals_result["test"] = []
+
+ # train
+ self.fitted = True
+ self.global_step = -1
+
+ if self.tra.num_states > 1:
+ self.logger.info("init memory...")
+ self.test_epoch(train_set)
+
+ for epoch in range(self.n_epochs):
+ self.logger.info("Epoch %d:", epoch)
+
+ self.logger.info("training...")
+ self.train_epoch(train_set)
+
+ self.logger.info("evaluating...")
+ # average params for inference
+ params_list["model"].append(copy.deepcopy(self.model.state_dict()))
+ params_list["tra"].append(copy.deepcopy(self.tra.state_dict()))
+ self.model.load_state_dict(average_params(params_list["model"]))
+ self.tra.load_state_dict(average_params(params_list["tra"]))
+
+ # NOTE: during evaluating, the whole memory will be refreshed
+ if self.tra.num_states > 1 or self.eval_train:
+ train_set.clear_memory() # NOTE: clear the shared memory
+ train_metrics = self.test_epoch(train_set)[0]
+ evals_result["train"].append(train_metrics)
+ self.logger.info("\ttrain metrics: %s" % train_metrics)
+
+ valid_metrics = self.test_epoch(valid_set)[0]
+ evals_result["valid"].append(valid_metrics)
+ self.logger.info("\tvalid metrics: %s" % valid_metrics)
+
+ if self.eval_test:
+ test_metrics = self.test_epoch(test_set)[0]
+ evals_result["test"].append(test_metrics)
+ self.logger.info("\ttest metrics: %s" % test_metrics)
+
+ if valid_metrics["IC"] > best_score:
+ best_score = valid_metrics["IC"]
+ stop_rounds = 0
+ best_epoch = epoch
+ best_params = {
+ "model": copy.deepcopy(self.model.state_dict()),
+ "tra": copy.deepcopy(self.tra.state_dict()),
+ }
+ else:
+ stop_rounds += 1
+ if stop_rounds >= self.early_stop:
+ self.logger.info("early stop @ %s" % epoch)
+ break
+
+ # restore parameters
+ self.model.load_state_dict(params_list["model"][-1])
+ self.tra.load_state_dict(params_list["tra"][-1])
+
+ self.logger.info("best score: %.6lf @ %d" % (best_score, best_epoch))
+ self.model.load_state_dict(best_params["model"])
+ self.tra.load_state_dict(best_params["tra"])
+
+ metrics, preds = self.test_epoch(test_set, return_pred=True)
+ self.logger.info("test metrics: %s" % metrics)
+
+ if self.logdir:
+ self.logger.info("save model & pred to local directory")
+
+ pd.concat({name: pd.DataFrame(evals_result[name]) for name in evals_result}, axis=1).to_csv(
+ self.logdir + "/logs.csv", index=False
+ )
+
+ torch.save(best_params, self.logdir + "/model.bin")
+
+ preds.to_pickle(self.logdir + "/pred.pkl")
+
+ info = {
+ "config": {
+ "model_config": self.model_config,
+ "tra_config": self.tra_config,
+ "lr": self.lr,
+ "n_epochs": self.n_epochs,
+ "early_stop": self.early_stop,
+ "smooth_steps": self.smooth_steps,
+ "max_steps_per_epoch": self.max_steps_per_epoch,
+ "lamb": self.lamb,
+ "rho": self.rho,
+ "seed": self.seed,
+ "logdir": self.logdir,
+ },
+ "best_eval_metric": -best_score, # NOTE: minux -1 for minimize
+ "metric": metrics,
+ }
+ with open(self.logdir + "/info.json", "w") as f:
+ json.dump(info, f)
+
+ def predict(self, dataset, segment="test"):
+ if not self.fitted:
+ raise ValueError("model is not fitted yet!")
+
+ test_set = dataset.prepare(segment)
+
+ metrics, preds = self.test_epoch(test_set, return_pred=True)
+ self.logger.info("test metrics: %s" % metrics)
+
+ return preds
+
+
+class LSTM(nn.Module):
+
+ """LSTM Model
+
+ Args:
+ input_size (int): input size (# features)
+ hidden_size (int): hidden size
+ num_layers (int): number of hidden layers
+ use_attn (bool): whether use attention layer.
+ we use concat attention as https://github.com/fulifeng/Adv-ALSTM/
+ dropout (float): dropout rate
+ input_drop (float): input dropout for data augmentation
+ noise_level (float): add gaussian noise to input for data augmentation
+ """
+
+ def __init__(
+ self,
+ input_size=16,
+ hidden_size=64,
+ num_layers=2,
+ use_attn=True,
+ dropout=0.0,
+ input_drop=0.0,
+ noise_level=0.0,
+ *args,
+ **kwargs,
+ ):
+ super().__init__()
+
+ self.input_size = input_size
+ self.hidden_size = hidden_size
+ self.num_layers = num_layers
+ self.use_attn = use_attn
+ self.noise_level = noise_level
+
+ self.input_drop = nn.Dropout(input_drop)
+
+ self.rnn = nn.LSTM(
+ input_size=input_size,
+ hidden_size=hidden_size,
+ num_layers=num_layers,
+ batch_first=True,
+ dropout=dropout,
+ )
+
+ if self.use_attn:
+ self.W = nn.Linear(hidden_size, hidden_size)
+ self.u = nn.Linear(hidden_size, 1, bias=False)
+ self.output_size = hidden_size * 2
+ else:
+ self.output_size = hidden_size
+
+ def forward(self, x):
+
+ x = self.input_drop(x)
+
+ if self.training and self.noise_level > 0:
+ noise = torch.randn_like(x).to(x)
+ x = x + noise * self.noise_level
+
+ rnn_out, _ = self.rnn(x)
+ last_out = rnn_out[:, -1]
+
+ if self.use_attn:
+            latent = self.W(rnn_out).tanh()
+            scores = self.u(latent).softmax(dim=1)
+ att_out = (rnn_out * scores).sum(dim=1).squeeze()
+ last_out = torch.cat([last_out, att_out], dim=1)
+
+ return last_out
+
+
+class PositionalEncoding(nn.Module):
+ # reference: https://pytorch.org/tutorials/beginner/transformer_tutorial.html
+ def __init__(self, d_model, dropout=0.1, max_len=5000):
+ super(PositionalEncoding, self).__init__()
+ self.dropout = nn.Dropout(p=dropout)
+
+ pe = torch.zeros(max_len, d_model)
+ position = torch.arange(0, max_len, dtype=torch.float).unsqueeze(1)
+ div_term = torch.exp(torch.arange(0, d_model, 2).float() * (-math.log(10000.0) / d_model))
+ pe[:, 0::2] = torch.sin(position * div_term)
+ pe[:, 1::2] = torch.cos(position * div_term)
+ pe = pe.unsqueeze(0).transpose(0, 1)
+ self.register_buffer("pe", pe)
+
+ def forward(self, x):
+ x = x + self.pe[: x.size(0), :]
+ return self.dropout(x)
+
+
+class Transformer(nn.Module):
+
+ """Transformer Model
+
+ Args:
+ input_size (int): input size (# features)
+ hidden_size (int): hidden size
+ num_layers (int): number of transformer layers
+ num_heads (int): number of heads in transformer
+ dropout (float): dropout rate
+ input_drop (float): input dropout for data augmentation
+ noise_level (float): add gaussian noise to input for data augmentation
+ """
+
+ def __init__(
+ self,
+ input_size=16,
+ hidden_size=64,
+ num_layers=2,
+ num_heads=2,
+ dropout=0.0,
+ input_drop=0.0,
+ noise_level=0.0,
+ **kwargs,
+ ):
+ super().__init__()
+
+ self.input_size = input_size
+ self.hidden_size = hidden_size
+ self.num_layers = num_layers
+ self.num_heads = num_heads
+ self.noise_level = noise_level
+
+ self.input_drop = nn.Dropout(input_drop)
+
+ self.input_proj = nn.Linear(input_size, hidden_size)
+
+ self.pe = PositionalEncoding(input_size, dropout)
+ layer = nn.TransformerEncoderLayer(
+ nhead=num_heads, dropout=dropout, d_model=hidden_size, dim_feedforward=hidden_size * 4
+ )
+ self.encoder = nn.TransformerEncoder(layer, num_layers=num_layers)
+
+ self.output_size = hidden_size
+
+ def forward(self, x):
+
+ x = self.input_drop(x)
+
+ if self.training and self.noise_level > 0:
+ noise = torch.randn_like(x).to(x)
+ x = x + noise * self.noise_level
+
+        x = x.permute(1, 0, 2).contiguous()  # the first dim needs to be the sequence
+ x = self.pe(x)
+
+ x = self.input_proj(x)
+ out = self.encoder(x)
+
+ return out[-1]
+
+
+class TRA(nn.Module):
+
+ """Temporal Routing Adaptor (TRA)
+
+    TRA takes historical prediction errors & latent representation as inputs,
+ then routes the input sample to a specific predictor for training & inference.
+
+ Args:
+ input_size (int): input size (RNN/Transformer's hidden size)
+ num_states (int): number of latent states (i.e., trading patterns)
+ If `num_states=1`, then TRA falls back to traditional methods
+ hidden_size (int): hidden size of the router
+ tau (float): gumbel softmax temperature
+ """
+
+ def __init__(self, input_size, num_states=1, hidden_size=8, tau=1.0, src_info="LR_TPE"):
+ super().__init__()
+
+ self.num_states = num_states
+ self.tau = tau
+ self.src_info = src_info
+
+ if num_states > 1:
+ self.router = nn.LSTM(
+ input_size=num_states,
+ hidden_size=hidden_size,
+ num_layers=1,
+ batch_first=True,
+ )
+ self.fc = nn.Linear(hidden_size + input_size, num_states)
+
+ self.predictors = nn.Linear(input_size, num_states)
+
+ def forward(self, hidden, hist_loss):
+
+ preds = self.predictors(hidden)
+
+ if self.num_states == 1:
+ return preds.squeeze(-1), preds, None
+
+ # information type
+ router_out, _ = self.router(hist_loss)
+ if "LR" in self.src_info:
+ latent_representation = hidden
+ else:
+ latent_representation = torch.randn(hidden.shape).to(hidden)
+ if "TPE" in self.src_info:
+ temporal_pred_error = router_out[:, -1]
+ else:
+ temporal_pred_error = torch.randn(router_out[:, -1].shape).to(hidden)
+
+ out = self.fc(torch.cat([temporal_pred_error, latent_representation], dim=-1))
+ prob = F.gumbel_softmax(out, dim=-1, tau=self.tau, hard=False)
+
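+        # training uses a soft (differentiable) mixture over predictors,
+        # while inference routes each sample hard to its most probable predictor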
+ if self.training:
+ final_pred = (preds * prob).sum(dim=-1)
+ else:
+ final_pred = preds[range(len(preds)), prob.argmax(dim=-1)]
+
+ return final_pred, preds, prob
+
+
+def evaluate(pred):
+ pred = pred.rank(pct=True) # transform into percentiles
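+    # NOTE: since score and label are both converted to percentiles first,
+    # the IC below is effectively a rank (Spearman-style) correlation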
+ score = pred.score
+ label = pred.label
+ diff = score - label
+ MSE = (diff ** 2).mean()
+ MAE = (diff.abs()).mean()
+ IC = score.corr(label)
+ return {"MSE": MSE, "MAE": MAE, "IC": IC}
+
+
+def average_params(params_list):
+ assert isinstance(params_list, (tuple, list, collections.deque))
+ n = len(params_list)
+ if n == 1:
+ return params_list[0]
+ new_params = collections.OrderedDict()
+ keys = None
+ for i, params in enumerate(params_list):
+ if keys is None:
+ keys = params.keys()
+ for k, v in params.items():
+ if k not in keys:
+ raise ValueError("the %d-th model has different params" % i)
+ if k not in new_params:
+ new_params[k] = v / n
+ else:
+ new_params[k] += v / n
+ return new_params
+
+
+def shoot_infs(inp_tensor):
+ """Replaces inf by maximum of tensor"""
+ mask_inf = torch.isinf(inp_tensor)
+ ind_inf = torch.nonzero(mask_inf, as_tuple=False)
+ if len(ind_inf) > 0:
+ for ind in ind_inf:
+ if len(ind) == 2:
+ inp_tensor[ind[0], ind[1]] = 0
+ elif len(ind) == 1:
+ inp_tensor[ind[0]] = 0
+ m = torch.max(inp_tensor)
+ for ind in ind_inf:
+ if len(ind) == 2:
+ inp_tensor[ind[0], ind[1]] = m
+ elif len(ind) == 1:
+ inp_tensor[ind[0]] = m
+ return inp_tensor
+
+
+def sinkhorn(Q, n_iters=3, epsilon=0.01):
+ # epsilon should be adjusted according to logits value's scale
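+    # Sinkhorn-Knopp iteration: exponentiate the scaled logits, then alternately
+    # normalize columns and rows so the result approximates an entropy-regularized
+    # optimal transport plan, i.e. a balanced soft assignment of samples to predictors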
+ with torch.no_grad():
+ Q = shoot_infs(Q)
+ Q = torch.exp(Q / epsilon)
+ for i in range(n_iters):
+ Q /= Q.sum(dim=0, keepdim=True)
+ Q /= Q.sum(dim=1, keepdim=True)
+ return Q